From 211d74252b506bf0aa7ced5053428004a57c2ae9 Mon Sep 17 00:00:00 2001
From: Valentin Stanciu <250871+svalentin@users.noreply.github.com>
Date: Fri, 14 Jul 2023 14:13:38 +0300
Subject: [PATCH 001/288] Update dev version to 1.6.0+dev (#15671)

Created the [release-1.5
branch](https://github.com/python/mypy/tree/release-1.5), updating
version
---
 mypy/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypy/version.py b/mypy/version.py
index 42cda2fc7794..512890ce7d2b 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -8,7 +8,7 @@
 # - Release versions have the form "1.2.3".
 # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440).
 # - Before 1.0 we had the form "0.NNN".
-__version__ = "1.5.0+dev"
+__version__ = "1.6.0+dev"
 base_version = __version__
 
 mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))

From a538cc98d54031f25e44787a90649ea909877f12 Mon Sep 17 00:00:00 2001
From: Valentin Stanciu <250871+svalentin@users.noreply.github.com>
Date: Fri, 14 Jul 2023 14:33:32 +0300
Subject: [PATCH 002/288] fix cherry-pick-typeshed (#15672)

It should exclude test_cases too
---
 misc/cherry-pick-typeshed.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/misc/cherry-pick-typeshed.py b/misc/cherry-pick-typeshed.py
index af08009c2a8f..7e3b8b56e65f 100644
--- a/misc/cherry-pick-typeshed.py
+++ b/misc/cherry-pick-typeshed.py
@@ -53,6 +53,7 @@ def main() -> None:
                 "--index",
                 "--directory=mypy/typeshed",
                 "--exclude=**/tests/**",
+                "--exclude=**/test_cases/**",
                 diff_file,
             ],
             check=True,

From 1958cb62f4de7492fb154323f3fdb7a0b6b51fa7 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Fri, 14 Jul 2023 20:18:54 +0200
Subject: [PATCH 003/288] Remove `--py2` argument (#15670)

---
 mypy/defaults.py     | 2 --
 mypy/main.py         | 8 --------
 mypy/test/helpers.py | 2 +-
 3 files changed, 1 insertion(+), 11 deletions(-)

diff --git a/mypy/defaults.py b/mypy/defaults.py
index 2a881975a27c..1bd87de74bc9 100644
--- a/mypy/defaults.py
+++ b/mypy/defaults.py
@@ -3,8 +3,6 @@
 import os
 from typing import Final
 
-PYTHON2_VERSION: Final = (2, 7)
-
 # Earliest fully supported Python 3.x version. Used as the default Python
 # version in tests. Mypy wheels should be built starting with this version,
 # and CI tests should be run on this version (and later versions).
diff --git a/mypy/main.py b/mypy/main.py
index f6e617e4d84f..6173fd6fc1a8 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -594,14 +594,6 @@ def add_invertible_flag(
         help="Type check code assuming it will be running on Python x.y",
         dest="special-opts:python_version",
     )
-    platform_group.add_argument(
-        "-2",
-        "--py2",
-        dest="special-opts:python_version",
-        action="store_const",
-        const=defaults.PYTHON2_VERSION,
-        help="Use Python 2 mode (same as --python-version 2.7)",
-    )
     platform_group.add_argument(
         "--platform",
         action="store",
diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py
index d2c92614048a..d1850219e60a 100644
--- a/mypy/test/helpers.py
+++ b/mypy/test/helpers.py
@@ -345,7 +345,7 @@ def parse_options(
         options.force_union_syntax = True
 
     # Allow custom python version to override testfile_pyversion.
-    if all(flag.split("=")[0] not in ["--python-version", "-2", "--py2"] for flag in flag_list):
+    if all(flag.split("=")[0] != "--python-version" for flag in flag_list):
         options.python_version = testfile_pyversion(testcase.file)
 
     if testcase.config.getoption("--mypy-verbose"):

From 14743a1cdd2a07ecc56ce01cc9d54130fb32931e Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sat, 15 Jul 2023 10:51:04 +0300
Subject: [PATCH 004/288] Bump minimum Python type check target version to 3.7
 (#15668)

---
 mypy/checkexpr.py                          |  2 +-
 mypy/checkstrformat.py                     | 15 ------
 mypy/defaults.py                           |  2 +-
 mypy/messages.py                           | 13 +----
 mypy/semanal.py                            | 18 ++-----
 mypy/semanal_namedtuple.py                 |  3 --
 mypy/semanal_pass1.py                      |  7 ++-
 test-data/unit/README.md                   |  2 +-
 test-data/unit/check-async-await.test      | 40 ++-------------
 test-data/unit/check-class-namedtuple.test | 58 ++--------------------
 test-data/unit/check-fastparse.test        |  1 -
 test-data/unit/check-flags.test            | 12 ++---
 test-data/unit/check-formatting.test       |  7 ---
 test-data/unit/check-generic-alias.test    |  4 --
 test-data/unit/check-generics.test         |  2 +-
 test-data/unit/check-incremental.test      |  1 -
 test-data/unit/check-inference.test        |  1 -
 test-data/unit/check-modules.test          | 21 --------
 test-data/unit/check-namedtuple.test       | 13 +----
 test-data/unit/check-narrowing.test        |  1 -
 test-data/unit/check-newsemanal.test       | 12 -----
 test-data/unit/check-newsyntax.test        | 41 ++++-----------
 test-data/unit/check-singledispatch.test   | 14 ------
 test-data/unit/check-tuples.test           |  3 +-
 test-data/unit/check-typeddict.test        | 16 +-----
 test-data/unit/check-underscores.test      |  6 ---
 test-data/unit/check-union-or-syntax.test  | 11 ++--
 test-data/unit/check-unreachable-code.test |  6 +--
 test-data/unit/cmdline.test                | 22 ++++----
 test-data/unit/daemon.test                 | 20 ++++----
 test-data/unit/parse-errors.test           | 11 +---
 test-data/unit/pythoneval.test             |  2 +-
 test-data/unit/reports.test                |  2 +-
 33 files changed, 75 insertions(+), 314 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 46a5e35f320d..62e2298ba59d 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -3072,7 +3072,7 @@ def visit_op_expr(self, e: OpExpr) -> Type:
             # Expressions of form [...] * e get special type inference.
             return self.check_list_multiply(e)
         if e.op == "%":
-            if isinstance(e.left, BytesExpr) and self.chk.options.python_version >= (3, 5):
+            if isinstance(e.left, BytesExpr):
                 return self.strfrm_checker.check_str_interpolation(e.left, e.right)
             if isinstance(e.left, StrExpr):
                 return self.strfrm_checker.check_str_interpolation(e.left, e.right)
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
index cda603be086b..eeb9e7633756 100644
--- a/mypy/checkstrformat.py
+++ b/mypy/checkstrformat.py
@@ -682,14 +682,6 @@ def check_str_interpolation(self, expr: FormatStringExpr, replacements: Expressi
         self.exprchk.accept(expr)
         specifiers = parse_conversion_specifiers(expr.value)
         has_mapping_keys = self.analyze_conversion_specifiers(specifiers, expr)
-        if isinstance(expr, BytesExpr) and self.chk.options.python_version < (3, 5):
-            self.msg.fail(
-                "Bytes formatting is only supported in Python 3.5 and later",
-                replacements,
-                code=codes.STRING_FORMATTING,
-            )
-            return AnyType(TypeOfAny.from_error)
-
         if has_mapping_keys is None:
             pass  # Error was reported
         elif has_mapping_keys:
@@ -1023,13 +1015,6 @@ def conversion_type(
         NUMERIC_TYPES = NUMERIC_TYPES_NEW if format_call else NUMERIC_TYPES_OLD
         INT_TYPES = REQUIRE_INT_NEW if format_call else REQUIRE_INT_OLD
         if p == "b" and not format_call:
-            if self.chk.options.python_version < (3, 5):
-                self.msg.fail(
-                    'Format character "b" is only supported in Python 3.5 and later',
-                    context,
-                    code=codes.STRING_FORMATTING,
-                )
-                return None
             if not isinstance(expr, BytesExpr):
                 self.msg.fail(
                     'Format character "b" is only supported on bytes patterns',
diff --git a/mypy/defaults.py b/mypy/defaults.py
index 1bd87de74bc9..6a09a61a461e 100644
--- a/mypy/defaults.py
+++ b/mypy/defaults.py
@@ -10,7 +10,7 @@
 
 # Earliest Python 3.x version supported via --python-version 3.x. To run
 # mypy, at least version PYTHON3_VERSION is needed.
-PYTHON3_VERSION_MIN: Final = (3, 4)
+PYTHON3_VERSION_MIN: Final = (3, 7)  # Keep in sync with typeshed's python support
 
 CACHE_DIR: Final = ".mypy_cache"
 CONFIG_FILE: Final = ["mypy.ini", ".mypy.ini"]
diff --git a/mypy/messages.py b/mypy/messages.py
index ae7fba1473ac..8b88cc1678a4 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -1728,7 +1728,6 @@ def need_annotation_for_var(
         self, node: SymbolNode, context: Context, python_version: tuple[int, int] | None = None
     ) -> None:
         hint = ""
-        has_variable_annotations = not python_version or python_version >= (3, 6)
         pep604_supported = not python_version or python_version >= (3, 10)
         # type to recommend the user adds
         recommended_type = None
@@ -1749,18 +1748,10 @@ def need_annotation_for_var(
                     type_dec = f"{type_dec}, {type_dec}"
                 recommended_type = f"{alias}[{type_dec}]"
         if recommended_type is not None:
-            if has_variable_annotations:
-                hint = f' (hint: "{node.name}: {recommended_type} = ...")'
-            else:
-                hint = f' (hint: "{node.name} = ...  # type: {recommended_type}")'
-
-        if has_variable_annotations:
-            needed = "annotation"
-        else:
-            needed = "comment"
+            hint = f' (hint: "{node.name}: {recommended_type} = ...")'
 
         self.fail(
-            f'Need type {needed} for "{unmangle(node.name)}"{hint}',
+            f'Need type annotation for "{unmangle(node.name)}"{hint}',
             context,
             code=codes.VAR_ANNOTATED,
         )
diff --git a/mypy/semanal.py b/mypy/semanal.py
index f4f281e7a77a..5b1aea4239f5 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -2521,12 +2521,7 @@ def visit_import_from(self, imp: ImportFrom) -> None:
                 elif fullname in self.missing_modules:
                     missing_submodule = True
             # If it is still not resolved, check for a module level __getattr__
-            if (
-                module
-                and not node
-                and (module.is_stub or self.options.python_version >= (3, 7))
-                and "__getattr__" in module.names
-            ):
+            if module and not node and "__getattr__" in module.names:
                 # We store the fullname of the original definition so that we can
                 # detect whether two imported names refer to the same thing.
                 fullname = module_id + "." + id
@@ -5446,11 +5441,8 @@ def visit_yield_expr(self, e: YieldExpr) -> None:
                 blocker=True,
             )
         elif self.function_stack[-1].is_coroutine:
-            if self.options.python_version < (3, 6):
-                self.fail('"yield" in async function', e, serious=True, blocker=True)
-            else:
-                self.function_stack[-1].is_generator = True
-                self.function_stack[-1].is_async_generator = True
+            self.function_stack[-1].is_generator = True
+            self.function_stack[-1].is_async_generator = True
         else:
             self.function_stack[-1].is_generator = True
         if e.expr:
@@ -5721,9 +5713,7 @@ def get_module_symbol(self, node: MypyFile, name: str) -> SymbolTableNode | None
                 sym = SymbolTableNode(GDEF, self.modules[fullname])
             elif self.is_incomplete_namespace(module):
                 self.record_incomplete_ref()
-            elif "__getattr__" in names and (
-                node.is_stub or self.options.python_version >= (3, 7)
-            ):
+            elif "__getattr__" in names:
                 gvar = self.create_getattr_var(names["__getattr__"], name, fullname)
                 if gvar:
                     sym = SymbolTableNode(GDEF, gvar)
diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py
index 42f7b10f3333..51ea90e07f3d 100644
--- a/mypy/semanal_namedtuple.py
+++ b/mypy/semanal_namedtuple.py
@@ -142,9 +142,6 @@ def check_namedtuple_classdef(
           * valid statements
         or None, if any of the types are not ready.
         """
-        if self.options.python_version < (3, 6) and not is_stub_file:
-            self.fail("NamedTuple class syntax is only supported in Python 3.6", defn)
-            return [], [], {}, []
         if len(defn.base_type_exprs) > 1:
             self.fail("NamedTuple should be a single base", defn)
         items: list[str] = []
diff --git a/mypy/semanal_pass1.py b/mypy/semanal_pass1.py
index 2df06feacca8..aaa01969217a 100644
--- a/mypy/semanal_pass1.py
+++ b/mypy/semanal_pass1.py
@@ -45,10 +45,9 @@ class SemanticAnalyzerPreAnalysis(TraverserVisitor):
 
       import sys
 
-      def do_stuff():
-          # type: () -> None:
-          if sys.python_version < (3,):
-              import xyz  # Only available in Python 2
+      def do_stuff() -> None:
+          if sys.version_info >= (3, 10):
+              import xyz  # Only available in Python 3.10+
               xyz.whatever()
           ...
 
diff --git a/test-data/unit/README.md b/test-data/unit/README.md
index f2c727b43543..5a9416603541 100644
--- a/test-data/unit/README.md
+++ b/test-data/unit/README.md
@@ -12,7 +12,7 @@ feature you added. If you added a new `check-*.test` file, it will be autodiscov
 Add the test in this format anywhere in the file:
 
     [case testNewSyntaxBasics]
-    # flags: --python-version 3.6
+    # flags: --python-version 3.10
     x: int
     x = 5
     y: int = 5
diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test
index bcf55d84ff26..3b7ef53b6bd6 100644
--- a/test-data/unit/check-async-await.test
+++ b/test-data/unit/check-async-await.test
@@ -183,7 +183,6 @@ async def f() -> None:
 [typing fixtures/typing-async.pyi]
 
 [case testAsyncForComprehension]
-# flags: --python-version 3.6
 from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple
 
 T = TypeVar('T')
@@ -223,7 +222,6 @@ async def generatorexp(obj: Iterable[int]):
 [typing fixtures/typing-async.pyi]
 
 [case testAsyncForComprehensionErrors]
-# flags: --python-version 3.6
 from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple
 
 T = TypeVar('T')
@@ -240,16 +238,10 @@ class asyncify(Generic[T], AsyncIterator[T]):
             raise StopAsyncIteration
 
 async def wrong_iterable(obj: Iterable[int]):
-    [i async for i in obj]
-    [i for i in asyncify(obj)]
-    {i: i async for i in obj}
-    {i: i for i in asyncify(obj)}
-
-[out]
-main:18: error: "Iterable[int]" has no attribute "__aiter__" (not async iterable)
-main:19: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable)
-main:20: error: "Iterable[int]" has no attribute "__aiter__" (not async iterable)
-main:21: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable)
+    [i async for i in obj]  # E: "Iterable[int]" has no attribute "__aiter__" (not async iterable)
+    [i for i in asyncify(obj)]  # E: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable)
+    {i: i async for i in obj}  # E: "Iterable[int]" has no attribute "__aiter__" (not async iterable)
+    {i: i for i in asyncify(obj)}  # E: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable)
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-async.pyi]
 
@@ -340,17 +332,6 @@ async def f() -> None:
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-async.pyi]
 
-[case testNoYieldInAsyncDef]
-# flags: --python-version 3.5
-
-async def f():
-    yield None  # E: "yield" in async function
-async def g():
-    yield  # E: "yield" in async function
-async def h():
-    x = yield  # E: "yield" in async function
-[builtins fixtures/async_await.pyi]
-
 [case testNoYieldFromInAsyncDef]
 
 async def f():
@@ -422,7 +403,6 @@ def f() -> Generator[int, str, int]:
 -- ---------------------------------------------------------------------
 
 [case testAsyncGenerator]
-# flags: --python-version 3.6
 from typing import AsyncGenerator, Generator
 
 async def f() -> int:
@@ -450,7 +430,6 @@ async def wrong_return() -> Generator[int, None, None]:  # E: The return type of
 [typing fixtures/typing-async.pyi]
 
 [case testAsyncGeneratorReturnIterator]
-# flags: --python-version 3.6
 from typing import AsyncIterator
 
 async def gen() -> AsyncIterator[int]:
@@ -466,7 +445,6 @@ async def use_gen() -> None:
 [typing fixtures/typing-async.pyi]
 
 [case testAsyncGeneratorManualIter]
-# flags: --python-version 3.6
 from typing import AsyncGenerator
 
 async def genfunc() -> AsyncGenerator[int, None]:
@@ -484,7 +462,6 @@ async def user() -> None:
 [typing fixtures/typing-async.pyi]
 
 [case testAsyncGeneratorAsend]
-# flags: --python-version 3.6
 from typing import AsyncGenerator
 
 async def f() -> None:
@@ -505,7 +482,6 @@ async def h() -> None:
 [typing fixtures/typing-async.pyi]
 
 [case testAsyncGeneratorAthrow]
-# flags: --python-version 3.6
 from typing import AsyncGenerator
 
 async def gen() -> AsyncGenerator[str, int]:
@@ -524,7 +500,6 @@ async def h() -> None:
 [typing fixtures/typing-async.pyi]
 
 [case testAsyncGeneratorNoSyncIteration]
-# flags: --python-version 3.6
 from typing import AsyncGenerator
 
 async def gen() -> AsyncGenerator[int, None]:
@@ -532,17 +507,13 @@ async def gen() -> AsyncGenerator[int, None]:
         yield i
 
 def h() -> None:
-    for i in gen():
+    for i in gen():  # E: "AsyncGenerator[int, None]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable)
         pass
 
 [builtins fixtures/dict.pyi]
 [typing fixtures/typing-async.pyi]
 
-[out]
-main:9: error: "AsyncGenerator[int, None]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable)
-
 [case testAsyncGeneratorNoYieldFrom]
-# flags: --python-version 3.6
 from typing import AsyncGenerator
 
 async def f() -> AsyncGenerator[int, None]:
@@ -555,7 +526,6 @@ async def gen() -> AsyncGenerator[int, None]:
 [typing fixtures/typing-async.pyi]
 
 [case testAsyncGeneratorNoReturnWithValue]
-# flags: --python-version 3.6
 from typing import AsyncGenerator
 
 async def return_int() -> AsyncGenerator[int, None]:
diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test
index ab2f5f3f6b48..1916cb41bb74 100644
--- a/test-data/unit/check-class-namedtuple.test
+++ b/test-data/unit/check-class-namedtuple.test
@@ -1,13 +1,4 @@
-[case testNewNamedTupleOldPythonVersion]
-# flags: --python-version 3.5
-from typing import NamedTuple
-
-class E(NamedTuple):  # E: NamedTuple class syntax is only supported in Python 3.6
-    pass
-[builtins fixtures/tuple.pyi]
-
 [case testNewNamedTupleNoUnderscoreFields]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -17,7 +8,6 @@ class X(NamedTuple):
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleAccessingAttributes]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -31,7 +21,6 @@ x.z # E: "X" has no attribute "z"
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleAttributesAreReadOnly]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -47,7 +36,6 @@ a.x = 5 # E: Property "x" defined in "X" is read-only
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleCreateWithPositionalArguments]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -62,7 +50,6 @@ x = X(1, '2', 3)  # E: Too many arguments for "X"
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleShouldBeSingleBase]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class A: ...
@@ -71,7 +58,6 @@ class X(NamedTuple, A):  # E: NamedTuple should be a single base
 [builtins fixtures/tuple.pyi]
 
 [case testCreateNewNamedTupleWithKeywordArguments]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -85,7 +71,6 @@ x = X(y='x') # E: Missing positional argument "x" in call to "X"
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleCreateAndUseAsTuple]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -98,7 +83,6 @@ a, b, c = x  # E: Need more than 2 values to unpack (3 expected)
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleWithItemTypes]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class N(NamedTuple):
@@ -116,7 +100,6 @@ if int():
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleConstructorArgumentTypes]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class N(NamedTuple):
@@ -130,7 +113,6 @@ N(b='x', a=1)
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleAsBaseClass]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class N(NamedTuple):
@@ -151,7 +133,6 @@ if int():
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleSelfTypeWithNamedTupleAsBase]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class A(NamedTuple):
@@ -172,7 +153,6 @@ class B(A):
 [out]
 
 [case testNewNamedTupleTypeReferenceToClassDerivedFrom]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class A(NamedTuple):
@@ -194,7 +174,6 @@ class B(A):
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleSubtyping]
-# flags: --python-version 3.6
 from typing import NamedTuple, Tuple
 
 class A(NamedTuple):
@@ -222,7 +201,6 @@ if int():
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleSimpleTypeInference]
-# flags: --python-version 3.6
 from typing import NamedTuple, Tuple
 
 class A(NamedTuple):
@@ -239,7 +217,6 @@ a = (1,)  # E: Incompatible types in assignment (expression has type "Tuple[int]
 [builtins fixtures/list.pyi]
 
 [case testNewNamedTupleMissingClassAttribute]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class MyNamedTuple(NamedTuple):
@@ -250,7 +227,6 @@ MyNamedTuple.x # E: "Type[MyNamedTuple]" has no attribute "x"
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleEmptyItems]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class A(NamedTuple):
@@ -258,7 +234,6 @@ class A(NamedTuple):
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleForwardRef]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class A(NamedTuple):
@@ -271,7 +246,6 @@ a = A(1)  # E: Argument 1 to "A" has incompatible type "int"; expected "B"
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleProperty36]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class A(NamedTuple):
@@ -288,7 +262,6 @@ C(2).b
 [builtins fixtures/property.pyi]
 
 [case testNewNamedTupleAsDict]
-# flags: --python-version 3.6
 from typing import NamedTuple, Any
 
 class X(NamedTuple):
@@ -301,7 +274,6 @@ reveal_type(x._asdict())  # N: Revealed type is "builtins.dict[builtins.str, Any
 [builtins fixtures/dict.pyi]
 
 [case testNewNamedTupleReplaceTyped]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -315,7 +287,6 @@ x._replace(y=5)  # E: Argument "y" to "_replace" of "X" has incompatible type "i
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleFields]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -333,7 +304,6 @@ reveal_type(X.__annotations__)  # N: Revealed type is "typing.Mapping[builtins.s
 [builtins fixtures/dict.pyi]
 
 [case testNewNamedTupleUnit]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -345,7 +315,6 @@ x._fields[0]  # E: Tuple index out of range
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleJoinNamedTuple]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -360,7 +329,6 @@ reveal_type([X(3, 'b'), Y(1, 'a')])  # N: Revealed type is "builtins.list[Tuple[
 [builtins fixtures/list.pyi]
 
 [case testNewNamedTupleJoinTuple]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -373,7 +341,6 @@ reveal_type([X(1, 'a'), (3, 'b')])  # N: Revealed type is "builtins.list[Tuple[b
 [builtins fixtures/list.pyi]
 
 [case testNewNamedTupleWithTooManyArguments]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -383,25 +350,17 @@ class X(NamedTuple):
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleWithInvalidItems2]
-# flags: --python-version 3.6
 import typing
 
 class X(typing.NamedTuple):
     x: int
-    y = 1
-    x.x: int
+    y = 1  # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"
+    x.x: int  # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"
     z: str = 'z'
-    aa: int
-
-[out]
-main:6: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"
-main:7: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"
-main:9: error: Non-default NamedTuple fields cannot follow default fields
-
+    aa: int  # E: Non-default NamedTuple fields cannot follow default fields
 [builtins fixtures/list.pyi]
 
 [case testNewNamedTupleWithoutTypesSpecified]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -410,7 +369,6 @@ class X(NamedTuple):
 [builtins fixtures/tuple.pyi]
 
 [case testTypeUsingTypeCNamedTuple]
-# flags: --python-version 3.6
 from typing import NamedTuple, Type
 
 class N(NamedTuple):
@@ -418,13 +376,10 @@ class N(NamedTuple):
     y: str
 
 def f(a: Type[N]):
-    a()
+    a()  # E: Missing positional arguments "x", "y" in call to "N"
 [builtins fixtures/list.pyi]
-[out]
-main:9: error: Missing positional arguments "x", "y" in call to "N"
 
 [case testNewNamedTupleWithDefaults]
-# flags: --python-version 3.6
 from typing import List, NamedTuple, Optional
 
 class X(NamedTuple):
@@ -464,7 +419,7 @@ UserDefined(1)  # E: Argument 1 to "UserDefined" has incompatible type "int"; ex
 [builtins fixtures/list.pyi]
 
 [case testNewNamedTupleWithDefaultsStrictOptional]
-# flags: --strict-optional --python-version 3.6
+# flags: --strict-optional
 from typing import List, NamedTuple, Optional
 
 class HasNone(NamedTuple):
@@ -483,7 +438,6 @@ class CannotBeNone(NamedTuple):
 [builtins fixtures/list.pyi]
 
 [case testNewNamedTupleWrongType]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -492,7 +446,6 @@ class X(NamedTuple):
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleErrorInDefault]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
@@ -500,7 +453,6 @@ class X(NamedTuple):
 [builtins fixtures/tuple.pyi]
 
 [case testNewNamedTupleInheritance]
-# flags: --python-version 3.6
 from typing import NamedTuple
 
 class X(NamedTuple):
diff --git a/test-data/unit/check-fastparse.test b/test-data/unit/check-fastparse.test
index 132a34503b89..534967b1edbf 100644
--- a/test-data/unit/check-fastparse.test
+++ b/test-data/unit/check-fastparse.test
@@ -31,7 +31,6 @@ def f(x):  # E: Invalid type comment or annotation
   pass
 
 [case testFastParseInvalidTypes3]
-# flags: --python-version 3.6
 # All of these should not crash
 from typing import Callable, Tuple, Iterable
 
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
index c356028f6620..3750c44ed7f3 100644
--- a/test-data/unit/check-flags.test
+++ b/test-data/unit/check-flags.test
@@ -1733,7 +1733,7 @@ def h() -> List[Any]:  # E: Explicit "Any" is not allowed
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyExplicitVarDeclaration]
-# flags: --python-version 3.6 --disallow-any-explicit
+# flags: --disallow-any-explicit
 from typing import Any
 v: Any = ''  # E: Explicit "Any" is not allowed
 w = ''  # type: Any  # E: Explicit "Any" is not allowed
@@ -1741,7 +1741,7 @@ class X:
     y = ''  # type: Any  # E: Explicit "Any" is not allowed
 
 [case testDisallowAnyExplicitGenericVarDeclaration]
-# flags: --python-version 3.6 --disallow-any-explicit
+# flags: --disallow-any-explicit
 from typing import Any, List
 v: List[Any] = []  # E: Explicit "Any" is not allowed
 [builtins fixtures/list.pyi]
@@ -1836,7 +1836,7 @@ N = TypedDict('N', {'x': str, 'y': List})  # no error
 [builtins fixtures/dict.pyi]
 
 [case testDisallowAnyGenericsTupleNoTypeParams]
-# flags: --python-version 3.6 --disallow-any-generics
+# flags: --disallow-any-generics
 from typing import Tuple
 
 def f(s: Tuple) -> None: pass  # E: Missing type parameters for generic type "Tuple"
@@ -1877,7 +1877,7 @@ def g(l: L[str]) -> None: pass  # no error
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyGenericsGenericAlias]
-# flags: --python-version 3.6 --disallow-any-generics
+# flags: --disallow-any-generics
 from typing import TypeVar, Tuple
 
 T = TypeVar('T')
@@ -1892,7 +1892,7 @@ x: A = ('a', 'b', 1)  # E: Missing type parameters for generic type "A"
 [builtins fixtures/tuple.pyi]
 
 [case testDisallowAnyGenericsPlainList]
-# flags: --python-version 3.6 --disallow-any-generics
+# flags: --disallow-any-generics
 from typing import List
 
 def f(l: List) -> None: pass  # E: Missing type parameters for generic type "List"
@@ -1905,7 +1905,7 @@ y: List = []  # E: Missing type parameters for generic type "List"
 [builtins fixtures/list.pyi]
 
 [case testDisallowAnyGenericsCustomGenericClass]
-# flags: --python-version 3.6 --disallow-any-generics
+# flags: --disallow-any-generics
 from typing import Generic, TypeVar, Any
 
 T = TypeVar('T')
diff --git a/test-data/unit/check-formatting.test b/test-data/unit/check-formatting.test
index f63abbb33034..7d23c2e199f1 100644
--- a/test-data/unit/check-formatting.test
+++ b/test-data/unit/check-formatting.test
@@ -103,7 +103,6 @@ a = None # type: Any
 [typing fixtures/typing-medium.pyi]
 
 [case testStringInterpolationC]
-# flags: --python-version 3.6
 '%c' % 1
 '%c' % 1.0   # E: "%c" requires int or char (expression has type "float")
 '%c' % 's'
@@ -232,18 +231,12 @@ t5: Iterable[str] = ('A', 'B')
 -- Bytes interpolation
 -- --------------------
 
-
-[case testBytesInterpolationBefore35]
-# flags: --python-version 3.4
-b'%b' % 1  # E: Unsupported left operand type for % ("bytes")
-
 [case testBytesInterpolation]
 b'%b' % 1  # E: Incompatible types in string interpolation (expression has type "int", placeholder has type "bytes")
 b'%b' % b'1'
 b'%a' % 3
 
 [case testBytesInterpolationC]
-# flags: --python-version 3.6
 b'%c' % 1
 b'%c' % 1.0   # E: "%c" requires an integer in range(256) or a single byte (expression has type "float")
 b'%c' % 's'   # E: "%c" requires an integer in range(256) or a single byte (expression has type "str")
diff --git a/test-data/unit/check-generic-alias.test b/test-data/unit/check-generic-alias.test
index 574a57607d11..8c90b5adba34 100644
--- a/test-data/unit/check-generic-alias.test
+++ b/test-data/unit/check-generic-alias.test
@@ -200,7 +200,6 @@ t23: collections.abc.ValuesView[str]
 
 
 [case testGenericBuiltinTupleTyping]
-# flags: --python-version 3.6
 from typing import Tuple
 
 t01: Tuple = ()
@@ -248,7 +247,6 @@ reveal_type(tuple[int, ...]())  # N: Revealed type is "builtins.tuple[builtins.i
 [builtins fixtures/tuple.pyi]
 
 [case testTypeAliasWithBuiltinTupleInStub]
-# flags: --python-version 3.6
 import m
 reveal_type(m.a)  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
 reveal_type(m.b)  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
@@ -261,7 +259,6 @@ b: B
 [builtins fixtures/tuple.pyi]
 
 [case testTypeAliasWithBuiltinListInStub]
-# flags: --python-version 3.6
 import m
 reveal_type(m.a)  # N: Revealed type is "builtins.list[builtins.int]"
 reveal_type(m.b)  # N: Revealed type is "builtins.list[builtins.list[builtins.int]]"
@@ -280,7 +277,6 @@ d: type[str]
 
 
 [case testTypeAliasWithBuiltinListAliasInStub]
-# flags: --python-version 3.6
 import m
 reveal_type(m.a()[0])  # N: Revealed type is "builtins.int"
 
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 42e3d23eddb9..90d46c217451 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -596,7 +596,7 @@ main:13: error: Argument 2 to "Node" has incompatible type "int"; expected "str"
 
 -- Error formatting is a bit different (and probably better) with new analyzer
 [case testGenericTypeAliasesWrongAliases]
-# flags: --show-column-numbers --python-version 3.6 --no-strict-optional
+# flags: --show-column-numbers --no-strict-optional
 from typing import TypeVar, Generic, List, Callable, Tuple, Union
 T = TypeVar('T')
 S = TypeVar('S')
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index cd009887a5b5..d8461fc78815 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -5508,7 +5508,6 @@ class Foo:
 class C: pass
 
 [case testIncrementalNestedNamedTuple]
-# flags: --python-version 3.6
 import a
 
 [file a.py]
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index ee13cb3830fc..3c4a0943556a 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -913,7 +913,6 @@ def call(c: Callable[[int], Any], i: int) -> None:
 [out]
 
 [case testCallableMeetAndJoin]
-# flags: --python-version 3.6
 from typing import Callable, Any, TypeVar
 
 class A: ...
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
index fc3daff64fbd..4992b6589bb3 100644
--- a/test-data/unit/check-modules.test
+++ b/test-data/unit/check-modules.test
@@ -2067,16 +2067,6 @@ def __getattr__(name): ...
 
 [builtins fixtures/module.pyi]
 
-[case testModuleLevelGetattrNotStub36]
-# flags: --python-version 3.6
-import has_getattr
-reveal_type(has_getattr.any_attribute)  # E: Module has no attribute "any_attribute" \
-                                        # N: Revealed type is "Any"
-[file has_getattr.py]
-def __getattr__(name) -> str: ...
-
-[builtins fixtures/module.pyi]
-
 [case testModuleLevelGetattrNotStub37]
 # flags: --python-version 3.7
 
@@ -2111,17 +2101,6 @@ def __getattr__(name: str) -> int: ...
 
 [builtins fixtures/module.pyi]
 
-[case testModuleLevelGetattrImportFromNotStub36]
-# flags: --python-version 3.6
-from non_stub import name  # E: Module "non_stub" has no attribute "name"
-reveal_type(name)  # N: Revealed type is "Any"
-
-[file non_stub.py]
-from typing import Any
-def __getattr__(name: str) -> Any: ...
-
-[builtins fixtures/module.pyi]
-
 [case testModuleLevelGetattrImportFromNotStub37]
 # flags: --python-version 3.7
 from non_stub import name
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
index 83cc8c099deb..d69b924971e1 100644
--- a/test-data/unit/check-namedtuple.test
+++ b/test-data/unit/check-namedtuple.test
@@ -38,18 +38,7 @@ x.y
 x.z # E: "X" has no attribute "z"
 [builtins fixtures/tuple.pyi]
 
-[case testNamedTupleClassPython35]
-# flags: --python-version 3.5
-from typing import NamedTuple
-
-class A(NamedTuple):
-    x = 3  # type: int
-[builtins fixtures/tuple.pyi]
-[out]
-main:4: error: NamedTuple class syntax is only supported in Python 3.6
-
-[case testNamedTupleClassInStubPython35]
-# flags: --python-version 3.5
+[case testNamedTupleClassInStub]
 import foo
 
 [file foo.pyi]
diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test
index c329ccf840a8..f06af0057f0f 100644
--- a/test-data/unit/check-narrowing.test
+++ b/test-data/unit/check-narrowing.test
@@ -1138,7 +1138,6 @@ reveal_type(x) # N: Revealed type is "builtins.bool"
 [builtins fixtures/primitives.pyi]
 
 [case testNarrowingTypedDictUsingEnumLiteral]
-# flags: --python-version 3.6
 from typing import Union
 from typing_extensions import TypedDict, Literal
 from enum import Enum
diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test
index 77a1553d4715..8300957ee511 100644
--- a/test-data/unit/check-newsemanal.test
+++ b/test-data/unit/check-newsemanal.test
@@ -2571,18 +2571,6 @@ import n
 [file n.pyi]
 class C: pass
 
-[case testNewAnalyzerModuleGetAttrInPython36]
-# flags: --python-version 3.6
-import m
-import n
-
-x: m.n.C # E: Name "m.n.C" is not defined
-y: n.D # E: Name "n.D" is not defined
-[file m.py]
-import n
-[file n.py]
-def __getattr__(x): pass
-
 [case testNewAnalyzerModuleGetAttrInPython37]
 # flags: --python-version 3.7
 import m
diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test
index cfcbfc598c51..0815d7af1933 100644
--- a/test-data/unit/check-newsyntax.test
+++ b/test-data/unit/check-newsyntax.test
@@ -1,15 +1,8 @@
-[case testNewSyntaxRequire36]
-# flags: --python-version 3.5
-x: int = 5  # E: Variable annotation syntax is only supported in Python 3.6 and greater
-[out]
-
 [case testNewSyntaxSyntaxError]
-# flags: --python-version 3.6
 x: int: int  # E: invalid syntax
 [out]
 
 [case testNewSyntaxBasics]
-# flags: --python-version 3.6
 x: int
 x = 5
 y: int = 5
@@ -19,11 +12,10 @@ a = 5  # E: Incompatible types in assignment (expression has type "int", variabl
 b: str = 5  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
 
 zzz: int
-zzz: str  # E: Name "zzz" already defined on line 10
+zzz: str  # E: Name "zzz" already defined on line 9
 [out]
 
 [case testNewSyntaxWithDict]
-# flags: --python-version 3.6
 from typing import Dict, Any
 
 d: Dict[int, str] = {}
@@ -34,7 +26,6 @@ d['ab'] = 'ab'  # E: Invalid index type "str" for "Dict[int, str]"; expected typ
 [out]
 
 [case testNewSyntaxWithRevealType]
-# flags: --python-version 3.6
 from typing import Dict
 
 def tst_local(dct: Dict[int, T]) -> Dict[T, int]:
@@ -46,7 +37,6 @@ reveal_type(tst_local({1: 'a'}))  # N: Revealed type is "builtins.dict[builtins.
 [out]
 
 [case testNewSyntaxWithInstanceVars]
-# flags: --python-version 3.6
 class TstInstance:
     a: str
     def __init__(self) -> None:
@@ -59,20 +49,20 @@ TstInstance().a = 'ab'
 [out]
 
 [case testNewSyntaxWithClassVars]
-# flags: --strict-optional --python-version 3.6
+# flags: --strict-optional
 class CCC:
     a: str = None  # E: Incompatible types in assignment (expression has type "None", variable has type "str")
 [out]
 
 [case testNewSyntaxWithStrictOptional]
-# flags: --strict-optional --python-version 3.6
+# flags: --strict-optional
 strict: int
 strict = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 strict2: int = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 [out]
 
 [case testNewSyntaxWithStrictOptionalFunctions]
-# flags: --strict-optional --python-version 3.6
+# flags: --strict-optional
 def f() -> None:
     x: int
     if int():
@@ -80,7 +70,7 @@ def f() -> None:
 [out]
 
 [case testNewSyntaxWithStrictOptionalClasses]
-# flags: --strict-optional --python-version 3.6
+# flags: --strict-optional
 class C:
     def meth(self) -> None:
         x: int = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
@@ -88,25 +78,18 @@ class C:
 [out]
 
 [case testNewSyntaxSpecialAssign]
-# flags: --python-version 3.6
 class X:
     x: str
     x[0]: int
     x.x: int
 
 [out]
-main:4: error: Unexpected type declaration
-main:4: error: Unsupported target for indexed assignment ("str")
-main:5: error: Type cannot be declared in assignment to non-self attribute
-main:5: error: "str" has no attribute "x"
-
-[case testNewSyntaxAsyncComprehensionError]
-# flags: --python-version 3.5
-async def f():
-    results = [i async for i in aiter() if i % 2]  # E: Async comprehensions are only supported in Python 3.6 and greater
+main:3: error: Unexpected type declaration
+main:3: error: Unsupported target for indexed assignment ("str")
+main:4: error: Type cannot be declared in assignment to non-self attribute
+main:4: error: "str" has no attribute "x"
 
 [case testNewSyntaxFStringBasics]
-# flags: --python-version 3.6
 f'foobar'
 f'{"foobar"}'
 f'foo{"bar"}'
@@ -118,22 +101,19 @@ a = f'{"foobar"}'
 [builtins fixtures/f_string.pyi]
 
 [case testNewSyntaxFStringExpressionsOk]
-# flags: --python-version 3.6
 f'.{1 + 1}.'
 f'.{1 + 1}.{"foo" + "bar"}'
 [builtins fixtures/f_string.pyi]
 
 [case testNewSyntaxFStringExpressionsErrors]
-# flags: --python-version 3.6
 f'{1 + ""}'
 f'.{1 + ""}'
 [builtins fixtures/f_string.pyi]
 [out]
+main:1: error: Unsupported operand types for + ("int" and "str")
 main:2: error: Unsupported operand types for + ("int" and "str")
-main:3: error: Unsupported operand types for + ("int" and "str")
 
 [case testNewSyntaxFStringParseFormatOptions]
-# flags: --python-version 3.6
 value = 10.5142
 width = 10
 precision = 4
@@ -141,7 +121,6 @@ f'result: {value:{width}.{precision}}'
 [builtins fixtures/f_string.pyi]
 
 [case testNewSyntaxFStringSingleField]
-# flags: --python-version 3.6
 v = 1
 reveal_type(f'{v}') # N: Revealed type is "builtins.str"
 reveal_type(f'{1}') # N: Revealed type is "builtins.str"
diff --git a/test-data/unit/check-singledispatch.test b/test-data/unit/check-singledispatch.test
index 1bc34c6fdaab..1adec1575b7e 100644
--- a/test-data/unit/check-singledispatch.test
+++ b/test-data/unit/check-singledispatch.test
@@ -80,20 +80,6 @@ def g(arg: int) -> None: # E: Argument to register "str" is incompatible with ty
 
 [builtins fixtures/args.pyi]
 
-[case testDispatchBasedOnTypeAnnotationsRequires37-xfail]
-# flags: --python-version 3.6
-# the docs for singledispatch say that register didn't accept type annotations until python 3.7
-from functools import singledispatch
-
-@singledispatch
-def f(arg) -> None:
-    pass
-@f.register
-def g(arg: int) -> None: # E: Singledispatch based on type annotations is only supported in Python 3.7 and greater
-    pass
-
-[builtins fixtures/args.pyi]
-
 [case testTypePassedAsArgumentToRegister]
 from functools import singledispatch
 
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 5cb89a6854be..f64d24a4ed6b 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -1455,8 +1455,7 @@ x7, x8, y7, y8 = *points2, *points3 # E: Contiguous iterable with same type expe
 x9, y9, x10, y10, z5 = *points2, 1, *points2 # E: Contiguous iterable with same type expected
 [builtins fixtures/tuple.pyi]
 
-[case testAssignEmptyPy36]
-# flags: --python-version 3.6
+[case testAssignEmpty]
 () = []
 
 [case testAssignEmptyBogus]
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index 739d1ba6eb75..983fa8c17aec 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -87,7 +87,6 @@ D = TypedDict('D', {
 -- Define TypedDict (Class syntax)
 
 [case testCanCreateTypedDictWithClass]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class Point(TypedDict):
@@ -99,7 +98,6 @@ reveal_type(p)  # N: Revealed type is "TypedDict('__main__.Point', {'x': builtin
 [builtins fixtures/dict.pyi]
 
 [case testCanCreateTypedDictWithSubclass]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class Point1D(TypedDict):
@@ -113,7 +111,6 @@ reveal_type(p)  # N: Revealed type is "TypedDict('__main__.Point2D', {'x': built
 [builtins fixtures/dict.pyi]
 
 [case testCanCreateTypedDictWithSubclass2]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class Point1D(TypedDict):
@@ -126,7 +123,6 @@ reveal_type(p)  # N: Revealed type is "TypedDict('__main__.Point2D', {'x': built
 [builtins fixtures/dict.pyi]
 
 [case testCanCreateTypedDictClassEmpty]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class EmptyDict(TypedDict):
@@ -138,10 +134,7 @@ reveal_type(p)  # N: Revealed type is "TypedDict('__main__.EmptyDict', {})"
 
 
 [case testCanCreateTypedDictWithClassOldVersion]
-# flags: --python-version 3.5
-
-# Test that we can use class-syntax to merge TypedDicts even in
-# versions without type annotations
+# Test that we can use class-syntax to merge function-based TypedDicts
 
 from mypy_extensions import TypedDict
 
@@ -165,7 +158,6 @@ foo({'name': 'lol', 'year': 2009, 'based_on': 0})  # E: Incompatible types (expr
 -- Define TypedDict (Class syntax errors)
 
 [case testCannotCreateTypedDictWithClassOtherBases]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class A: pass
@@ -195,7 +187,6 @@ class C(TypedDict, TypedDict): # E: Duplicate base class "TypedDict"
 [typing fixtures/typing-typeddict.pyi]
 
 [case testCannotCreateTypedDictWithClassWithOtherStuff]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class Point(TypedDict):
@@ -251,7 +242,6 @@ Point = TypedDict('Point', {'x': int, 'y': int, '_fallback': object})
 [builtins fixtures/dict.pyi]
 
 [case testCanCreateTypedDictWithClassUnderscores]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class Point(TypedDict):
@@ -263,7 +253,6 @@ reveal_type(p) # N: Revealed type is "TypedDict('__main__.Point', {'x': builtins
 [builtins fixtures/dict.pyi]
 
 [case testCannotCreateTypedDictWithDuplicateKey1]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class Bad(TypedDict):
@@ -291,7 +280,6 @@ reveal_type(d2) # N: Revealed type is "TypedDict('__main__.D2', {'x': builtins.s
 [typing fixtures/typing-typeddict.pyi]
 
 [case testCanCreateTypedDictWithClassOverwriting]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class Point1(TypedDict):
@@ -306,7 +294,6 @@ reveal_type(b) # N: Revealed type is "TypedDict('__main__.Bad', {'x': builtins.i
 [builtins fixtures/dict.pyi]
 
 [case testCanCreateTypedDictWithClassOverwriting2]
-# flags: --python-version 3.6
 from mypy_extensions import TypedDict
 
 class Point1(TypedDict):
@@ -1774,7 +1761,6 @@ reveal_type(td.pop('c'))  # E: TypedDict "TDA" has no key "c" \
 [typing fixtures/typing-typeddict.pyi]
 
 [case testCanCreateTypedDictWithTypingExtensions]
-# flags: --python-version 3.6
 from typing_extensions import TypedDict
 
 class Point(TypedDict):
diff --git a/test-data/unit/check-underscores.test b/test-data/unit/check-underscores.test
index ac9fad2ca792..2a789b3314f3 100644
--- a/test-data/unit/check-underscores.test
+++ b/test-data/unit/check-underscores.test
@@ -1,10 +1,4 @@
-[case testUnderscoresRequire36]
-# flags: --python-version 3.5
-x = 1000_000  # E: Underscores in numeric literals are only supported in Python 3.6 and greater
-[out]
-
 [case testUnderscoresBasics]
-# flags: --python-version 3.6
 x: int
 x = 1000_000
 x = 0x_FF_FF_FF_FF
diff --git a/test-data/unit/check-union-or-syntax.test b/test-data/unit/check-union-or-syntax.test
index 58526cfd0623..f342d0ca34a5 100644
--- a/test-data/unit/check-union-or-syntax.test
+++ b/test-data/unit/check-union-or-syntax.test
@@ -66,8 +66,8 @@ x: List[int | str]
 reveal_type(x)  # N: Revealed type is "builtins.list[Union[builtins.int, builtins.str]]"
 [builtins fixtures/list.pyi]
 
-[case testUnionOrSyntaxWithQuotedFunctionTypes]
-# flags: --python-version 3.4
+[case testUnionOrSyntaxWithQuotedFunctionTypesPre310]
+# flags: --python-version 3.9
 from typing import Union
 def f(x: 'Union[int, str, None]') -> 'Union[int, None]':
     reveal_type(x)  # N: Revealed type is "Union[builtins.int, builtins.str, None]"
@@ -79,8 +79,8 @@ def g(x: "int | str | None") -> "int | None":
     return 42
 reveal_type(g)  # N: Revealed type is "def (x: Union[builtins.int, builtins.str, None]) -> Union[builtins.int, None]"
 
-[case testUnionOrSyntaxWithQuotedVariableTypes]
-# flags: --python-version 3.6
+[case testUnionOrSyntaxWithQuotedVariableTypesPre310]
+# flags: --python-version 3.9
 y: "int | str" = 42
 reveal_type(y)  # N: Revealed type is "Union[builtins.int, builtins.str]"
 
@@ -124,7 +124,6 @@ cast(str | int, 'x')  # E: Cast target is not a type
 [typing fixtures/typing-full.pyi]
 
 [case testUnionOrSyntaxInComment]
-# flags: --python-version 3.6
 x = 1  # type: int | str
 
 [case testUnionOrSyntaxFutureImport]
@@ -138,7 +137,7 @@ x: int | None
 x: int | None  # E: X | Y syntax for unions requires Python 3.10
 
 [case testUnionOrSyntaxInStubFile]
-# flags: --python-version 3.6
+# flags: --python-version 3.9
 from lib import x
 [file lib.pyi]
 x: int | None
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
index 1db2a16e2e1c..82ff35f53702 100644
--- a/test-data/unit/check-unreachable-code.test
+++ b/test-data/unit/check-unreachable-code.test
@@ -422,9 +422,9 @@ x = 1
 [out]
 
 [case testCustomSysVersionInfo]
-# flags: --python-version 3.5
+# flags: --python-version 3.11
 import sys
-if sys.version_info == (3, 5):
+if sys.version_info == (3, 11):
     x = "foo"
 else:
     x = 3
@@ -433,7 +433,7 @@ reveal_type(x)  # N: Revealed type is "builtins.str"
 [out]
 
 [case testCustomSysVersionInfo2]
-# flags: --python-version 3.5
+# flags: --python-version 3.11
 import sys
 if sys.version_info == (3, 6):
     x = "foo"
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
index 6e9fdf6dab65..42f0ee8a9ec6 100644
--- a/test-data/unit/cmdline.test
+++ b/test-data/unit/cmdline.test
@@ -296,7 +296,7 @@ mypy.ini: [mypy]: ignore_missing_imports: Not a boolean: nah
 [file mypy.ini]
 \[mypy]
 \[mypy-*]
-python_version = 3.4
+python_version = 3.11
 [out]
 mypy.ini: [mypy-*]: Per-module sections should only specify per-module flags (python_version)
 == Return code: 0
@@ -592,7 +592,7 @@ main.py:1: error: Cannot find implementation or library stub for module named "a
 \[tool.mypy]
 python_version = 3.10
 [out]
-pyproject.toml: [mypy]: python_version: Python 3.1 is not supported (must be 3.4 or higher). You may need to put quotes around your Python version
+pyproject.toml: [mypy]: python_version: Python 3.1 is not supported (must be 3.7 or higher). You may need to put quotes around your Python version
 == Return code: 0
 
 [case testPythonVersionTooOld10]
@@ -604,13 +604,13 @@ python_version = 1.0
 mypy.ini: [mypy]: python_version: Python major version '1' out of range (must be 3)
 == Return code: 0
 
-[case testPythonVersionTooOld33]
+[case testPythonVersionTooOld36]
 # cmd: mypy -c pass
 [file mypy.ini]
 \[mypy]
-python_version = 3.3
+python_version = 3.6
 [out]
-mypy.ini: [mypy]: python_version: Python 3.3 is not supported (must be 3.4 or higher)
+mypy.ini: [mypy]: python_version: Python 3.6 is not supported (must be 3.7 or higher)
 == Return code: 0
 
 [case testPythonVersionTooNew40]
@@ -633,18 +633,18 @@ usage: mypy [-h] [-v] [-V] [more options; see below]
 mypy: error: Mypy no longer supports checking Python 2 code. Consider pinning to mypy<0.980 if you need to check Python 2 code.
 == Return code: 2
 
-[case testPythonVersionAccepted34]
+[case testPythonVersionAccepted37]
 # cmd: mypy -c pass
 [file mypy.ini]
 \[mypy]
-python_version = 3.4
+python_version = 3.7
 [out]
 
-[case testPythonVersionAccepted36]
+[case testPythonVersionAccepted311]
 # cmd: mypy -c pass
 [file mypy.ini]
 \[mypy]
-python_version = 3.6
+python_version = 3.11
 [out]
 
 -- This should be a dumping ground for tests of plugins that are sensitive to
@@ -676,11 +676,11 @@ int_pow.py:10: note: Revealed type is "builtins.int"
 int_pow.py:11: note: Revealed type is "Any"
 == Return code: 0
 
-[case testDisallowAnyGenericsBuiltinCollections]
+[case testDisallowAnyGenericsBuiltinCollectionsPre39]
 # cmd: mypy m.py
 [file mypy.ini]
 \[mypy]
-python_version=3.6
+python_version = 3.8
 \[mypy-m]
 disallow_any_generics = True
 
diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test
index f208b4e78e54..18a03a92207d 100644
--- a/test-data/unit/daemon.test
+++ b/test-data/unit/daemon.test
@@ -159,18 +159,18 @@ def plugin(version): return Dummy
 [case testDaemonRunRestartGlobs]
 -- Ensure dmypy is not restarted if the configuration doesn't change and it contains globs
 -- Note: Backslash path separator in output is replaced with forward slash so the same test succeeds on Windows as well
-$ dmypy run -- foo --follow-imports=error --python-version=3.6
+$ dmypy run -- foo --follow-imports=error
 Daemon started
 foo/lol.py:1: error: Name "fail" is not defined
 Found 1 error in 1 file (checked 3 source files)
 == Return code: 1
-$ dmypy run -- foo --follow-imports=error --python-version=3.6
+$ dmypy run -- foo --follow-imports=error
 foo/lol.py:1: error: Name "fail" is not defined
 Found 1 error in 1 file (checked 3 source files)
 == Return code: 1
 $ {python} -c "print('[mypy]')" >mypy.ini
 $ {python} -c "print('ignore_errors=True')" >>mypy.ini
-$ dmypy run -- foo --follow-imports=error --python-version=3.6
+$ dmypy run -- foo --follow-imports=error
 Restarting: configuration changed
 Daemon stopped
 Daemon started
@@ -264,7 +264,7 @@ $ dmypy stop
 Daemon stopped
 
 [case testDaemonWarningSuccessExitCode-posix]
-$ dmypy run -- foo.py --follow-imports=error
+$ dmypy run -- foo.py --follow-imports=error --python-version=3.11
 Daemon started
 foo.py:2: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs
 Success: no issues found in 1 source file
@@ -282,13 +282,13 @@ def foo():
 [case testDaemonQuickstart]
 $ {python} -c "print('x=1')" >foo.py
 $ {python} -c "print('x=1')" >bar.py
-$ mypy --local-partial-types --cache-fine-grained --follow-imports=error --no-sqlite-cache --python-version=3.6 -- foo.py bar.py
+$ mypy --local-partial-types --cache-fine-grained --follow-imports=error --no-sqlite-cache --python-version=3.11 -- foo.py bar.py
 Success: no issues found in 2 source files
-$ {python} -c "import shutil; shutil.copy('.mypy_cache/3.6/bar.meta.json', 'asdf.json')"
+$ {python} -c "import shutil; shutil.copy('.mypy_cache/3.11/bar.meta.json', 'asdf.json')"
 -- update bar's timestamp but don't change the file
 $ {python} -c "import time;time.sleep(1)"
 $ {python} -c "print('x=1')" >bar.py
-$ dmypy run -- foo.py bar.py --follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.6
+$ dmypy run -- foo.py bar.py --follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.11
 Daemon started
 Success: no issues found in 2 source files
 $ dmypy status --fswatcher-dump-file test.json
@@ -296,11 +296,11 @@ Daemon is up and running
 $ dmypy stop
 Daemon stopped
 -- copy the original bar cache file back so that the mtime mismatches
-$ {python} -c "import shutil; shutil.copy('asdf.json', '.mypy_cache/3.6/bar.meta.json')"
+$ {python} -c "import shutil; shutil.copy('asdf.json', '.mypy_cache/3.11/bar.meta.json')"
 -- sleep guarantees timestamp changes
 $ {python} -c "import time;time.sleep(1)"
 $ {python} -c "print('lol')" >foo.py
-$ dmypy run --log-file=log -- foo.py bar.py --follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.6 --quickstart-file test.json
+$ dmypy run --log-file=log -- foo.py bar.py --follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.11 --quickstart-file test.json
 Daemon started
 foo.py:1: error: Name "lol" is not defined
 Found 1 error in 1 file (checked 2 source files)
@@ -309,7 +309,7 @@ Found 1 error in 1 file (checked 2 source files)
 $ {python} -c "import sys; sys.stdout.write(open('log').read())"
 -- make sure the meta file didn't get updated. we use this as an imperfect proxy for
 -- whether the source file got rehashed, which we don't want it to have been.
-$ {python} -c "x = open('.mypy_cache/3.6/bar.meta.json').read(); y = open('asdf.json').read(); assert x == y"
+$ {python} -c "x = open('.mypy_cache/3.11/bar.meta.json').read(); y = open('asdf.json').read(); assert x == y"
 
 [case testDaemonSuggest]
 $ dmypy start --log-file log.txt -- --follow-imports=error --no-error-summary
diff --git a/test-data/unit/parse-errors.test b/test-data/unit/parse-errors.test
index 33cf9b4f91b4..c6b1c00a6169 100644
--- a/test-data/unit/parse-errors.test
+++ b/test-data/unit/parse-errors.test
@@ -273,17 +273,10 @@ file:3: error: Syntax error in type comment
 file:3: error: Inconsistent use of "*" in function signature
 file:3: error: Inconsistent use of "**" in function signature
 
-[case testPrintStatementInPython35]
-# flags: --python-version 3.5
+[case testPrintStatementInPython3]
 print 1
 [out]
-file:2: error: Missing parentheses in call to 'print'
-
-[case testPrintStatementInPython37]
-# flags: --python-version 3.7
-print 1
-[out]
-file:2: error: Missing parentheses in call to 'print'. Did you mean print(1)?
+file:1: error: Missing parentheses in call to 'print'. Did you mean print(1)?
 
 [case testInvalidConditionInConditionalExpression]
 1 if 2, 3 else 4
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index abc0f6a464a9..289005b36d9a 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -867,7 +867,7 @@ _program.py:20: error: Argument 1 to "tst" has incompatible type "defaultdict[st
 _program.py:24: error: Invalid index type "str" for "MyDDict[Dict[<nothing>, <nothing>]]"; expected type "int"
 
 [case testNoSubcriptionOfStdlibCollections]
-# flags: --python-version 3.6
+# flags: --python-version 3.7
 import collections
 from collections import Counter
 from typing import TypeVar
diff --git a/test-data/unit/reports.test b/test-data/unit/reports.test
index 50dabb1fdea9..a6cde503ca09 100644
--- a/test-data/unit/reports.test
+++ b/test-data/unit/reports.test
@@ -311,7 +311,7 @@ Total      0      14    100.00%
 
 
 [case testAnyExpressionsReportTypesOfAny]
-# cmd: mypy --python-version=3.6 --any-exprs-report report n.py
+# cmd: mypy --any-exprs-report report n.py
 
 [file n.py]
 from typing import Any, List

From d7f9f06710cec4f0bb3cd432786264fba4809897 Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Sat, 15 Jul 2023 11:38:50 +0100
Subject: [PATCH 005/288] Sync typeshed

Source commit:
https://github.com/python/typeshed/commit/a83e55990ca7f9b9f93271b9087a3f433f54d94a
---
 mypy/typeshed/stdlib/_ctypes.pyi              |   6 +-
 mypy/typeshed/stdlib/_decimal.pyi             |   1 +
 mypy/typeshed/stdlib/_weakref.pyi             |   1 +
 mypy/typeshed/stdlib/asyncio/events.pyi       |   1 +
 mypy/typeshed/stdlib/asyncio/taskgroups.pyi   |   8 +-
 mypy/typeshed/stdlib/builtins.pyi             | 108 +++++++++++++++++-
 mypy/typeshed/stdlib/collections/__init__.pyi |  23 +++-
 mypy/typeshed/stdlib/datetime.pyi             |   4 +
 mypy/typeshed/stdlib/doctest.pyi              |   3 +
 mypy/typeshed/stdlib/email/charset.pyi        |   9 +-
 mypy/typeshed/stdlib/email/utils.pyi          |   2 +-
 mypy/typeshed/stdlib/errno.pyi                |  14 ++-
 mypy/typeshed/stdlib/functools.pyi            |  40 ++++---
 mypy/typeshed/stdlib/ipaddress.pyi            |   1 +
 mypy/typeshed/stdlib/json/__init__.pyi        |   9 --
 mypy/typeshed/stdlib/linecache.pyi            |   6 +-
 mypy/typeshed/stdlib/pathlib.pyi              |   1 +
 mypy/typeshed/stdlib/plistlib.pyi             |   1 +
 mypy/typeshed/stdlib/sqlite3/dbapi2.pyi       |  20 ++++
 mypy/typeshed/stdlib/statistics.pyi           |   1 +
 mypy/typeshed/stdlib/tkinter/__init__.pyi     |   6 +-
 mypy/typeshed/stdlib/tkinter/ttk.pyi          |   4 +-
 mypy/typeshed/stdlib/typing.pyi               |   1 +
 mypy/typeshed/stdlib/unittest/mock.pyi        |  27 ++++-
 mypy/typeshed/stdlib/uuid.pyi                 |  40 +++----
 25 files changed, 264 insertions(+), 73 deletions(-)

diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi
index 756ee86d3342..25d604218a00 100644
--- a/mypy/typeshed/stdlib/_ctypes.pyi
+++ b/mypy/typeshed/stdlib/_ctypes.pyi
@@ -151,7 +151,11 @@ class Array(Generic[_CT], _CData):
     def _type_(self) -> type[_CT]: ...
     @_type_.setter
     def _type_(self, value: type[_CT]) -> None: ...
-    raw: bytes  # Note: only available if _CT == c_char
+    # Note: only available if _CT == c_char
+    @property
+    def raw(self) -> bytes: ...
+    @raw.setter
+    def raw(self, value: ReadableBuffer) -> None: ...
     value: Any  # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
     # TODO These methods cannot be annotated correctly at the moment.
     # All of these "Any"s stand for the array's element type, but it's not possible to use _CT
diff --git a/mypy/typeshed/stdlib/_decimal.pyi b/mypy/typeshed/stdlib/_decimal.pyi
index 60c609456954..9a90760bd2c2 100644
--- a/mypy/typeshed/stdlib/_decimal.pyi
+++ b/mypy/typeshed/stdlib/_decimal.pyi
@@ -73,6 +73,7 @@ class Decimal:
     def from_float(cls, __f: float) -> Self: ...
     def __bool__(self) -> bool: ...
     def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
+    def __hash__(self) -> int: ...
     def as_tuple(self) -> DecimalTuple: ...
     def as_integer_ratio(self) -> tuple[int, int]: ...
     def to_eng_string(self, context: Context | None = None) -> str: ...
diff --git a/mypy/typeshed/stdlib/_weakref.pyi b/mypy/typeshed/stdlib/_weakref.pyi
index b6044fac4628..2402d0bfe721 100644
--- a/mypy/typeshed/stdlib/_weakref.pyi
+++ b/mypy/typeshed/stdlib/_weakref.pyi
@@ -22,6 +22,7 @@ class ReferenceType(Generic[_T]):
     __callback__: Callable[[ReferenceType[_T]], Any]
     def __new__(cls, __o: _T, __callback: Callable[[ReferenceType[_T]], Any] | None = ...) -> Self: ...
     def __call__(self) -> _T | None: ...
+    def __hash__(self) -> int: ...
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, item: Any) -> GenericAlias: ...
 
diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi
index 2054f6e522a1..b1b0fcfa5fd7 100644
--- a/mypy/typeshed/stdlib/asyncio/events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/events.pyi
@@ -86,6 +86,7 @@ class TimerHandle(Handle):
         loop: AbstractEventLoop,
         context: Context | None = None,
     ) -> None: ...
+    def __hash__(self) -> int: ...
     def when(self) -> float: ...
     def __lt__(self, other: TimerHandle) -> bool: ...
     def __le__(self, other: TimerHandle) -> bool: ...
diff --git a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi
index 08ea8f66559c..47d9bb2f699e 100644
--- a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi
+++ b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi
@@ -1,5 +1,4 @@
-# This only exists in 3.11+. See VERSIONS.
-
+import sys
 from contextvars import Context
 from types import TracebackType
 from typing import TypeVar
@@ -8,7 +7,10 @@ from typing_extensions import Self
 from . import _CoroutineLike
 from .tasks import Task
 
-__all__ = ["TaskGroup"]
+if sys.version_info >= (3, 12):
+    __all__ = ("TaskGroup",)
+else:
+    __all__ = ["TaskGroup"]
 
 _T = TypeVar("_T")
 
diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index 7415a1b7680d..ea917bddb799 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -56,6 +56,7 @@ from typing import (  # noqa: Y022
 from typing_extensions import (
     Concatenate,
     Literal,
+    LiteralString,
     ParamSpec,
     Self,
     SupportsIndex,
@@ -315,6 +316,7 @@ class int:
     def __float__(self) -> float: ...
     def __int__(self) -> int: ...
     def __abs__(self) -> int: ...
+    def __hash__(self) -> int: ...
     def __bool__(self) -> bool: ...
     def __index__(self) -> int: ...
 
@@ -378,6 +380,7 @@ class float:
     def __int__(self) -> int: ...
     def __float__(self) -> float: ...
     def __abs__(self) -> float: ...
+    def __hash__(self) -> int: ...
     def __bool__(self) -> bool: ...
 
 class complex:
@@ -417,6 +420,7 @@ class complex:
     def __neg__(self) -> complex: ...
     def __pos__(self) -> complex: ...
     def __abs__(self) -> float: ...
+    def __hash__(self) -> int: ...
     def __bool__(self) -> bool: ...
     if sys.version_info >= (3, 11):
         def __complex__(self) -> complex: ...
@@ -432,8 +436,17 @@ class str(Sequence[str]):
     def __new__(cls, object: object = ...) -> Self: ...
     @overload
     def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ...
+    @overload
+    def capitalize(self: LiteralString) -> LiteralString: ...
+    @overload
     def capitalize(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def casefold(self: LiteralString) -> LiteralString: ...
+    @overload
     def casefold(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
+    @overload
     def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
     def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
@@ -441,11 +454,20 @@ class str(Sequence[str]):
         self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
     if sys.version_info >= (3, 8):
+        @overload
+        def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ...
+        @overload
         def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ...  # type: ignore[misc]
     else:
+        @overload
+        def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ...
+        @overload
         def expandtabs(self, tabsize: int = 8) -> str: ...  # type: ignore[misc]
 
     def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
+    @overload
+    def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ...
+    @overload
     def format(self, *args: object, **kwargs: object) -> str: ...
     def format_map(self, map: _FormatMapMapping) -> str: ...
     def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
@@ -461,32 +483,91 @@ class str(Sequence[str]):
     def isspace(self) -> bool: ...
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
+    @overload
+    def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ...
+    @overload
     def join(self, __iterable: Iterable[str]) -> str: ...  # type: ignore[misc]
+    @overload
+    def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
+    @overload
     def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
+    @overload
+    def lower(self: LiteralString) -> LiteralString: ...
+    @overload
     def lower(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
+    @overload
     def lstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
+    @overload
+    def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
+    @overload
     def partition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
+    @overload
+    def replace(
+        self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1
+    ) -> LiteralString: ...
+    @overload
     def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ...  # type: ignore[misc]
     if sys.version_info >= (3, 9):
+        @overload
+        def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ...
+        @overload
         def removeprefix(self, __prefix: str) -> str: ...  # type: ignore[misc]
+        @overload
+        def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ...
+        @overload
         def removesuffix(self, __suffix: str) -> str: ...  # type: ignore[misc]
 
     def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
+    @overload
+    def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
+    @overload
     def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
+    @overload
+    def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
+    @overload
     def rpartition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
+    @overload
+    def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
+    @overload
     def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
+    @overload
+    def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
+    @overload
     def rstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
+    @overload
+    def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
+    @overload
     def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
+    @overload
+    def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ...
+    @overload
     def splitlines(self, keepends: bool = False) -> list[str]: ...  # type: ignore[misc]
     def startswith(
         self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
+    @overload
+    def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
+    @overload
     def strip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
+    @overload
+    def swapcase(self: LiteralString) -> LiteralString: ...
+    @overload
     def swapcase(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def title(self: LiteralString) -> LiteralString: ...
+    @overload
     def title(self) -> str: ...  # type: ignore[misc]
     def translate(self, __table: _TranslateTable) -> str: ...
+    @overload
+    def upper(self: LiteralString) -> LiteralString: ...
+    @overload
     def upper(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ...
+    @overload
     def zfill(self, __width: SupportsIndex) -> str: ...  # type: ignore[misc]
     @staticmethod
     @overload
@@ -497,6 +578,9 @@ class str(Sequence[str]):
     @staticmethod
     @overload
     def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ...
+    @overload
+    def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ...
+    @overload
     def __add__(self, __value: str) -> str: ...  # type: ignore[misc]
     # Incompatible with Sequence.__contains__
     def __contains__(self, __key: str) -> bool: ...  # type: ignore[override]
@@ -504,13 +588,26 @@ class str(Sequence[str]):
     def __ge__(self, __value: str) -> bool: ...
     def __getitem__(self, __key: SupportsIndex | slice) -> str: ...
     def __gt__(self, __value: str) -> bool: ...
+    def __hash__(self) -> int: ...
+    @overload
+    def __iter__(self: LiteralString) -> Iterator[LiteralString]: ...
+    @overload
     def __iter__(self) -> Iterator[str]: ...  # type: ignore[misc]
     def __le__(self, __value: str) -> bool: ...
     def __len__(self) -> int: ...
     def __lt__(self, __value: str) -> bool: ...
+    @overload
+    def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ...
+    @overload
     def __mod__(self, __value: Any) -> str: ...
+    @overload
+    def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
+    @overload
     def __mul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __ne__(self, __value: object) -> bool: ...
+    @overload
+    def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
+    @overload
     def __rmul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __getnewargs__(self) -> tuple[str]: ...
 
@@ -597,6 +694,7 @@ class bytes(Sequence[int]):
     def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ...
     def __len__(self) -> int: ...
     def __iter__(self) -> Iterator[int]: ...
+    def __hash__(self) -> int: ...
     @overload
     def __getitem__(self, __key: SupportsIndex) -> int: ...
     @overload
@@ -1004,7 +1102,13 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     __hash__: ClassVar[None]  # type: ignore[assignment]
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
+        @overload
+        def __or__(self, __value: Mapping[_KT, _VT]) -> dict[_KT, _VT]: ...
+        @overload
         def __or__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ...
+        @overload
+        def __ror__(self, __value: Mapping[_KT, _VT]) -> dict[_KT, _VT]: ...
+        @overload
         def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ...
         # dict.__ior__ should be kept roughly in line with MutableMapping.update()
         @overload  # type: ignore[misc]
@@ -1665,11 +1769,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit
 # Instead, we special-case the most common examples of this: bool and literal integers.
 if sys.version_info >= (3, 8):
     @overload
-    def sum(__iterable: Iterable[bool], start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ...  # type: ignore[misc]
 
 else:
     @overload
-    def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ...  # type: ignore[misc]
 
 @overload
 def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ...
diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi
index e56baf8b52c9..36d79101908d 100644
--- a/mypy/typeshed/stdlib/collections/__init__.pyi
+++ b/mypy/typeshed/stdlib/collections/__init__.pyi
@@ -83,8 +83,14 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     @overload
     def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ...
     if sys.version_info >= (3, 9):
+        @overload
+        def __or__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ...
+        @overload
         def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ...
-        def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ...  # type: ignore[misc]
+        @overload  # type: ignore[misc]
+        def __ror__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ...
+        @overload  # type: ignore[misc]
+        def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ...
         # UserDict.__ior__ should be kept roughly in line with MutableMapping.update()
         @overload  # type: ignore[misc]
         def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ...
@@ -391,6 +397,15 @@ class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]):
     def __missing__(self, __key: _KT) -> _VT: ...
     def __copy__(self) -> Self: ...
     def copy(self) -> Self: ...
+    if sys.version_info >= (3, 9):
+        @overload
+        def __or__(self, __value: Mapping[_KT, _VT]) -> Self: ...
+        @overload
+        def __or__(self, __value: Mapping[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ...
+        @overload
+        def __ror__(self, __value: Mapping[_KT, _VT]) -> Self: ...
+        @overload
+        def __ror__(self, __value: Mapping[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ...
 
 class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     maps: list[MutableMapping[_KT, _VT]]
@@ -425,7 +440,13 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     @overload
     def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> ChainMap[_T, _S]: ...
     if sys.version_info >= (3, 9):
+        @overload
+        def __or__(self, other: Mapping[_KT, _VT]) -> Self: ...
+        @overload
         def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ...
+        @overload
+        def __ror__(self, other: Mapping[_KT, _VT]) -> Self: ...
+        @overload
         def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ...
         # ChainMap.__ior__ should be kept roughly in line with MutableMapping.update()
         @overload  # type: ignore[misc]
diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi
index 2bb2264c97b1..00d511915f20 100644
--- a/mypy/typeshed/stdlib/datetime.pyi
+++ b/mypy/typeshed/stdlib/datetime.pyi
@@ -35,6 +35,7 @@ class timezone(tzinfo):
     def tzname(self, __dt: datetime | None) -> str: ...
     def utcoffset(self, __dt: datetime | None) -> timedelta: ...
     def dst(self, __dt: datetime | None) -> None: ...
+    def __hash__(self) -> int: ...
 
 if sys.version_info >= (3, 11):
     UTC: timezone
@@ -106,6 +107,7 @@ class date:
         @overload
         def __sub__(self, __value: date) -> timedelta: ...
 
+    def __hash__(self) -> int: ...
     def weekday(self) -> int: ...
     def isoweekday(self) -> int: ...
     if sys.version_info >= (3, 9):
@@ -143,6 +145,7 @@ class time:
     def __lt__(self, __value: time) -> bool: ...
     def __ge__(self, __value: time) -> bool: ...
     def __gt__(self, __value: time) -> bool: ...
+    def __hash__(self) -> int: ...
     def isoformat(self, timespec: str = ...) -> str: ...
     @classmethod
     def fromisoformat(cls, __time_string: str) -> Self: ...
@@ -217,6 +220,7 @@ class timedelta:
     def __ge__(self, __value: timedelta) -> bool: ...
     def __gt__(self, __value: timedelta) -> bool: ...
     def __bool__(self) -> bool: ...
+    def __hash__(self) -> int: ...
 
 class datetime(date):
     min: ClassVar[datetime]
diff --git a/mypy/typeshed/stdlib/doctest.pyi b/mypy/typeshed/stdlib/doctest.pyi
index 88d066fdc23c..f3c05781ad92 100644
--- a/mypy/typeshed/stdlib/doctest.pyi
+++ b/mypy/typeshed/stdlib/doctest.pyi
@@ -85,6 +85,7 @@ class Example:
         indent: int = 0,
         options: dict[int, bool] | None = None,
     ) -> None: ...
+    def __hash__(self) -> int: ...
     def __eq__(self, other: object) -> bool: ...
 
 class DocTest:
@@ -103,6 +104,7 @@ class DocTest:
         lineno: int | None,
         docstring: str | None,
     ) -> None: ...
+    def __hash__(self) -> int: ...
     def __lt__(self, other: DocTest) -> bool: ...
     def __eq__(self, other: object) -> bool: ...
 
@@ -210,6 +212,7 @@ class DocTestCase(unittest.TestCase):
     ) -> None: ...
     def runTest(self) -> None: ...
     def format_failure(self, err: str) -> str: ...
+    def __hash__(self) -> int: ...
     def __eq__(self, other: object) -> bool: ...
 
 class SkipDocTestCase(DocTestCase):
diff --git a/mypy/typeshed/stdlib/email/charset.pyi b/mypy/typeshed/stdlib/email/charset.pyi
index e612847c75b6..d61950a26424 100644
--- a/mypy/typeshed/stdlib/email/charset.pyi
+++ b/mypy/typeshed/stdlib/email/charset.pyi
@@ -1,4 +1,6 @@
-from collections.abc import Iterator
+from collections.abc import Callable, Iterator
+from email.message import Message
+from typing import overload
 
 __all__ = ["Charset", "add_alias", "add_charset", "add_codec"]
 
@@ -14,10 +16,13 @@ class Charset:
     input_codec: str | None
     output_codec: str | None
     def __init__(self, input_charset: str = "us-ascii") -> None: ...
-    def get_body_encoding(self) -> str: ...
+    def get_body_encoding(self) -> str | Callable[[Message], None]: ...
     def get_output_charset(self) -> str | None: ...
     def header_encode(self, string: str) -> str: ...
     def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str]: ...
+    @overload
+    def body_encode(self, string: None) -> None: ...
+    @overload
     def body_encode(self, string: str) -> str: ...
     def __eq__(self, other: object) -> bool: ...
     def __ne__(self, __value: object) -> bool: ...
diff --git a/mypy/typeshed/stdlib/email/utils.pyi b/mypy/typeshed/stdlib/email/utils.pyi
index ed63b6b32312..186e768050be 100644
--- a/mypy/typeshed/stdlib/email/utils.pyi
+++ b/mypy/typeshed/stdlib/email/utils.pyi
@@ -60,7 +60,7 @@ else:
     def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: ...
 
 def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: ...
-def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ...
+def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ...  # May return list[str]. See issue #10431 for details.
 def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: ...
 def collapse_rfc2231_value(value: _ParamType, errors: str = "replace", fallback_charset: str = "us-ascii") -> str: ...
 def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: ...
diff --git a/mypy/typeshed/stdlib/errno.pyi b/mypy/typeshed/stdlib/errno.pyi
index 28874d44ff5f..84d2b44a6a61 100644
--- a/mypy/typeshed/stdlib/errno.pyi
+++ b/mypy/typeshed/stdlib/errno.pyi
@@ -91,9 +91,15 @@ ECANCELED: int  # undocumented
 ENOTRECOVERABLE: int  # undocumented
 EOWNERDEAD: int  # undocumented
 
+if sys.platform == "sunos5" or sys.platform == "solaris":  # noqa: Y008
+    ELOCKUNMAPPED: int
+    ENOTACTIVE: int
+
 if sys.platform != "win32":
     ENOTBLK: int
     EMULTIHOP: int
+
+if sys.platform == "darwin":
     # All of the below are undocumented
     EAUTH: int
     EBADARCH: int
@@ -112,9 +118,8 @@ if sys.platform != "win32":
     EPWROFF: int
     ERPCMISMATCH: int
     ESHLIBVERS: int
-
-    if sys.platform != "darwin" or sys.version_info >= (3, 11):
-        EQFULL: int  # undocumented
+    if sys.version_info >= (3, 11):
+        EQFULL: int
 
 if sys.platform != "darwin":
     EDEADLOCK: int
@@ -164,9 +169,6 @@ if sys.platform != "win32" and sys.platform != "darwin":
     ENOKEY: int
     ENOMEDIUM: int
     ERFKILL: int
-    EL: int
-    ELOCKUNMAPPED: int
-    ENOTACTIVE: int
 
 if sys.platform == "win32":
     # All of these are undocumented
diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi
index 1b4e59b7c120..8adc3d82292e 100644
--- a/mypy/typeshed/stdlib/functools.pyi
+++ b/mypy/typeshed/stdlib/functools.pyi
@@ -1,9 +1,9 @@
 import sys
 import types
-from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems
+from _typeshed import SupportsAllComparisons, SupportsItems
 from collections.abc import Callable, Hashable, Iterable, Sequence, Sized
 from typing import Any, Generic, NamedTuple, TypeVar, overload
-from typing_extensions import Literal, Self, TypeAlias, TypedDict, final
+from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final
 
 if sys.version_info >= (3, 9):
     from types import GenericAlias
@@ -28,10 +28,12 @@ if sys.version_info >= (3, 8):
 if sys.version_info >= (3, 9):
     __all__ += ["cache"]
 
-_AnyCallable: TypeAlias = Callable[..., object]
-
 _T = TypeVar("_T")
 _S = TypeVar("_S")
+_PWrapped = ParamSpec("_PWrapped")
+_RWrapped = TypeVar("_RWrapped")
+_PWrapper = ParamSpec("_PWrapper")
+_RWapper = TypeVar("_RWapper")
 
 @overload
 def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ...
@@ -85,31 +87,41 @@ else:
     ]
 WRAPPER_UPDATES: tuple[Literal["__dict__"]]
 
+class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWapper]):
+    __wrapped__: Callable[_PWrapped, _RWrapped]
+    def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWapper: ...
+    # as with ``Callable``, we'll assume that these attributes exist
+    __name__: str
+    __qualname__: str
+
+class _Wrapper(Generic[_PWrapped, _RWrapped]):
+    def __call__(self, f: Callable[_PWrapper, _RWapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ...
+
 if sys.version_info >= (3, 12):
     def update_wrapper(
-        wrapper: _T,
-        wrapped: _AnyCallable,
+        wrapper: Callable[_PWrapper, _RWapper],
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _T: ...
+    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ...
     def wraps(
-        wrapped: _AnyCallable,
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> IdentityFunction: ...
+    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
 
 else:
     def update_wrapper(
-        wrapper: _T,
-        wrapped: _AnyCallable,
+        wrapper: Callable[_PWrapper, _RWapper],
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _T: ...
+    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ...
     def wraps(
-        wrapped: _AnyCallable,
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> IdentityFunction: ...
+    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
 
 def total_ordering(cls: type[_T]) -> type[_T]: ...
 def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ...
diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi
index 7a4146885b29..fc42cf03e2bb 100644
--- a/mypy/typeshed/stdlib/ipaddress.pyi
+++ b/mypy/typeshed/stdlib/ipaddress.pyi
@@ -34,6 +34,7 @@ class _IPAddressBase:
 class _BaseAddress(_IPAddressBase, SupportsInt):
     def __init__(self, address: object) -> None: ...
     def __add__(self, other: int) -> Self: ...
+    def __hash__(self) -> int: ...
     def __int__(self) -> int: ...
     def __sub__(self, other: int) -> Self: ...
     if sys.version_info >= (3, 9):
diff --git a/mypy/typeshed/stdlib/json/__init__.pyi b/mypy/typeshed/stdlib/json/__init__.pyi
index dc0cdff926d4..63e9718ee151 100644
--- a/mypy/typeshed/stdlib/json/__init__.pyi
+++ b/mypy/typeshed/stdlib/json/__init__.pyi
@@ -1,4 +1,3 @@
-import sys
 from _typeshed import SupportsRead, SupportsWrite
 from collections.abc import Callable
 from typing import Any
@@ -7,8 +6,6 @@ from .decoder import JSONDecodeError as JSONDecodeError, JSONDecoder as JSONDeco
 from .encoder import JSONEncoder as JSONEncoder
 
 __all__ = ["dump", "dumps", "load", "loads", "JSONDecoder", "JSONDecodeError", "JSONEncoder"]
-if sys.version_info >= (3, 12):
-    __all__ += ["AttrDict"]
 
 def dumps(
     obj: Any,
@@ -62,9 +59,3 @@ def load(
     **kwds: Any,
 ) -> Any: ...
 def detect_encoding(b: bytes | bytearray) -> str: ...  # undocumented
-
-if sys.version_info >= (3, 12):
-    class AttrDict(dict[str, Any]):
-        def __getattr__(self, name: str) -> Any: ...
-        def __setattr__(self, name: str, value: Any) -> None: ...
-        def __delattr__(self, name: str) -> None: ...
diff --git a/mypy/typeshed/stdlib/linecache.pyi b/mypy/typeshed/stdlib/linecache.pyi
index 8e317dd38990..2e050e13b621 100644
--- a/mypy/typeshed/stdlib/linecache.pyi
+++ b/mypy/typeshed/stdlib/linecache.pyi
@@ -1,5 +1,6 @@
 import sys
-from typing import Any, Protocol
+from collections.abc import Callable
+from typing import Any
 from typing_extensions import TypeAlias
 
 if sys.version_info >= (3, 9):
@@ -10,8 +11,7 @@ else:
 _ModuleGlobals: TypeAlias = dict[str, Any]
 _ModuleMetadata: TypeAlias = tuple[int, float | None, list[str], str]
 
-class _SourceLoader(Protocol):
-    def __call__(self) -> str | None: ...
+_SourceLoader: TypeAlias = tuple[Callable[[], str | None]]
 
 cache: dict[str, _SourceLoader | _ModuleMetadata]  # undocumented
 
diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi
index 3c2ae0fe7ab1..a509ec3af9f2 100644
--- a/mypy/typeshed/stdlib/pathlib.pyi
+++ b/mypy/typeshed/stdlib/pathlib.pyi
@@ -39,6 +39,7 @@ class PurePath(PathLike[str]):
     @property
     def stem(self) -> str: ...
     def __new__(cls, *args: StrPath) -> Self: ...
+    def __hash__(self) -> int: ...
     def __eq__(self, other: object) -> bool: ...
     def __fspath__(self) -> str: ...
     def __lt__(self, other: PurePath) -> bool: ...
diff --git a/mypy/typeshed/stdlib/plistlib.pyi b/mypy/typeshed/stdlib/plistlib.pyi
index 5b76c935f76e..bd5525484514 100644
--- a/mypy/typeshed/stdlib/plistlib.pyi
+++ b/mypy/typeshed/stdlib/plistlib.pyi
@@ -102,6 +102,7 @@ if sys.version_info >= (3, 8):
         def __init__(self, data: int) -> None: ...
         def __index__(self) -> int: ...
         def __reduce__(self) -> tuple[type[Self], tuple[int]]: ...
+        def __hash__(self) -> int: ...
         def __eq__(self, other: object) -> bool: ...
 
 class InvalidFileException(ValueError):
diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
index 24974f787c62..cff0f5e5ff1d 100644
--- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
+++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
@@ -196,6 +196,25 @@ if sys.version_info >= (3, 11):
     SQLITE_WARNING: int
     SQLITE_WARNING_AUTOINDEX: int
 
+if sys.version_info >= (3, 12):
+    LEGACY_TRANSACTION_CONTROL: int
+    SQLITE_DBCONFIG_DEFENSIVE: int
+    SQLITE_DBCONFIG_DQS_DDL: int
+    SQLITE_DBCONFIG_DQS_DML: int
+    SQLITE_DBCONFIG_ENABLE_FKEY: int
+    SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER: int
+    SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION: int
+    SQLITE_DBCONFIG_ENABLE_QPSG: int
+    SQLITE_DBCONFIG_ENABLE_TRIGGER: int
+    SQLITE_DBCONFIG_ENABLE_VIEW: int
+    SQLITE_DBCONFIG_LEGACY_ALTER_TABLE: int
+    SQLITE_DBCONFIG_LEGACY_FILE_FORMAT: int
+    SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE: int
+    SQLITE_DBCONFIG_RESET_DATABASE: int
+    SQLITE_DBCONFIG_TRIGGER_EQP: int
+    SQLITE_DBCONFIG_TRUSTED_SCHEMA: int
+    SQLITE_DBCONFIG_WRITABLE_SCHEMA: int
+
 # Can take or return anything depending on what's in the registry.
 @overload
 def adapt(__obj: Any, __proto: Any) -> Any: ...
@@ -419,6 +438,7 @@ class Row:
     def __getitem__(self, __key: int | str) -> Any: ...
     @overload
     def __getitem__(self, __key: slice) -> tuple[Any, ...]: ...
+    def __hash__(self) -> int: ...
     def __iter__(self) -> Iterator[Any]: ...
     def __len__(self) -> int: ...
     # These return NotImplemented for anything that is not a Row.
diff --git a/mypy/typeshed/stdlib/statistics.pyi b/mypy/typeshed/stdlib/statistics.pyi
index af5fcec6ad0c..07174f4531b9 100644
--- a/mypy/typeshed/stdlib/statistics.pyi
+++ b/mypy/typeshed/stdlib/statistics.pyi
@@ -113,6 +113,7 @@ if sys.version_info >= (3, 8):
         __radd__ = __add__
         def __rsub__(self, x2: float | NormalDist) -> NormalDist: ...
         __rmul__ = __mul__
+        def __hash__(self) -> int: ...
 
 if sys.version_info >= (3, 12):
     def correlation(
diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi
index 3291b0c9dd98..a03c48c039dd 100644
--- a/mypy/typeshed/stdlib/tkinter/__init__.pyi
+++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi
@@ -500,7 +500,7 @@ class Misc:
     bbox = grid_bbox
     def grid_columnconfigure(
         self,
-        index: _GridIndex,
+        index: _GridIndex | list[int] | tuple[int, ...],
         cnf: _GridIndexInfo = {},
         *,
         minsize: _ScreenUnits = ...,
@@ -510,7 +510,7 @@ class Misc:
     ) -> _GridIndexInfo | Any: ...  # can be None but annoying to check
     def grid_rowconfigure(
         self,
-        index: _GridIndex,
+        index: _GridIndex | list[int] | tuple[int, ...],
         cnf: _GridIndexInfo = {},
         *,
         minsize: _ScreenUnits = ...,
@@ -1633,6 +1633,7 @@ class Canvas(Widget, XView, YView):
         activefill: str = ...,
         activestipple: str = ...,
         anchor: _Anchor = ...,
+        angle: float | str = ...,
         disabledfill: str = ...,
         disabledstipple: str = ...,
         fill: str = ...,
@@ -1653,6 +1654,7 @@ class Canvas(Widget, XView, YView):
         activefill: str = ...,
         activestipple: str = ...,
         anchor: _Anchor = ...,
+        angle: float | str = ...,
         disabledfill: str = ...,
         disabledstipple: str = ...,
         fill: str = ...,
diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi
index 009fdf51a440..d73566fc0917 100644
--- a/mypy/typeshed/stdlib/tkinter/ttk.pyi
+++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi
@@ -961,7 +961,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
         master: tkinter.Misc | None = None,
         *,
         class_: str = ...,
-        columns: str | list[str] | tuple[str, ...] = ...,
+        columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = ...,
         cursor: tkinter._Cursor = ...,
         displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ...,
         height: int = ...,
@@ -983,7 +983,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
         self,
         cnf: dict[str, Any] | None = None,
         *,
-        columns: str | list[str] | tuple[str, ...] = ...,
+        columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = ...,
         cursor: tkinter._Cursor = ...,
         displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ...,
         height: int = ...,
diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi
index 2c5f820ea365..7496a0920690 100644
--- a/mypy/typeshed/stdlib/typing.pyi
+++ b/mypy/typeshed/stdlib/typing.pyi
@@ -921,6 +921,7 @@ class ForwardRef:
         def _evaluate(self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None) -> Any | None: ...
 
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
     if sys.version_info >= (3, 11):
         def __or__(self, other: Any) -> _SpecialForm: ...
         def __ror__(self, other: Any) -> _SpecialForm: ...
diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi
index 0ed0701cc450..db1cc7d9bfc9 100644
--- a/mypy/typeshed/stdlib/unittest/mock.pyi
+++ b/mypy/typeshed/stdlib/unittest/mock.pyi
@@ -234,6 +234,8 @@ class _patch(Generic[_T]):
     def copy(self) -> _patch[_T]: ...
     @overload
     def __call__(self, func: _TT) -> _TT: ...
+    # If new==DEFAULT, this should add a MagicMock parameter to the function
+    # arguments. See the _patch_default_new class below for this functionality.
     @overload
     def __call__(self, func: Callable[_P, _R]) -> Callable[_P, _R]: ...
     if sys.version_info >= (3, 8):
@@ -257,6 +259,22 @@ class _patch(Generic[_T]):
     def start(self) -> _T: ...
     def stop(self) -> None: ...
 
+if sys.version_info >= (3, 8):
+    _Mock: TypeAlias = MagicMock | AsyncMock
+else:
+    _Mock: TypeAlias = MagicMock
+
+# This class does not exist at runtime, it's a hack to make this work:
+#     @patch("foo")
+#     def bar(..., mock: MagicMock) -> None: ...
+class _patch_default_new(_patch[_Mock]):
+    @overload
+    def __call__(self, func: _TT) -> _TT: ...
+    # Can't use the following as ParamSpec is only allowed as last parameter:
+    #   def __call__(self, func: Callable[_P, _R]) -> Callable[Concatenate[_P, MagicMock], _R]: ...
+    @overload
+    def __call__(self, func: Callable[..., _R]) -> Callable[..., _R]: ...
+
 class _patch_dict:
     in_dict: Any
     values: Any
@@ -273,11 +291,8 @@ class _patch_dict:
     start: Any
     stop: Any
 
-if sys.version_info >= (3, 8):
-    _Mock: TypeAlias = MagicMock | AsyncMock
-else:
-    _Mock: TypeAlias = MagicMock
-
+# This class does not exist at runtime, it's a hack to add methods to the
+# patch() function.
 class _patcher:
     TEST_PREFIX: str
     dict: type[_patch_dict]
@@ -307,7 +322,7 @@ class _patcher:
         autospec: Any | None = ...,
         new_callable: Any | None = ...,
         **kwargs: Any,
-    ) -> _patch[_Mock]: ...
+    ) -> _patch_default_new: ...
     @overload
     @staticmethod
     def object(  # type: ignore[misc]
diff --git a/mypy/typeshed/stdlib/uuid.pyi b/mypy/typeshed/stdlib/uuid.pyi
index 74ce4ebd6b47..fd87646531a6 100644
--- a/mypy/typeshed/stdlib/uuid.pyi
+++ b/mypy/typeshed/stdlib/uuid.pyi
@@ -1,11 +1,9 @@
+import builtins
 import sys
 from _typeshed import Unused
 from enum import Enum
 from typing_extensions import TypeAlias
 
-# Because UUID has properties called int and bytes we need to rename these temporarily.
-_Int: TypeAlias = int
-_Bytes: TypeAlias = bytes
 _FieldsType: TypeAlias = tuple[int, int, int, int, int, int]
 
 class SafeUUID(Enum):
@@ -17,49 +15,49 @@ class UUID:
     def __init__(
         self,
         hex: str | None = None,
-        bytes: _Bytes | None = None,
-        bytes_le: _Bytes | None = None,
+        bytes: builtins.bytes | None = None,
+        bytes_le: builtins.bytes | None = None,
         fields: _FieldsType | None = None,
-        int: _Int | None = None,
-        version: _Int | None = None,
+        int: builtins.int | None = None,
+        version: builtins.int | None = None,
         *,
         is_safe: SafeUUID = ...,
     ) -> None: ...
     @property
     def is_safe(self) -> SafeUUID: ...
     @property
-    def bytes(self) -> _Bytes: ...
+    def bytes(self) -> builtins.bytes: ...
     @property
-    def bytes_le(self) -> _Bytes: ...
+    def bytes_le(self) -> builtins.bytes: ...
     @property
-    def clock_seq(self) -> _Int: ...
+    def clock_seq(self) -> builtins.int: ...
     @property
-    def clock_seq_hi_variant(self) -> _Int: ...
+    def clock_seq_hi_variant(self) -> builtins.int: ...
     @property
-    def clock_seq_low(self) -> _Int: ...
+    def clock_seq_low(self) -> builtins.int: ...
     @property
     def fields(self) -> _FieldsType: ...
     @property
     def hex(self) -> str: ...
     @property
-    def int(self) -> _Int: ...
+    def int(self) -> builtins.int: ...
     @property
-    def node(self) -> _Int: ...
+    def node(self) -> builtins.int: ...
     @property
-    def time(self) -> _Int: ...
+    def time(self) -> builtins.int: ...
     @property
-    def time_hi_version(self) -> _Int: ...
+    def time_hi_version(self) -> builtins.int: ...
     @property
-    def time_low(self) -> _Int: ...
+    def time_low(self) -> builtins.int: ...
     @property
-    def time_mid(self) -> _Int: ...
+    def time_mid(self) -> builtins.int: ...
     @property
     def urn(self) -> str: ...
     @property
     def variant(self) -> str: ...
     @property
-    def version(self) -> _Int | None: ...
-    def __int__(self) -> _Int: ...
+    def version(self) -> builtins.int | None: ...
+    def __int__(self) -> builtins.int: ...
     def __eq__(self, other: object) -> bool: ...
     def __lt__(self, other: UUID) -> bool: ...
     def __le__(self, other: UUID) -> bool: ...
@@ -72,7 +70,7 @@ if sys.version_info >= (3, 9):
 else:
     def getnode(*, getters: Unused = None) -> int: ...  # undocumented
 
-def uuid1(node: _Int | None = None, clock_seq: _Int | None = None) -> UUID: ...
+def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: ...
 
 if sys.version_info >= (3, 12):
     def uuid3(namespace: UUID, name: str | bytes) -> UUID: ...

From 2f6b6e66ced53a0412c46851afb963f35a012f79 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Mon, 26 Sep 2022 12:55:07 -0700
Subject: [PATCH 006/288] Remove use of LiteralString in builtins (#13743)

---
 mypy/typeshed/stdlib/builtins.pyi | 93 -------------------------------
 1 file changed, 93 deletions(-)

diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index ea917bddb799..3de404b76652 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -56,7 +56,6 @@ from typing import (  # noqa: Y022
 from typing_extensions import (
     Concatenate,
     Literal,
-    LiteralString,
     ParamSpec,
     Self,
     SupportsIndex,
@@ -436,17 +435,8 @@ class str(Sequence[str]):
     def __new__(cls, object: object = ...) -> Self: ...
     @overload
     def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ...
-    @overload
-    def capitalize(self: LiteralString) -> LiteralString: ...
-    @overload
     def capitalize(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def casefold(self: LiteralString) -> LiteralString: ...
-    @overload
     def casefold(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
-    @overload
     def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
     def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
@@ -454,20 +444,11 @@ class str(Sequence[str]):
         self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
     if sys.version_info >= (3, 8):
-        @overload
-        def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ...
-        @overload
         def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ...  # type: ignore[misc]
     else:
-        @overload
-        def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ...
-        @overload
         def expandtabs(self, tabsize: int = 8) -> str: ...  # type: ignore[misc]
 
     def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
-    @overload
-    def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ...
-    @overload
     def format(self, *args: object, **kwargs: object) -> str: ...
     def format_map(self, map: _FormatMapMapping) -> str: ...
     def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
@@ -483,91 +464,32 @@ class str(Sequence[str]):
     def isspace(self) -> bool: ...
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
-    @overload
-    def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ...
-    @overload
     def join(self, __iterable: Iterable[str]) -> str: ...  # type: ignore[misc]
-    @overload
-    def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
-    @overload
     def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
-    @overload
-    def lower(self: LiteralString) -> LiteralString: ...
-    @overload
     def lower(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
-    @overload
     def lstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
-    @overload
-    def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
-    @overload
     def partition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
-    @overload
-    def replace(
-        self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1
-    ) -> LiteralString: ...
-    @overload
     def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ...  # type: ignore[misc]
     if sys.version_info >= (3, 9):
-        @overload
-        def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ...
-        @overload
         def removeprefix(self, __prefix: str) -> str: ...  # type: ignore[misc]
-        @overload
-        def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ...
-        @overload
         def removesuffix(self, __suffix: str) -> str: ...  # type: ignore[misc]
 
     def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
-    @overload
-    def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
-    @overload
     def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
-    @overload
-    def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
-    @overload
     def rpartition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
-    @overload
-    def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
-    @overload
     def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
-    @overload
-    def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
-    @overload
     def rstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
-    @overload
-    def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
-    @overload
     def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
-    @overload
-    def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ...
-    @overload
     def splitlines(self, keepends: bool = False) -> list[str]: ...  # type: ignore[misc]
     def startswith(
         self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
-    @overload
-    def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
-    @overload
     def strip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
-    @overload
-    def swapcase(self: LiteralString) -> LiteralString: ...
-    @overload
     def swapcase(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def title(self: LiteralString) -> LiteralString: ...
-    @overload
     def title(self) -> str: ...  # type: ignore[misc]
     def translate(self, __table: _TranslateTable) -> str: ...
-    @overload
-    def upper(self: LiteralString) -> LiteralString: ...
-    @overload
     def upper(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ...
-    @overload
     def zfill(self, __width: SupportsIndex) -> str: ...  # type: ignore[misc]
     @staticmethod
     @overload
@@ -578,9 +500,6 @@ class str(Sequence[str]):
     @staticmethod
     @overload
     def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ...
-    @overload
-    def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ...
-    @overload
     def __add__(self, __value: str) -> str: ...  # type: ignore[misc]
     # Incompatible with Sequence.__contains__
     def __contains__(self, __key: str) -> bool: ...  # type: ignore[override]
@@ -589,25 +508,13 @@ class str(Sequence[str]):
     def __getitem__(self, __key: SupportsIndex | slice) -> str: ...
     def __gt__(self, __value: str) -> bool: ...
     def __hash__(self) -> int: ...
-    @overload
-    def __iter__(self: LiteralString) -> Iterator[LiteralString]: ...
-    @overload
     def __iter__(self) -> Iterator[str]: ...  # type: ignore[misc]
     def __le__(self, __value: str) -> bool: ...
     def __len__(self) -> int: ...
     def __lt__(self, __value: str) -> bool: ...
-    @overload
-    def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ...
-    @overload
     def __mod__(self, __value: Any) -> str: ...
-    @overload
-    def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
-    @overload
     def __mul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __ne__(self, __value: object) -> bool: ...
-    @overload
-    def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
-    @overload
     def __rmul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __getnewargs__(self) -> tuple[str]: ...
 

From 120af30e706a0e0e29faf915c56c5b6781c9b204 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 29 Oct 2022 12:47:21 -0700
Subject: [PATCH 007/288] Revert sum literal integer change (#13961)

This is allegedly causing large performance problems, see 13821

typeshed/8231 had zero hits on mypy_primer, so it's not the worst thing
to undo. Patching this in typeshed also feels weird, since there's a
more general soundness issue. If a typevar has a bound or constraint, we
might not want to solve it to a Literal.

If we can confirm the performance regression or fix the unsoundness
within mypy, I might pursue upstreaming this in typeshed.

(Reminder: add this to the sync_typeshed script once merged)
---
 mypy/typeshed/stdlib/builtins.pyi | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index 3de404b76652..d6ca39049c77 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -1676,11 +1676,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit
 # Instead, we special-case the most common examples of this: bool and literal integers.
 if sys.version_info >= (3, 8):
     @overload
-    def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool], start: int = 0) -> int: ...  # type: ignore[misc]
 
 else:
     @overload
-    def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ...  # type: ignore[misc]
 
 @overload
 def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ...

From 1866d28f156c413ce8cc9fec0b317a7d02c28565 Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Mon, 1 May 2023 20:34:55 +0100
Subject: [PATCH 008/288] Revert typeshed ctypes change Since the plugin
 provides superior type checking:
 https://github.com/python/mypy/pull/13987#issuecomment-1310863427 A manual
 cherry-pick of e437cdf.

---
 mypy/typeshed/stdlib/_ctypes.pyi | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi
index 25d604218a00..756ee86d3342 100644
--- a/mypy/typeshed/stdlib/_ctypes.pyi
+++ b/mypy/typeshed/stdlib/_ctypes.pyi
@@ -151,11 +151,7 @@ class Array(Generic[_CT], _CData):
     def _type_(self) -> type[_CT]: ...
     @_type_.setter
     def _type_(self, value: type[_CT]) -> None: ...
-    # Note: only available if _CT == c_char
-    @property
-    def raw(self) -> bytes: ...
-    @raw.setter
-    def raw(self, value: ReadableBuffer) -> None: ...
+    raw: bytes  # Note: only available if _CT == c_char
     value: Any  # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
     # TODO These methods cannot be annotated correctly at the moment.
     # All of these "Any"s stand for the array's element type, but it's not possible to use _CT

From 3240da455e06a292669de22b0ef313dad43cb094 Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Sat, 4 Mar 2023 13:14:11 +0000
Subject: [PATCH 009/288] Revert use of `ParamSpec` for `functools.wraps`

---
 mypy/typeshed/stdlib/functools.pyi | 40 +++++++++++-------------------
 1 file changed, 14 insertions(+), 26 deletions(-)

diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi
index 8adc3d82292e..1b4e59b7c120 100644
--- a/mypy/typeshed/stdlib/functools.pyi
+++ b/mypy/typeshed/stdlib/functools.pyi
@@ -1,9 +1,9 @@
 import sys
 import types
-from _typeshed import SupportsAllComparisons, SupportsItems
+from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems
 from collections.abc import Callable, Hashable, Iterable, Sequence, Sized
 from typing import Any, Generic, NamedTuple, TypeVar, overload
-from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final
+from typing_extensions import Literal, Self, TypeAlias, TypedDict, final
 
 if sys.version_info >= (3, 9):
     from types import GenericAlias
@@ -28,12 +28,10 @@ if sys.version_info >= (3, 8):
 if sys.version_info >= (3, 9):
     __all__ += ["cache"]
 
+_AnyCallable: TypeAlias = Callable[..., object]
+
 _T = TypeVar("_T")
 _S = TypeVar("_S")
-_PWrapped = ParamSpec("_PWrapped")
-_RWrapped = TypeVar("_RWrapped")
-_PWrapper = ParamSpec("_PWrapper")
-_RWapper = TypeVar("_RWapper")
 
 @overload
 def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ...
@@ -87,41 +85,31 @@ else:
     ]
 WRAPPER_UPDATES: tuple[Literal["__dict__"]]
 
-class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWapper]):
-    __wrapped__: Callable[_PWrapped, _RWrapped]
-    def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWapper: ...
-    # as with ``Callable``, we'll assume that these attributes exist
-    __name__: str
-    __qualname__: str
-
-class _Wrapper(Generic[_PWrapped, _RWrapped]):
-    def __call__(self, f: Callable[_PWrapper, _RWapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ...
-
 if sys.version_info >= (3, 12):
     def update_wrapper(
-        wrapper: Callable[_PWrapper, _RWapper],
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapper: _T,
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ...
+    ) -> _T: ...
     def wraps(
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
+    ) -> IdentityFunction: ...
 
 else:
     def update_wrapper(
-        wrapper: Callable[_PWrapper, _RWapper],
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapper: _T,
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ...
+    ) -> _T: ...
     def wraps(
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
+    ) -> IdentityFunction: ...
 
 def total_ordering(cls: type[_T]) -> type[_T]: ...
 def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ...

From 3f601c3641ecde3557520ddc64a18baa40b12e35 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Mon, 17 Jul 2023 02:09:28 +0100
Subject: [PATCH 010/288] Remove unneeded `--strict-optional` flags from test
 cases (#15684)

---
 test-data/unit/check-abstract.test            | 17 ---------
 test-data/unit/check-basic.test               |  1 -
 test-data/unit/check-class-namedtuple.test    |  1 -
 test-data/unit/check-classes.test             | 36 ++++++++-----------
 test-data/unit/check-columns.test             |  1 -
 test-data/unit/check-custom-plugin.test       |  2 +-
 test-data/unit/check-dataclasses.test         |  8 ++---
 test-data/unit/check-enum.test                |  4 ---
 test-data/unit/check-errorcodes.test          |  7 +---
 test-data/unit/check-expressions.test         |  5 ++-
 test-data/unit/check-flags.test               |  2 +-
 test-data/unit/check-functions.test           |  2 --
 test-data/unit/check-generics.test            |  7 ++--
 test-data/unit/check-incremental.test         | 13 ++-----
 test-data/unit/check-inference-context.test   | 13 -------
 test-data/unit/check-inference.test           | 19 +++-------
 test-data/unit/check-inline-config.test       |  4 +--
 test-data/unit/check-isinstance.test          | 15 --------
 test-data/unit/check-kwargs.test              |  1 -
 test-data/unit/check-lists.test               |  1 -
 test-data/unit/check-literal.test             |  6 ----
 test-data/unit/check-modules.test             |  1 -
 test-data/unit/check-narrowing.test           | 13 ++-----
 test-data/unit/check-native-int.test          |  2 --
 test-data/unit/check-newsyntax.test           |  4 ---
 test-data/unit/check-overloading.test         | 11 ++----
 .../unit/check-parameter-specification.test   |  1 -
 test-data/unit/check-plugin-attrs.test        |  2 --
 test-data/unit/check-protocols.test           | 19 ++++------
 test-data/unit/check-python310.test           |  5 ---
 test-data/unit/check-python38.test            | 10 +++---
 test-data/unit/check-recursive-types.test     |  4 ---
 test-data/unit/check-serialize.test           |  1 -
 test-data/unit/check-type-aliases.test        |  1 -
 test-data/unit/check-typeddict.test           |  8 -----
 test-data/unit/check-typeguard.test           |  1 -
 test-data/unit/check-unions.test              |  5 ---
 test-data/unit/check-unreachable-code.test    |  1 -
 test-data/unit/check-varargs.test             |  1 -
 test-data/unit/check-warnings.test            |  2 +-
 test-data/unit/deps.test                      |  1 -
 test-data/unit/fine-grained-suggest.test      | 17 ---------
 test-data/unit/fine-grained.test              | 22 ++++--------
 test-data/unit/pythoneval.test                |  9 +++--
 44 files changed, 58 insertions(+), 248 deletions(-)

diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test
index dc64476beda6..299074050baa 100644
--- a/test-data/unit/check-abstract.test
+++ b/test-data/unit/check-abstract.test
@@ -1125,7 +1125,6 @@ b.y = 1
 -- -----------------------------------------------
 
 [case testEmptyBodyProhibitedFunction]
-# flags: --strict-optional
 from typing import overload, Union
 
 def func1(x: str) -> int: pass  # E: Missing return statement
@@ -1148,7 +1147,6 @@ def func5(x: Union[int, str]) -> Union[int, str]:  # E: Missing return statement
     """Some function."""
 
 [case testEmptyBodyProhibitedMethodNonAbstract]
-# flags: --strict-optional
 from typing import overload, Union
 
 class A:
@@ -1183,7 +1181,6 @@ class C:
 [builtins fixtures/classmethod.pyi]
 
 [case testEmptyBodyProhibitedPropertyNonAbstract]
-# flags: --strict-optional
 class A:
     @property
     def x(self) -> int: ...  # E: Missing return statement
@@ -1212,7 +1209,6 @@ class C:
 [builtins fixtures/property.pyi]
 
 [case testEmptyBodyNoteABCMeta]
-# flags: --strict-optional
 from abc import ABC
 
 class A(ABC):
@@ -1221,7 +1217,6 @@ class A(ABC):
         ...
 
 [case testEmptyBodyAllowedFunctionStub]
-# flags: --strict-optional
 import stub
 [file stub.pyi]
 from typing import overload, Union
@@ -1232,7 +1227,6 @@ def func3(x: str) -> int:
     """Some function."""
 
 [case testEmptyBodyAllowedMethodNonAbstractStub]
-# flags: --strict-optional
 import stub
 [file stub.pyi]
 from typing import overload, Union
@@ -1254,7 +1248,6 @@ class B:
 [builtins fixtures/classmethod.pyi]
 
 [case testEmptyBodyAllowedPropertyNonAbstractStub]
-# flags: --strict-optional
 import stub
 [file stub.pyi]
 class A:
@@ -1285,7 +1278,6 @@ class C:
 [builtins fixtures/property.pyi]
 
 [case testEmptyBodyAllowedMethodAbstract]
-# flags: --strict-optional
 from typing import overload, Union
 from abc import abstractmethod
 
@@ -1333,7 +1325,6 @@ class C:
 [builtins fixtures/classmethod.pyi]
 
 [case testEmptyBodyAllowedPropertyAbstract]
-# flags: --strict-optional
 from abc import abstractmethod
 class A:
     @property
@@ -1372,7 +1363,6 @@ class C:
 [builtins fixtures/property.pyi]
 
 [case testEmptyBodyImplicitlyAbstractProtocol]
-# flags: --strict-optional
 from typing import Protocol, overload, Union
 
 class P1(Protocol):
@@ -1413,7 +1403,6 @@ C3()
 [builtins fixtures/classmethod.pyi]
 
 [case testEmptyBodyImplicitlyAbstractProtocolProperty]
-# flags: --strict-optional
 from typing import Protocol
 
 class P1(Protocol):
@@ -1443,7 +1432,6 @@ C2()
 [builtins fixtures/property.pyi]
 
 [case testEmptyBodyImplicitlyAbstractProtocolStub]
-# flags: --strict-optional
 from stub import P1, P2, P3, P4
 
 class B1(P1): ...
@@ -1479,7 +1467,6 @@ class P4(Protocol):
 [builtins fixtures/classmethod.pyi]
 
 [case testEmptyBodyUnsafeAbstractSuper]
-# flags: --strict-optional
 from stub import StubProto, StubAbstract
 from typing import Protocol
 from abc import abstractmethod
@@ -1528,7 +1515,6 @@ class StubAbstract:
     def meth(self) -> int: ...
 
 [case testEmptyBodyUnsafeAbstractSuperProperty]
-# flags: --strict-optional
 from stub import StubProto, StubAbstract
 from typing import Protocol
 from abc import abstractmethod
@@ -1586,7 +1572,6 @@ class StubAbstract:
 [builtins fixtures/property.pyi]
 
 [case testEmptyBodyUnsafeAbstractSuperOverloads]
-# flags: --strict-optional
 from stub import StubProto
 from typing import Protocol, overload, Union
 
@@ -1671,7 +1656,6 @@ class SubAbstract(Abstract):
         return super().meth()
 
 [case testEmptyBodyNoSuperWarningOptionalReturn]
-# flags: --strict-optional
 from typing import Protocol, Optional
 from abc import abstractmethod
 
@@ -1689,7 +1673,6 @@ class SubAbstract(Abstract):
         return super().meth()
 
 [case testEmptyBodyTypeCheckingOnly]
-# flags: --strict-optional
 from typing import TYPE_CHECKING
 
 class C:
diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test
index 408c3599672b..61a7160ce4f4 100644
--- a/test-data/unit/check-basic.test
+++ b/test-data/unit/check-basic.test
@@ -385,7 +385,6 @@ y = x # E: Incompatible types in assignment (expression has type "Dict[str, int]
 [builtins fixtures/dict.pyi]
 
 [case testDistinctTypes]
-# flags: --strict-optional
 import b
 
 [file a.py]
diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test
index 1916cb41bb74..a095f212b900 100644
--- a/test-data/unit/check-class-namedtuple.test
+++ b/test-data/unit/check-class-namedtuple.test
@@ -419,7 +419,6 @@ UserDefined(1)  # E: Argument 1 to "UserDefined" has incompatible type "int"; ex
 [builtins fixtures/list.pyi]
 
 [case testNewNamedTupleWithDefaultsStrictOptional]
-# flags: --strict-optional
 from typing import List, NamedTuple, Optional
 
 class HasNone(NamedTuple):
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
index 957eb9214d7c..b9e65ef4ad20 100644
--- a/test-data/unit/check-classes.test
+++ b/test-data/unit/check-classes.test
@@ -935,7 +935,6 @@ if int():
     b = D2()
 
 [case testConstructorJoinsWithCustomMetaclass]
-# flags: --strict-optional
 from typing import TypeVar
 import abc
 
@@ -1629,7 +1628,6 @@ a = A()
 reveal_type(a.f)  # N: Revealed type is "__main__.D"
 
 [case testAccessingDescriptorFromClass]
-# flags: --strict-optional
 from d import D, Base
 class A(Base):
     f = D()
@@ -1647,7 +1645,6 @@ class D:
 [builtins fixtures/bool.pyi]
 
 [case testAccessingDescriptorFromClassWrongBase]
-# flags: --strict-optional
 from d import D, Base
 class A:
     f = D()
@@ -1664,13 +1661,13 @@ class D:
     def __get__(self, inst: Base, own: Type[Base]) -> str: pass
 [builtins fixtures/bool.pyi]
 [out]
-main:5: error: Argument 2 to "__get__" of "D" has incompatible type "Type[A]"; expected "Type[Base]"
-main:5: note: Revealed type is "d.D"
-main:6: error: No overload variant of "__get__" of "D" matches argument types "A", "Type[A]"
-main:6: note: Possible overload variants:
-main:6: note:     def __get__(self, inst: None, own: Type[Base]) -> D
-main:6: note:     def __get__(self, inst: Base, own: Type[Base]) -> str
-main:6: note: Revealed type is "Any"
+main:4: error: Argument 2 to "__get__" of "D" has incompatible type "Type[A]"; expected "Type[Base]"
+main:4: note: Revealed type is "d.D"
+main:5: error: No overload variant of "__get__" of "D" matches argument types "A", "Type[A]"
+main:5: note: Possible overload variants:
+main:5: note:     def __get__(self, inst: None, own: Type[Base]) -> D
+main:5: note:     def __get__(self, inst: Base, own: Type[Base]) -> str
+main:5: note: Revealed type is "Any"
 
 [case testAccessingGenericNonDataDescriptor]
 from typing import TypeVar, Type, Generic, Any
@@ -1702,7 +1699,6 @@ a.g = ''
 a.g = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str")
 
 [case testAccessingGenericDescriptorFromClass]
-# flags: --strict-optional
 from d import D
 class A:
     f = D(10)  # type: D[A, int]
@@ -1724,7 +1720,6 @@ class D(Generic[T, V]):
 [builtins fixtures/bool.pyi]
 
 [case testAccessingGenericDescriptorFromInferredClass]
-# flags: --strict-optional
 from typing import Type
 from d import D
 class A:
@@ -1745,11 +1740,10 @@ class D(Generic[T, V]):
     def __get__(self, inst: T, own: Type[T]) -> V: pass
 [builtins fixtures/bool.pyi]
 [out]
-main:8: note: Revealed type is "d.D[__main__.A, builtins.int]"
-main:9: note: Revealed type is "d.D[__main__.A, builtins.str]"
+main:7: note: Revealed type is "d.D[__main__.A, builtins.int]"
+main:8: note: Revealed type is "d.D[__main__.A, builtins.str]"
 
 [case testAccessingGenericDescriptorFromClassBadOverload]
-# flags: --strict-optional
 from d import D
 class A:
     f = D(10)  # type: D[A, int]
@@ -1766,11 +1760,11 @@ class D(Generic[T, V]):
     def __get__(self, inst: T, own: Type[T]) -> V: pass
 [builtins fixtures/bool.pyi]
 [out]
-main:5: error: No overload variant of "__get__" of "D" matches argument types "None", "Type[A]"
-main:5: note: Possible overload variants:
-main:5: note:     def __get__(self, inst: None, own: None) -> D[A, int]
-main:5: note:     def __get__(self, inst: A, own: Type[A]) -> int
-main:5: note: Revealed type is "Any"
+main:4: error: No overload variant of "__get__" of "D" matches argument types "None", "Type[A]"
+main:4: note: Possible overload variants:
+main:4: note:     def __get__(self, inst: None, own: None) -> D[A, int]
+main:4: note:     def __get__(self, inst: A, own: Type[A]) -> int
+main:4: note: Revealed type is "Any"
 
 [case testAccessingNonDataDescriptorSubclass]
 from typing import Any
@@ -6484,7 +6478,6 @@ def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ...
 [out]
 
 [case testOptionalDescriptorsBinder]
-# flags: --strict-optional
 from typing import Type, TypeVar, Optional
 T = TypeVar('T')
 
@@ -6698,7 +6691,6 @@ class C(Generic[T]):
 [builtins fixtures/isinstancelist.pyi]
 
 [case testIsInstanceTypeSubclass]
-# flags: --strict-optional
 from typing import Type, Optional
 class Base: ...
 class One(Base):
diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test
index 9d9a7d9ac039..44524b9df943 100644
--- a/test-data/unit/check-columns.test
+++ b/test-data/unit/check-columns.test
@@ -27,7 +27,6 @@ A().f(1, 1) # E:10: Argument 2 to "f" of "A" has incompatible type "int"; expect
 (A().f(1, 'hello', 'hi')) # E:2: Too many arguments for "f" of "A"
 
 [case testColumnsInvalidArgumentType]
-# flags: --strict-optional
 def f(x: int, y: str) -> None: ...
 def g(*x: int) -> None: pass
 def h(**x: int) -> None: pass
diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test
index ec5bce219dbd..9a0668f98c21 100644
--- a/test-data/unit/check-custom-plugin.test
+++ b/test-data/unit/check-custom-plugin.test
@@ -802,7 +802,7 @@ else:
 plugins=<ROOT>/test-data/unit/plugins/union_method.py
 
 [case testGetMethodHooksOnUnionsStrictOptional]
-# flags: --config-file tmp/mypy.ini --strict-optional
+# flags: --config-file tmp/mypy.ini
 from typing import Union
 
 class Foo:
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index adcaa60a5b19..3866442230bf 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -1127,7 +1127,6 @@ class Foo:
 
 [case testNoComplainFieldNoneStrict]
 # flags: --python-version 3.7
-# flags: --strict-optional
 from dataclasses import dataclass, field
 from typing import Optional
 
@@ -1264,7 +1263,7 @@ class Deferred: pass
 [builtins fixtures/dataclasses.pyi]
 
 [case testDeferredDataclassInitSignatureSubclass]
-# flags: --strict-optional --python-version 3.7
+# flags: --python-version 3.7
 from dataclasses import dataclass
 from typing import Optional
 
@@ -1745,7 +1744,7 @@ reveal_type(Child2[int, A]([A()], [1]).b)  # N: Revealed type is "builtins.list[
 [builtins fixtures/dataclasses.pyi]
 
 [case testDataclassInheritOptionalType]
-# flags: --python-version 3.7 --strict-optional
+# flags: --python-version 3.7
 from dataclasses import dataclass
 from typing import Any, Callable, Generic, TypeVar, List, Optional
 
@@ -1979,7 +1978,6 @@ B = List[C]
 [builtins fixtures/dataclasses.pyi]
 
 [case testDataclassSelfType]
-# flags: --strict-optional
 from dataclasses import dataclass
 from typing import Self, TypeVar, Generic, Optional
 
@@ -2104,7 +2102,6 @@ a2 = replace(a, q='42')  # E: Argument "q" to "replace" of "A" has incompatible
 reveal_type(a2)  # N: Revealed type is "__main__.A"
 
 [case testReplaceUnion]
-# flags: --strict-optional
 from typing import Generic, Union, TypeVar
 from dataclasses import dataclass, replace, InitVar
 
@@ -2136,7 +2133,6 @@ _ = replace(a_or_b, y=42, init_var=42)  # E: Argument "y" to "replace" of "Union
 [builtins fixtures/dataclasses.pyi]
 
 [case testReplaceUnionOfTypeVar]
-# flags: --strict-optional
 from typing import Generic, Union, TypeVar
 from dataclasses import dataclass, replace
 
diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test
index ce7e173f635d..6779ae266454 100644
--- a/test-data/unit/check-enum.test
+++ b/test-data/unit/check-enum.test
@@ -953,7 +953,6 @@ else:
 [builtins fixtures/bool.pyi]
 
 [case testEnumReachabilityWithNone]
-# flags: --strict-optional
 from enum import Enum
 from typing import Optional
 
@@ -1016,7 +1015,6 @@ reveal_type(x3) # N: Revealed type is "Union[__main__.Foo, __main__.Bar]"
 [builtins fixtures/bool.pyi]
 
 [case testEnumReachabilityPEP484ExampleWithFinal]
-# flags: --strict-optional
 from typing import Union
 from typing_extensions import Final
 from enum import Enum
@@ -1063,7 +1061,6 @@ def process(response: Union[str, Reason] = '') -> str:
 
 
 [case testEnumReachabilityPEP484ExampleSingleton]
-# flags: --strict-optional
 from typing import Union
 from typing_extensions import Final
 from enum import Enum
@@ -1088,7 +1085,6 @@ def func(x: Union[int, None, Empty] = _empty) -> int:
 [builtins fixtures/primitives.pyi]
 
 [case testEnumReachabilityPEP484ExampleSingletonWithMethod]
-# flags: --strict-optional
 from typing import Union
 from typing_extensions import Final
 from enum import Enum
diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test
index 1e7dc9364855..1efbab7de322 100644
--- a/test-data/unit/check-errorcodes.test
+++ b/test-data/unit/check-errorcodes.test
@@ -732,7 +732,6 @@ class InvalidReturn:
 [builtins fixtures/bool.pyi]
 
 [case testErrorCodeOverloadedOperatorMethod]
-# flags: --strict-optional
 from typing import Optional, overload
 
 class A:
@@ -758,7 +757,6 @@ class C:
 x - C()  # type: ignore[operator]
 
 [case testErrorCodeMultiLineBinaryOperatorOperand]
-# flags: --strict-optional
 from typing import Optional
 
 class C: pass
@@ -897,7 +895,6 @@ if any_or_object:
 [builtins fixtures/list.pyi]
 
 [case testTruthyFunctions]
-# flags: --strict-optional
 def f():
     pass
 if f:  # E: Function "f" could always be true in boolean context  [truthy-function]
@@ -907,7 +904,7 @@ if not f:  # E: Function "f" could always be true in boolean context  [truthy-fu
 conditional_result = 'foo' if f else 'bar'  # E: Function "f" could always be true in boolean context  [truthy-function]
 
 [case testTruthyIterable]
-# flags: --strict-optional --enable-error-code truthy-iterable
+# flags: --enable-error-code truthy-iterable
 from typing import Iterable
 def func(var: Iterable[str]) -> None:
     if var:  # E: "var" has type "Iterable[str]" which can always be true in boolean context. Consider using "Collection[str]" instead.  [truthy-iterable]
@@ -995,7 +992,6 @@ var: int = ""  # E: Incompatible types in assignment (expression has type "str",
 show_error_codes = True
 
 [case testErrorCodeUnsafeSuper_no_empty]
-# flags: --strict-optional
 from abc import abstractmethod
 
 class Base:
@@ -1008,7 +1004,6 @@ class Sub(Base):
 [builtins fixtures/exception.pyi]
 
 [case testDedicatedErrorCodeForEmpty_no_empty]
-# flags: --strict-optional
 from typing import Optional
 def foo() -> int: ...  # E: Missing return statement  [empty-body]
 def bar() -> None: ...
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
index 8231b0a3265f..40ee28830b21 100644
--- a/test-data/unit/check-expressions.test
+++ b/test-data/unit/check-expressions.test
@@ -1210,7 +1210,6 @@ a[:None]
 [builtins fixtures/slice.pyi]
 
 [case testNoneSliceBoundsWithStrictOptional]
-# flags: --strict-optional
 from typing import Any
 a: Any
 a[None:1]
@@ -2049,7 +2048,7 @@ x is 42
 [typing fixtures/typing-full.pyi]
 
 [case testStrictEqualityStrictOptional]
-# flags: --strict-equality --strict-optional
+# flags: --strict-equality
 
 x: str
 if x is not None:  # OK even with strict-optional
@@ -2065,7 +2064,7 @@ if x is not None:  # OK without strict-optional
 [builtins fixtures/bool.pyi]
 
 [case testStrictEqualityEqNoOptionalOverlap]
-# flags: --strict-equality --strict-optional
+# flags: --strict-equality
 from typing import Optional
 
 x: Optional[str]
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
index 3750c44ed7f3..e21157eae991 100644
--- a/test-data/unit/check-flags.test
+++ b/test-data/unit/check-flags.test
@@ -427,7 +427,7 @@ async def h() -> NoReturn:  # E: Implicit return in function which does not retu
 [typing fixtures/typing-async.pyi]
 
 [case testNoWarnNoReturn]
-# flags: --no-warn-no-return --strict-optional
+# flags: --no-warn-no-return
 import typing
 
 def implicit_optional_return(arg) -> typing.Optional[str]:
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
index 0de4798ea1f5..a8722d8190b9 100644
--- a/test-data/unit/check-functions.test
+++ b/test-data/unit/check-functions.test
@@ -2545,7 +2545,6 @@ reveal_type(bar(None))  # N: Revealed type is "None"
 [out]
 
 [case testNoComplainOverloadNoneStrict]
-# flags: --strict-optional
 from typing import overload, Optional
 @overload
 def bar(x: None) -> None:
@@ -2574,7 +2573,6 @@ xx: Optional[int] = X(x_in)
 [out]
 
 [case testNoComplainInferredNoneStrict]
-# flags: --strict-optional
 from typing import TypeVar, Optional
 T = TypeVar('T')
 def X(val: T) -> T: ...
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 90d46c217451..34588bfceb3d 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -2323,7 +2323,6 @@ class B(A):
 [builtins fixtures/classmethod.pyi]
 
 [case testSubclassingGenericSelfClassMethodOptional]
-# flags: --strict-optional
 from typing import TypeVar, Type, Optional
 
 AT = TypeVar('AT', bound='A')
@@ -2935,7 +2934,7 @@ reveal_type(dec(id))  # N: Revealed type is "def [S] (S`1) -> builtins.list[S`1]
 [builtins fixtures/list.pyi]
 
 [case testInferenceAgainstGenericCallableGenericProtocol]
-# flags: --strict-optional --new-type-inference
+# flags: --new-type-inference
 from typing import TypeVar, Protocol, Generic, Optional
 
 T = TypeVar('T')
@@ -2951,7 +2950,7 @@ reveal_type(lift(g))  # N: Revealed type is "def [T] (Union[T`1, None]) -> Union
 [builtins fixtures/list.pyi]
 
 [case testInferenceAgainstGenericSplitOrder]
-# flags: --strict-optional --new-type-inference
+# flags: --new-type-inference
 from typing import TypeVar, Callable, List
 
 S = TypeVar('S')
@@ -2966,7 +2965,7 @@ reveal_type(dec(id, id))  # N: Revealed type is "def (builtins.int) -> builtins.
 [builtins fixtures/list.pyi]
 
 [case testInferenceAgainstGenericSplitOrderGeneric]
-# flags: --strict-optional --new-type-inference
+# flags: --new-type-inference
 from typing import TypeVar, Callable, Tuple
 
 S = TypeVar('S')
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index d8461fc78815..80f5e4e7d12d 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -2901,7 +2901,6 @@ tmp/main.py:2: error: Expression has type "Any"
 tmp/main.py:2: error: Expression has type "Any"
 
 [case testIncrementalStrictOptional]
-# flags: --strict-optional
 import a
 1 + a.foo()
 [file a.py]
@@ -2911,8 +2910,8 @@ from typing import Optional
 def foo() -> Optional[int]: return 0
 [out1]
 [out2]
-main:3: error: Unsupported operand types for + ("int" and "None")
-main:3: note: Right operand is of type "Optional[int]"
+main:2: error: Unsupported operand types for + ("int" and "None")
+main:2: note: Right operand is of type "Optional[int]"
 
 [case testAttrsIncrementalSubclassingCached]
 from a import A
@@ -3457,7 +3456,6 @@ main:2: error: Cannot find implementation or library stub for module named "a"
 main:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
 
 [case testIncrementalInheritanceAddAnnotation]
-# flags: --strict-optional
 import a
 [file a.py]
 import b
@@ -5757,7 +5755,6 @@ class C:
 [builtins fixtures/tuple.pyi]
 
 [case testNamedTupleUpdateNonRecursiveToRecursiveCoarse]
-# flags: --strict-optional
 import c
 [file a.py]
 from b import M
@@ -5800,7 +5797,6 @@ tmp/c.py:5: error: Incompatible types in assignment (expression has type "Option
 tmp/c.py:7: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins.int, fallback=b.M], None], builtins.int, fallback=a.N]"
 
 [case testTupleTypeUpdateNonRecursiveToRecursiveCoarse]
-# flags: --strict-optional
 import c
 [file a.py]
 from b import M
@@ -5833,7 +5829,6 @@ tmp/c.py:4: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins
 tmp/c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int")
 
 [case testTypeAliasUpdateNonRecursiveToRecursiveCoarse]
-# flags: --strict-optional
 import c
 [file a.py]
 from b import M
@@ -5866,7 +5861,6 @@ tmp/c.py:4: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins
 tmp/c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int")
 
 [case testTypedDictUpdateNonRecursiveToRecursiveCoarse]
-# flags: --strict-optional
 import c
 [file a.py]
 from b import M
@@ -6061,7 +6055,6 @@ tmp/m.py:9: note:     Got:
 tmp/m.py:9: note:         def update() -> str
 
 [case testAbstractBodyTurnsEmptyCoarse]
-# flags: --strict-optional
 from b import Base
 
 class Sub(Base):
@@ -6081,7 +6074,7 @@ class Base:
     def meth(self) -> int: ...
 [out]
 [out2]
-main:6: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe
+main:5: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe
 
 [case testNoCrashDoubleReexportFunctionEmpty]
 import m
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
index 59f515490964..ba36c1548532 100644
--- a/test-data/unit/check-inference-context.test
+++ b/test-data/unit/check-inference-context.test
@@ -925,7 +925,6 @@ reveal_type(f(None)) # N: Revealed type is "Union[None, builtins.list[builtins.i
 [builtins fixtures/list.pyi]
 
 [case testUnionWithGenericTypeItemContextAndStrictOptional]
-# flags: --strict-optional
 from typing import TypeVar, Union, List
 
 T = TypeVar('T')
@@ -953,7 +952,6 @@ reveal_type(c.f(None)) # N: Revealed type is "Union[builtins.list[builtins.int],
 [builtins fixtures/list.pyi]
 
 [case testGenericMethodCalledInGenericContext]
-# flags: --strict-optional
 from typing import TypeVar, Generic
 
 _KT = TypeVar('_KT')
@@ -1221,7 +1219,6 @@ x: Iterable[Union[A, B]] = f(B())
 [builtins fixtures/list.pyi]
 
 [case testWideOuterContextOptional]
-# flags: --strict-optional
 from typing import Optional, Type, TypeVar
 
 class Custom:
@@ -1235,7 +1232,6 @@ def b(x: T) -> Optional[T]:
     return a(x)
 
 [case testWideOuterContextOptionalGenericReturn]
-# flags: --strict-optional
 from typing import Optional, Type, TypeVar, Iterable
 
 class Custom:
@@ -1249,7 +1245,6 @@ def b(x: T) -> Iterable[Optional[T]]:
     return a(x)
 
 [case testWideOuterContextOptionalMethod]
-# flags: --strict-optional
 from typing import Optional, Type, TypeVar
 
 class A: pass
@@ -1282,7 +1277,6 @@ def bar(xs: List[S]) -> S:
 [builtins fixtures/list.pyi]
 
 [case testWideOuterContextOptionalTypeVarReturn]
-# flags: --strict-optional
 from typing import Callable, Iterable, List, Optional, TypeVar
 
 class C:
@@ -1298,7 +1292,6 @@ def g(l: List[C], x: str) -> Optional[C]:
 [builtins fixtures/list.pyi]
 
 [case testWideOuterContextOptionalTypeVarReturnLambda]
-# flags: --strict-optional
 from typing import Callable, Iterable, List, Optional, TypeVar
 
 class C:
@@ -1335,7 +1328,6 @@ y: List[str] = f([]) \
 [builtins fixtures/list.pyi]
 
 [case testWideOuterContextNoArgs]
-# flags: --strict-optional
 from typing import TypeVar, Optional
 
 T = TypeVar('T', bound=int)
@@ -1344,7 +1336,6 @@ def f(x: Optional[T] = None) -> T: ...
 y: str = f()
 
 [case testWideOuterContextNoArgsError]
-# flags: --strict-optional
 from typing import TypeVar, Optional, List
 
 T = TypeVar('T', bound=int)
@@ -1427,7 +1418,6 @@ bar({1: 2})
 [builtins fixtures/dict.pyi]
 
 [case testOptionalTypeNarrowedByGenericCall]
-# flags: --strict-optional
 from typing import Dict, Optional
 
 d: Dict[str, str] = {}
@@ -1439,7 +1429,6 @@ def foo(arg: Optional[str] = None) -> None:
 [builtins fixtures/dict.pyi]
 
 [case testOptionalTypeNarrowedByGenericCall2]
-# flags: --strict-optional
 from typing import Dict, Optional
 
 d: Dict[str, str] = {}
@@ -1451,7 +1440,6 @@ if x:
 [builtins fixtures/dict.pyi]
 
 [case testOptionalTypeNarrowedByGenericCall3]
-# flags: --strict-optional
 from typing import Generic, TypeVar, Union
 
 T = TypeVar("T")
@@ -1464,7 +1452,6 @@ def foo(arg: Union[str, int]) -> None:
 [builtins fixtures/isinstance.pyi]
 
 [case testOptionalTypeNarrowedByGenericCall4]
-# flags: --strict-optional
 from typing import Optional, List, Generic, TypeVar
 
 T = TypeVar("T", covariant=True)
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 3c4a0943556a..e0f29a19ec1d 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -1402,7 +1402,6 @@ f(b)
 g(b)
 
 [case testLambdaDefaultContext]
-# flags: --strict-optional
 from typing import Callable
 def f(a: Callable[..., None] = lambda *a, **k: None):
     pass
@@ -1811,7 +1810,6 @@ reveal_type(C().a)  # N: Revealed type is "builtins.dict[builtins.int, builtins.
 [builtins fixtures/dict.pyi]
 
 [case testInferAttributeInitializedToNoneAndAssigned]
-# flags: --strict-optional
 class C:
     def __init__(self) -> None:
         self.a = None
@@ -1858,7 +1856,6 @@ reveal_type(C().a)  # N: Revealed type is "builtins.dict[Any, Any]"
 [builtins fixtures/dict.pyi]
 
 [case testInferAttributeInitializedToNoneAndAssignedOtherMethod]
-# flags: --strict-optional
 class C:
     def __init__(self) -> None:
         self.a = None
@@ -1891,7 +1888,6 @@ reveal_type(C().a)  # N: Revealed type is "builtins.dict[Any, Any]"
 [builtins fixtures/dict.pyi]
 
 [case testInferAttributeInitializedToNoneAndAssignedClassBody]
-# flags: --strict-optional
 class C:
     a = None
     def __init__(self) -> None:
@@ -2538,7 +2534,6 @@ if bool():
 [out]
 
 [case testDontMarkUnreachableAfterInferenceUninhabited2]
-# flags: --strict-optional
 from typing import TypeVar, Optional
 T = TypeVar('T')
 def f(x: Optional[T] = None) -> T: pass
@@ -2609,7 +2604,7 @@ x = ''
 reveal_type(x) # N: Revealed type is "builtins.str"
 
 [case testLocalPartialTypesWithGlobalInitializedToNoneStrictOptional]
-# flags: --local-partial-types --strict-optional
+# flags: --local-partial-types
 x = None
 
 def f() -> None:
@@ -2761,7 +2756,7 @@ class B(A):
 reveal_type(B.x) # N: Revealed type is "None"
 
 [case testLocalPartialTypesWithInheritance2]
-# flags: --local-partial-types --strict-optional
+# flags: --local-partial-types
 class A:
     x: str
 
@@ -2769,7 +2764,7 @@ class B(A):
     x = None  # E: Incompatible types in assignment (expression has type "None", base class "A" defined the type as "str")
 
 [case testLocalPartialTypesWithAnyBaseClass]
-# flags: --local-partial-types --strict-optional
+# flags: --local-partial-types
 from typing import Any
 
 A: Any
@@ -2781,7 +2776,7 @@ class C(B):
     y = None
 
 [case testLocalPartialTypesInMultipleMroItems]
-# flags: --local-partial-types --strict-optional
+# flags: --local-partial-types
 from typing import Optional
 
 class A:
@@ -3106,7 +3101,6 @@ class B(A):
     x = 2  # E: Incompatible types in assignment (expression has type "int", base class "A" defined the type as "str")
 
 [case testInheritedAttributeStrictOptional]
-# flags: --strict-optional
 class A:
     x: str
 
@@ -3209,7 +3203,6 @@ x: Inv[int]
 reveal_type(f(x))  # N: Revealed type is "builtins.int"
 
 [case testOptionalTypeVarAgainstOptional]
-# flags: --strict-optional
 from typing import Optional, TypeVar, Iterable, Iterator, List
 
 _T = TypeVar('_T')
@@ -3256,7 +3249,6 @@ reveal_type(b) # N: Revealed type is "collections.defaultdict[builtins.int, buil
 [builtins fixtures/dict.pyi]
 
 [case testPartialDefaultDictListValueStrictOptional]
-# flags: --strict-optional
 from collections import defaultdict
 a = defaultdict(list)
 a['x'].append(1)
@@ -3333,7 +3325,6 @@ def g() -> None: pass
 reveal_type(f(g))  # N: Revealed type is "None"
 
 [case testInferCallableReturningNone2]
-# flags: --strict-optional
 from typing import Callable, TypeVar
 
 T = TypeVar("T")
@@ -3404,7 +3395,6 @@ def collection_from_dict_value(model: Type[T2]) -> None:
 [builtins fixtures/isinstancelist.pyi]
 
 [case testRegression11705_Strict]
-# flags: --strict-optional
 # See: https://github.com/python/mypy/issues/11705
 from typing import Dict, Optional, NamedTuple
 class C(NamedTuple):
@@ -3454,7 +3444,6 @@ foo(("a", {"a": "b"}, "b"))
 [builtins fixtures/dict.pyi]
 
 [case testUseSupertypeAsInferenceContext]
-# flags: --strict-optional
 from typing import List, Optional
 
 class B:
diff --git a/test-data/unit/check-inline-config.test b/test-data/unit/check-inline-config.test
index 71030b5c9b97..bedba811d95b 100644
--- a/test-data/unit/check-inline-config.test
+++ b/test-data/unit/check-inline-config.test
@@ -165,7 +165,6 @@ main:1: error: Unrecognized option: skip_file = True
 main:1: error: Setting "strict" not supported in inline configuration: specify it in a configuration file instead, or set individual inline flags (see "mypy -h" for the list of flags enabled in strict mode)
 
 [case testInlineErrorCodes]
-# flags: --strict-optional
 # mypy: enable-error-code="ignore-without-code,truthy-bool"
 class Foo:
     pass
@@ -175,7 +174,7 @@ if foo: ...  # E: "__main__.foo" has type "Foo" which does not implement __bool_
 42 + "no"  # type: ignore  # E: "type: ignore" comment without error code (consider "type: ignore[operator]" instead)
 
 [case testInlineErrorCodesOverrideConfig]
-# flags: --strict-optional --config-file tmp/mypy.ini
+# flags: --config-file tmp/mypy.ini
 import foo
 import tests.bar
 import tests.baz
@@ -243,7 +242,6 @@ class C:
         self.x = 1
 
 [case testIgnoreErrorsWithUnsafeSuperCall_no_empty]
-# flags: --strict-optional
 
 from m import C
 
diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test
index 3403e726d8b5..361d4db78752 100644
--- a/test-data/unit/check-isinstance.test
+++ b/test-data/unit/check-isinstance.test
@@ -1801,7 +1801,6 @@ if issubclass(fm, Bar):
 [builtins fixtures/isinstance.pyi]
 
 [case testIssubclassWithMetaclassesStrictOptional]
-# flags: --strict-optional
 class FooMetaclass(type): ...
 class BarMetaclass(type): ...
 class Foo(metaclass=FooMetaclass): ...
@@ -1906,7 +1905,6 @@ def narrow_any_to_str_then_reassign_to_int() -> None:
 [builtins fixtures/isinstance.pyi]
 
 [case testNarrowTypeAfterInList]
-# flags: --strict-optional
 from typing import List, Optional
 
 x: List[int]
@@ -1924,7 +1922,6 @@ else:
 [out]
 
 [case testNarrowTypeAfterInListOfOptional]
-# flags: --strict-optional
 from typing import List, Optional
 
 x: List[Optional[int]]
@@ -1938,7 +1935,6 @@ else:
 [out]
 
 [case testNarrowTypeAfterInListNonOverlapping]
-# flags: --strict-optional
 from typing import List, Optional
 
 x: List[str]
@@ -1952,7 +1948,6 @@ else:
 [out]
 
 [case testNarrowTypeAfterInListNested]
-# flags: --strict-optional
 from typing import List, Optional, Any
 
 x: Optional[int]
@@ -1967,7 +1962,6 @@ if x in nested_any:
 [out]
 
 [case testNarrowTypeAfterInTuple]
-# flags: --strict-optional
 from typing import Optional
 class A: pass
 class B(A): pass
@@ -1982,7 +1976,6 @@ else:
 [out]
 
 [case testNarrowTypeAfterInNamedTuple]
-# flags: --strict-optional
 from typing import NamedTuple, Optional
 class NT(NamedTuple):
     x: int
@@ -1998,7 +1991,6 @@ else:
 [out]
 
 [case testNarrowTypeAfterInDict]
-# flags: --strict-optional
 from typing import Dict, Optional
 x: Dict[str, int]
 y: Optional[str]
@@ -2015,7 +2007,6 @@ else:
 [out]
 
 [case testNarrowTypeAfterInNoAnyOrObject]
-# flags: --strict-optional
 from typing import Any, List, Optional
 x: List[Any]
 z: List[object]
@@ -2035,7 +2026,6 @@ else:
 [out]
 
 [case testNarrowTypeAfterInUserDefined]
-# flags: --strict-optional
 from typing import Container, Optional
 
 class C(Container[int]):
@@ -2057,7 +2047,6 @@ else:
 [out]
 
 [case testNarrowTypeAfterInSet]
-# flags: --strict-optional
 from typing import Optional, Set
 s: Set[str]
 
@@ -2074,7 +2063,6 @@ else:
 [out]
 
 [case testNarrowTypeAfterInTypedDict]
-# flags: --strict-optional
 from typing import Optional
 from mypy_extensions import TypedDict
 class TD(TypedDict):
@@ -2150,7 +2138,6 @@ else:
 [builtins fixtures/isinstance.pyi]
 
 [case testIsInstanceInitialNoneCheckSkipsImpossibleCasesNoStrictOptional]
-# flags: --strict-optional
 from typing import Optional, Union
 
 class A: pass
@@ -2197,7 +2184,6 @@ def foo2(x: Optional[str]) -> None:
 [builtins fixtures/isinstance.pyi]
 
 [case testNoneCheckDoesNotNarrowWhenUsingTypeVars]
-# flags: --strict-optional
 
 # Note: this test (and the following one) are testing checker.conditional_type_map:
 # if you set the 'prohibit_none_typevar_overlap' keyword argument to False when calling
@@ -2249,7 +2235,6 @@ def bar(x: Union[List[str], List[int], None]) -> None:
 [builtins fixtures/isinstancelist.pyi]
 
 [case testNoneAndGenericTypesOverlapStrictOptional]
-# flags: --strict-optional
 from typing import Union, Optional, List
 
 # This test is the same as the one above, except for strict-optional.
diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test
index b3ee47aa6fdf..4beac047e278 100644
--- a/test-data/unit/check-kwargs.test
+++ b/test-data/unit/check-kwargs.test
@@ -350,7 +350,6 @@ class A: pass
 [builtins fixtures/dict.pyi]
 
 [case testInvalidTypeForKeywordVarArg]
-# flags: --strict-optional
 from typing import Dict, Any, Optional
 class A: pass
 def f(**kwargs: 'A') -> None: pass
diff --git a/test-data/unit/check-lists.test b/test-data/unit/check-lists.test
index 9809024afdbb..77acdafd3319 100644
--- a/test-data/unit/check-lists.test
+++ b/test-data/unit/check-lists.test
@@ -89,7 +89,6 @@ reveal_type(c)  # N: Revealed type is "builtins.list[builtins.int]"
 [builtins fixtures/list.pyi]
 
 [case testComprehensionShadowBinder]
-# flags: --strict-optional
 def foo(x: object) -> None:
     if isinstance(x, str):
         [reveal_type(x) for x in [1, 2, 3]]  # N: Revealed type is "builtins.int"
diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test
index abdbf733a679..f63f4026c4b6 100644
--- a/test-data/unit/check-literal.test
+++ b/test-data/unit/check-literal.test
@@ -659,7 +659,6 @@ def foo(b: Literal[T]) -> Tuple[T]: pass   # E: Parameter 1 of Literal[...] is i
 --
 
 [case testLiteralMultipleValues]
-# flags: --strict-optional
 from typing_extensions import Literal
 a: Literal[1, 2, 3]
 b: Literal["a", "b", "c"]
@@ -689,7 +688,6 @@ reveal_type(b)  # N: Revealed type is "Union[Literal[1], Literal[2], Literal[3]]
 [out]
 
 [case testLiteralNestedUsage]
-# flags: --strict-optional
 
 from typing_extensions import Literal
 a: Literal[Literal[3], 4, Literal["foo"]]
@@ -818,7 +816,6 @@ foo(c)  # E: Argument 1 to "foo" has incompatible type "Literal[4, 'foo']"; expe
 [out]
 
 [case testLiteralCheckSubtypingStrictOptional]
-# flags: --strict-optional
 from typing import Any, NoReturn
 from typing_extensions import Literal
 
@@ -1807,7 +1804,6 @@ reveal_type(unify(f6))  # N: Revealed type is "None"
 [out]
 
 [case testLiteralMeetsWithStrictOptional]
-# flags: --strict-optional
 from typing import TypeVar, Callable, Union
 from typing_extensions import Literal
 
@@ -1834,7 +1830,6 @@ reveal_type(unify(func))  # N: Revealed type is "<nothing>"
 --
 
 [case testLiteralIntelligentIndexingTuples]
-# flags: --strict-optional
 from typing import Tuple, NamedTuple, Optional, Final
 from typing_extensions import Literal
 
@@ -2247,7 +2242,6 @@ force4(reveal_type(f.instancevar4))  # N: Revealed type is "None"
 [out]
 
 [case testLiteralFinalErasureInMutableDatastructures1]
-# flags: --strict-optional
 from typing_extensions import Final
 
 var1: Final = [0, None]
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
index 4992b6589bb3..bdf860cba89d 100644
--- a/test-data/unit/check-modules.test
+++ b/test-data/unit/check-modules.test
@@ -567,7 +567,6 @@ x = 1
 x = 1
 
 [case testAssignToFuncDefViaImport]
-# flags: --strict-optional
 
 # Errors differ with the new analyzer. (Old analyzer gave error on the
 # input, which is maybe better, but no error about f, which seems
diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test
index f06af0057f0f..22014d4c645c 100644
--- a/test-data/unit/check-narrowing.test
+++ b/test-data/unit/check-narrowing.test
@@ -747,7 +747,6 @@ def test3(switch: FlipFlopEnum) -> None:
 [builtins fixtures/primitives.pyi]
 
 [case testNarrowingEqualityRequiresExplicitStrLiteral]
-# flags: --strict-optional
 from typing_extensions import Literal, Final
 
 A_final: Final = "A"
@@ -794,7 +793,6 @@ reveal_type(x_union)      # N: Revealed type is "Union[Literal['A'], Literal['B'
 [builtins fixtures/primitives.pyi]
 
 [case testNarrowingEqualityRequiresExplicitEnumLiteral]
-# flags: --strict-optional
 from typing import Union
 from typing_extensions import Literal, Final
 from enum import Enum
@@ -879,7 +877,7 @@ else:
 [builtins fixtures/primitives.pyi]
 
 [case testNarrowingEqualityDisabledForCustomEqualityChain]
-# flags: --strict-optional --strict-equality --warn-unreachable
+# flags: --strict-equality --warn-unreachable
 from typing import Union
 from typing_extensions import Literal
 
@@ -916,7 +914,7 @@ else:
 [builtins fixtures/primitives.pyi]
 
 [case testNarrowingUnreachableCases]
-# flags: --strict-optional --strict-equality --warn-unreachable
+# flags: --strict-equality --warn-unreachable
 from typing import Union
 from typing_extensions import Literal
 
@@ -964,7 +962,7 @@ else:
 [builtins fixtures/primitives.pyi]
 
 [case testNarrowingUnreachableCases2]
-# flags: --strict-optional --strict-equality --warn-unreachable
+# flags: --strict-equality --warn-unreachable
 from typing import Union
 from typing_extensions import Literal
 
@@ -1064,7 +1062,6 @@ else:
 [builtins fixtures/primitives.pyi]
 
 [case testNarrowingBooleanIdentityCheck]
-# flags: --strict-optional
 from typing import Optional
 from typing_extensions import Literal
 
@@ -1087,7 +1084,6 @@ else:
 [builtins fixtures/primitives.pyi]
 
 [case testNarrowingBooleanTruthiness]
-# flags: --strict-optional
 from typing import Optional
 from typing_extensions import Literal
 
@@ -1109,7 +1105,6 @@ reveal_type(opt_bool_val)   # N: Revealed type is "Union[builtins.bool, None]"
 [builtins fixtures/primitives.pyi]
 
 [case testNarrowingBooleanBoolOp]
-# flags: --strict-optional
 from typing import Optional
 from typing_extensions import Literal
 
@@ -1161,7 +1156,6 @@ def f(d: Union[Foo, Bar]) -> None:
 [builtins fixtures/dict.pyi]
 
 [case testNarrowingUsingMetaclass]
-# flags: --strict-optional
 from typing import Type
 
 class M(type):
@@ -1181,7 +1175,6 @@ def f(t: Type[C]) -> None:
     reveal_type(t)  # N: Revealed type is "Type[__main__.C]"
 
 [case testNarrowingUsingTypeVar]
-# flags: --strict-optional
 from typing import Type, TypeVar
 
 class A: pass
diff --git a/test-data/unit/check-native-int.test b/test-data/unit/check-native-int.test
index 1129512694f4..30314eebcb31 100644
--- a/test-data/unit/check-native-int.test
+++ b/test-data/unit/check-native-int.test
@@ -69,7 +69,6 @@ reveal_type(join(a, n64))  # N: Revealed type is "Any"
 [builtins fixtures/dict.pyi]
 
 [case testNativeIntMeets]
-# flags: --strict-optional
 from typing import TypeVar, Callable, Any
 from mypy_extensions import i32, i64
 
@@ -130,7 +129,6 @@ reveal_type(y)  # N: Revealed type is "builtins.int"
 [builtins fixtures/dict.pyi]
 
 [case testNativeIntFloatConversion]
-# flags: --strict-optional
 from typing import TypeVar, Callable
 from mypy_extensions import i32
 
diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test
index 0815d7af1933..3ed4c6d3d8e2 100644
--- a/test-data/unit/check-newsyntax.test
+++ b/test-data/unit/check-newsyntax.test
@@ -49,20 +49,17 @@ TstInstance().a = 'ab'
 [out]
 
 [case testNewSyntaxWithClassVars]
-# flags: --strict-optional
 class CCC:
     a: str = None  # E: Incompatible types in assignment (expression has type "None", variable has type "str")
 [out]
 
 [case testNewSyntaxWithStrictOptional]
-# flags: --strict-optional
 strict: int
 strict = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 strict2: int = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
 [out]
 
 [case testNewSyntaxWithStrictOptionalFunctions]
-# flags: --strict-optional
 def f() -> None:
     x: int
     if int():
@@ -70,7 +67,6 @@ def f() -> None:
 [out]
 
 [case testNewSyntaxWithStrictOptionalClasses]
-# flags: --strict-optional
 class C:
     def meth(self) -> None:
         x: int = None  # E: Incompatible types in assignment (expression has type "None", variable has type "int")
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index 4851cc96e6da..f49a15ada85c 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -3264,7 +3264,6 @@ f(x, B())  # E: Argument 1 to "f" has incompatible type "Union[A, B]"; expected
 [builtins fixtures/tuple.pyi]
 
 [case testOverloadInferUnionWithMixOfPositionalAndOptionalArgs]
-# flags: --strict-optional
 from typing import overload, Union, Optional
 
 class A: ...
@@ -3603,7 +3602,6 @@ reveal_type(g(b))  # N: Revealed type is "builtins.str"
 reveal_type(g(c))  # N: Revealed type is "builtins.str"
 
 [case testOverloadsAndNoneWithStrictOptional]
-# flags: --strict-optional
 from typing import overload, Optional
 
 @overload
@@ -3651,7 +3649,6 @@ reveal_type(mymap(f3, seq))  # N: Revealed type is "typing.Iterable[builtins.str
 [typing fixtures/typing-medium.pyi]
 
 [case testOverloadsNoneAndTypeVarsWithStrictOptional]
-# flags: --strict-optional
 from typing import Callable, Iterable, TypeVar, overload, Optional
 
 T = TypeVar('T')
@@ -3708,7 +3705,6 @@ def test_narrow_int() -> None:
 [typing fixtures/typing-medium.pyi]
 
 [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional1]
-# flags: --strict-optional
 from typing import overload, Union, NoReturn
 
 @overload
@@ -3772,7 +3768,6 @@ def test_narrow_none() -> None:
 [typing fixtures/typing-medium.pyi]
 
 [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional2]
-# flags: --strict-optional
 from typing import overload, Union, TypeVar, NoReturn, Optional
 
 T = TypeVar('T')
@@ -3836,7 +3831,6 @@ def test_narrow_none_v2() -> None:
 [typing fixtures/typing-medium.pyi]
 
 [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional3]
-# flags: --strict-optional
 from typing import overload, TypeVar, NoReturn, Optional
 
 @overload
@@ -4648,7 +4642,6 @@ def none_second(x: int) -> int:
     return x
 
 [case testOverloadsWithNoneComingSecondIsOkInStrictOptional]
-# flags: --strict-optional
 from typing import overload, Optional
 
 @overload
@@ -4672,8 +4665,8 @@ def none_loose_impl(x: int) -> int: ...
 def none_loose_impl(x: int) -> int:
     return x
 [out]
-main:22: error: Overloaded function implementation does not accept all possible arguments of signature 1
-main:22: error: Overloaded function implementation cannot produce return type of signature 1
+main:21: error: Overloaded function implementation does not accept all possible arguments of signature 1
+main:21: error: Overloaded function implementation cannot produce return type of signature 1
 
 [case testTooManyUnionsException]
 from typing import overload, Union
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index 3d05faed74f1..114fe1f8438a 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1307,7 +1307,6 @@ reveal_type(bar(C(fn=foo, x=1)))  # N: Revealed type is "__main__.C[[x: builtins
 [builtins fixtures/paramspec.pyi]
 
 [case testParamSpecClassConstructor]
-# flags: --strict-optional
 from typing import ParamSpec, Callable
 
 P = ParamSpec("P")
diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test
index 88a541c28ac2..913584224764 100644
--- a/test-data/unit/check-plugin-attrs.test
+++ b/test-data/unit/check-plugin-attrs.test
@@ -1199,7 +1199,6 @@ class C:
 [builtins fixtures/bool.pyi]
 
 [case testAttrsOptionalConverter]
-# flags: --strict-optional
 import attr
 from attr.converters import optional
 from typing import Optional
@@ -1219,7 +1218,6 @@ A(None, None)
 [builtins fixtures/plugin_attrs.pyi]
 
 [case testAttrsOptionalConverterNewPackage]
-# flags: --strict-optional
 import attrs
 from attrs.converters import optional
 from typing import Optional
diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test
index 5d5ba54304a3..dba01be50fee 100644
--- a/test-data/unit/check-protocols.test
+++ b/test-data/unit/check-protocols.test
@@ -324,7 +324,6 @@ var: MyHashable = C()  # E: Incompatible types in assignment (expression has typ
                        # N:     __my_hash__: expected "Callable[[], int]", got "None"
 
 [case testNoneDisablesProtocolSubclassingWithStrictOptional]
-# flags: --strict-optional
 from typing import Protocol
 
 class MyHashable(Protocol):
@@ -336,7 +335,6 @@ class C(MyHashable):
 (expression has type "None", base class "MyHashable" defined the type as "Callable[[MyHashable], int]")
 
 [case testProtocolsWithNoneAndStrictOptional]
-# flags: --strict-optional
 from typing import Protocol
 class P(Protocol):
     x = 0  # type: int
@@ -348,12 +346,12 @@ x: P = C() # Error!
 def f(x: P) -> None: pass
 f(C()) # Error!
 [out]
-main:9: error: Incompatible types in assignment (expression has type "C", variable has type "P")
-main:9: note: Following member(s) of "C" have conflicts:
-main:9: note:     x: expected "int", got "None"
-main:11: error: Argument 1 to "f" has incompatible type "C"; expected "P"
-main:11: note: Following member(s) of "C" have conflicts:
-main:11: note:     x: expected "int", got "None"
+main:8: error: Incompatible types in assignment (expression has type "C", variable has type "P")
+main:8: note: Following member(s) of "C" have conflicts:
+main:8: note:     x: expected "int", got "None"
+main:10: error: Argument 1 to "f" has incompatible type "C"; expected "P"
+main:10: note: Following member(s) of "C" have conflicts:
+main:10: note:     x: expected "int", got "None"
 
 -- Semanal errors in protocol types
 -- --------------------------------
@@ -2412,7 +2410,6 @@ x: P = None
 [out]
 
 [case testNoneSubtypeOfEmptyProtocolStrict]
-# flags: --strict-optional
 from typing import Protocol
 class P(Protocol):
     pass
@@ -2959,7 +2956,6 @@ class MyClass:
 
 
 [case testPartialAttributeNoneTypeStrictOptional]
-# flags: --strict-optional
 from typing import Optional, Protocol, runtime_checkable
 
 @runtime_checkable
@@ -3080,7 +3076,6 @@ def round(number: SupportsRound[_T], ndigits: int) -> _T: ...
 round(C(), 1)
 
 [case testEmptyBodyImplicitlyAbstractProtocol]
-# flags: --strict-optional
 from typing import Protocol, overload, Union
 
 class P1(Protocol):
@@ -3127,7 +3122,6 @@ C3()
 [builtins fixtures/classmethod.pyi]
 
 [case testEmptyBodyImplicitlyAbstractProtocolProperty]
-# flags: --strict-optional
 from typing import Protocol
 
 class P1(Protocol):
@@ -3222,7 +3216,6 @@ D() # E: Cannot instantiate abstract class "D" with abstract attribute "meth"
 [builtins fixtures/exception.pyi]
 
 [case testEmptyBodyNoneCompatibleProtocol]
-# flags: --strict-optional
 from abc import abstractmethod
 from typing import Any, Optional, Protocol, Union, overload
 from typing_extensions import TypeAlias
diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test
index 6416fa02bbce..c07a90b49e63 100644
--- a/test-data/unit/check-python310.test
+++ b/test-data/unit/check-python310.test
@@ -1140,7 +1140,6 @@ match m:
         reveal_type(a)
 
 [case testMatchRedefiningPatternGuard]
-# flags: --strict-optional
 m: str
 
 match m:
@@ -1382,7 +1381,6 @@ def f(x: int | str) -> int:  # E: Missing return statement
 [builtins fixtures/isinstance.pyi]
 
 [case testMatchNarrowDownUnionPartially]
-# flags: --strict-optional
 
 def f(x: int | str) -> None:
     match x:
@@ -1493,7 +1491,6 @@ def f(x: A) -> None:
     reveal_type(y)  # N: Revealed type is "Union[__main__.<subclass of "A" and "B">, __main__.<subclass of "A" and "C">]"
 
 [case testMatchWithBreakAndContinue]
-# flags: --strict-optional
 def f(x: int | str | None) -> None:
     i = int()
     while i:
@@ -1626,7 +1623,6 @@ def func(e: Union[str, tuple[str]]) -> None:
 [builtins fixtures/tuple.pyi]
 
 [case testMatchTupleOptionalNoCrash]
-# flags: --strict-optional
 foo: tuple[int] | None
 match foo:
     case x,:
@@ -1865,7 +1861,6 @@ def f() -> None:
             reveal_type(y.a)  # N: Revealed type is "builtins.int"
 
 [case testNarrowedVariableInNestedModifiedInMatch]
-# flags: --strict-optional
 from typing import Optional
 
 def match_stmt_error1(x: Optional[str]) -> None:
diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test
index 423daaf5ae8f..d83f29f2186a 100644
--- a/test-data/unit/check-python38.test
+++ b/test-data/unit/check-python38.test
@@ -223,7 +223,7 @@ h(arg=0)  # E: Unexpected keyword argument "arg" for "h"
 i(arg=0)  # E: Unexpected keyword argument "arg"
 
 [case testWalrus]
-# flags: --strict-optional --python-version 3.8
+# flags: --python-version 3.8
 from typing import NamedTuple, Optional, List
 from typing_extensions import Final
 
@@ -427,7 +427,7 @@ else:
 [builtins fixtures/list.pyi]
 
 [case testWalrusConditionalTypeCheck]
-# flags: --strict-optional --python-version 3.8
+# flags: --python-version 3.8
 from typing import Optional
 
 maybe_str: Optional[str]
@@ -729,7 +729,6 @@ def f1() -> None:
 [builtins fixtures/dict.pyi]
 
 [case testNarrowOnSelfInGeneric]
-# flags: --strict-optional
 from typing import Generic, TypeVar, Optional
 
 T = TypeVar("T", int, str)
@@ -741,8 +740,8 @@ class C(Generic[T]):
             reveal_type(y)
         return None
 [out]
-main:10: note: Revealed type is "builtins.int"
-main:10: note: Revealed type is "builtins.str"
+main:9: note: Revealed type is "builtins.int"
+main:9: note: Revealed type is "builtins.str"
 
 [case testTypeGuardWithPositionalOnlyArg]
 # flags: --python-version 3.8
@@ -778,7 +777,6 @@ class C:
 [builtins fixtures/list.pyi]
 
 [case testNarrowedVariableInNestedModifiedInWalrus]
-# flags: --strict-optional
 from typing import Optional
 
 def walrus_with_nested_error(x: Optional[str]) -> None:
diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test
index dc1ae448c0d1..84593933a2de 100644
--- a/test-data/unit/check-recursive-types.test
+++ b/test-data/unit/check-recursive-types.test
@@ -422,7 +422,6 @@ reveal_type(d)  # N: Revealed type is "Any"
 [builtins fixtures/isinstancelist.pyi]
 
 [case testBasicRecursiveNamedTuple]
-# flags: --strict-optional
 from typing import NamedTuple, Optional
 
 NT = NamedTuple("NT", [("x", Optional[NT]), ("y", int)])
@@ -457,7 +456,6 @@ reveal_type(f(tnt, nt))  # N: Revealed type is "builtins.tuple[Any, ...]"
 [builtins fixtures/tuple.pyi]
 
 [case testBasicRecursiveNamedTupleClass]
-# flags: --strict-optional
 from typing import NamedTuple, Optional
 
 class NT(NamedTuple):
@@ -684,7 +682,6 @@ itd2 = TD(x=0, y=TD(x=0, y=TD(x=0, y=None)))
 [typing fixtures/typing-typeddict.pyi]
 
 [case testRecursiveTypedDictMethods]
-# flags: --strict-optional
 from typing import TypedDict
 
 class TD(TypedDict, total=False):
@@ -787,7 +784,6 @@ reveal_type(std)  # N: Revealed type is "TypedDict('__main__.STD', {'val': built
 [typing fixtures/typing-typeddict.pyi]
 
 [case testRecursiveClassLevelAlias]
-# flags: --strict-optional
 from typing import Union, Sequence
 
 class A:
diff --git a/test-data/unit/check-serialize.test b/test-data/unit/check-serialize.test
index e5d1d6b170f9..81da94c0591c 100644
--- a/test-data/unit/check-serialize.test
+++ b/test-data/unit/check-serialize.test
@@ -740,7 +740,6 @@ main:4: note: Revealed type is "def (x: builtins.int) -> Tuple[builtins.int, fal
 --
 
 [case testSerializeOptionalType]
-# flags: --strict-optional
 import a
 [file a.py]
 import b
diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test
index 3bfcf6a9afea..42f22e89d6b7 100644
--- a/test-data/unit/check-type-aliases.test
+++ b/test-data/unit/check-type-aliases.test
@@ -305,7 +305,6 @@ reveal_type(y)  # N: Revealed type is "Union[builtins.int, None]"
 [builtins fixtures/bool.pyi]
 
 [case testNoneAliasStrict]
-# flags: --strict-optional
 from typing import Optional, Union
 void = type(None)
 x: int
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index 983fa8c17aec..7de8e6416f35 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -595,7 +595,6 @@ reveal_type(f(g))  # N: Revealed type is "TypedDict({'x': builtins.int, 'y': bui
 [builtins fixtures/dict.pyi]
 
 [case testMeetOfTypedDictsWithIncompatibleCommonKeysIsUninhabited]
-# flags: --strict-optional
 from mypy_extensions import TypedDict
 from typing import TypeVar, Callable
 XYa = TypedDict('XYa', {'x': int, 'y': int})
@@ -619,7 +618,6 @@ reveal_type(f(g))  # N: Revealed type is "TypedDict({'x': builtins.int, 'z': bui
 
 # TODO: It would be more accurate for the meet to be TypedDict instead.
 [case testMeetOfTypedDictWithCompatibleMappingIsUninhabitedForNow]
-# flags: --strict-optional
 from mypy_extensions import TypedDict
 from typing import TypeVar, Callable, Mapping
 X = TypedDict('X', {'x': int})
@@ -631,7 +629,6 @@ reveal_type(f(g))  # N: Revealed type is "<nothing>"
 [builtins fixtures/dict.pyi]
 
 [case testMeetOfTypedDictWithIncompatibleMappingIsUninhabited]
-# flags: --strict-optional
 from mypy_extensions import TypedDict
 from typing import TypeVar, Callable, Mapping
 X = TypedDict('X', {'x': int})
@@ -643,7 +640,6 @@ reveal_type(f(g))  # N: Revealed type is "<nothing>"
 [builtins fixtures/dict.pyi]
 
 [case testMeetOfTypedDictWithCompatibleMappingSuperclassIsUninhabitedForNow]
-# flags: --strict-optional
 from mypy_extensions import TypedDict
 from typing import TypeVar, Callable, Iterable
 X = TypedDict('X', {'x': int})
@@ -677,7 +673,6 @@ reveal_type(f(g))  # N: Revealed type is "TypedDict({'x'?: builtins.int, 'y': bu
 [builtins fixtures/dict.pyi]
 
 [case testMeetOfTypedDictsWithIncompatibleNonTotalAndTotal]
-# flags: --strict-optional
 from mypy_extensions import TypedDict
 from typing import TypeVar, Callable
 XY = TypedDict('XY', {'x': int, 'y': int}, total=False)
@@ -972,7 +967,6 @@ if int():
 -- Other TypedDict methods
 
 [case testTypedDictGetMethod]
-# flags: --strict-optional
 from mypy_extensions import TypedDict
 class A: pass
 D = TypedDict('D', {'x': int, 'y': str})
@@ -986,7 +980,6 @@ reveal_type(d.get('y', None)) # N: Revealed type is "Union[builtins.str, None]"
 [typing fixtures/typing-typeddict.pyi]
 
 [case testTypedDictGetMethodTypeContext]
-# flags: --strict-optional
 from typing import List
 from mypy_extensions import TypedDict
 class A: pass
@@ -1044,7 +1037,6 @@ p.get('x', 1 + 'y')     # E: Unsupported operand types for + ("int" and "str")
 [typing fixtures/typing-typeddict.pyi]
 
 [case testTypedDictChainedGetWithEmptyDictDefault]
-# flags: --strict-optional
 from mypy_extensions import TypedDict
 C = TypedDict('C', {'a': int})
 D = TypedDict('D', {'x': C, 'y': str})
diff --git a/test-data/unit/check-typeguard.test b/test-data/unit/check-typeguard.test
index a307e4c8b6a0..b3b168e5c7c6 100644
--- a/test-data/unit/check-typeguard.test
+++ b/test-data/unit/check-typeguard.test
@@ -248,7 +248,6 @@ def main1(a: object) -> None:
 [builtins fixtures/tuple.pyi]
 
 [case testTypeGuardOverload]
-# flags: --strict-optional
 from typing import overload, Any, Callable, Iterable, Iterator, List, Optional, TypeVar
 from typing_extensions import TypeGuard
 
diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test
index 28d83aa54ccc..f6fd27e59e4d 100644
--- a/test-data/unit/check-unions.test
+++ b/test-data/unit/check-unions.test
@@ -929,7 +929,6 @@ reveal_type(z) # N: Revealed type is "Union[builtins.int, __main__.A, builtins.s
 [out]
 
 [case testUnpackUnionNoCrashOnPartialNone]
-# flags: --strict-optional
 from typing import Dict, Tuple, List, Any
 
 a: Any
@@ -944,7 +943,6 @@ if x:
 [out]
 
 [case testUnpackUnionNoCrashOnPartialNone2]
-# flags: --strict-optional
 from typing import Dict, Tuple, List, Any
 
 a: Any
@@ -960,7 +958,6 @@ if x:
 [out]
 
 [case testUnpackUnionNoCrashOnPartialNoneBinder]
-# flags: --strict-optional
 from typing import Dict, Tuple, List, Any
 
 x: object
@@ -975,7 +972,6 @@ if x:
 [out]
 
 [case testUnpackUnionNoCrashOnPartialList]
-# flags: --strict-optional
 from typing import Dict, Tuple, List, Any
 
 a: Any
@@ -1081,7 +1077,6 @@ def bar(a: T4, b: T4) -> T4:  # test multi-level alias
 [builtins fixtures/ops.pyi]
 
 [case testJoinUnionWithUnionAndAny]
-# flags: --strict-optional
 from typing import TypeVar, Union, Any
 T = TypeVar("T")
 def f(x: T, y: T) -> T:
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
index 82ff35f53702..76ecd9f51e35 100644
--- a/test-data/unit/check-unreachable-code.test
+++ b/test-data/unit/check-unreachable-code.test
@@ -615,7 +615,6 @@ reveal_type(x)  # N: Revealed type is "__main__.B"
 [typing fixtures/typing-medium.pyi]
 
 [case testUnreachableWhenSuperclassIsAny]
-# flags: --strict-optional
 from typing import Any
 
 # This can happen if we're importing a class from a missing module
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
index 4da9e0e5033e..6e118597551f 100644
--- a/test-data/unit/check-varargs.test
+++ b/test-data/unit/check-varargs.test
@@ -630,7 +630,6 @@ if int():
 [builtins fixtures/list.pyi]
 
 [case testCallerTupleVarArgsAndGenericCalleeVarArg]
-# flags: --strict-optional
 from typing import TypeVar
 
 T = TypeVar('T')
diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test
index 10c7968be475..90f40777d6b7 100644
--- a/test-data/unit/check-warnings.test
+++ b/test-data/unit/check-warnings.test
@@ -207,7 +207,7 @@ def f() -> Any: return g()
 [out]
 
 [case testOKReturnAnyIfProperSubtype]
-# flags: --warn-return-any --strict-optional
+# flags: --warn-return-any
 from typing import Any, Optional
 
 class Test(object):
diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test
index b43a2ace5eed..c3295b79e4ed 100644
--- a/test-data/unit/deps.test
+++ b/test-data/unit/deps.test
@@ -612,7 +612,6 @@ class A:
 <m.C> -> <m.A.x>, m.A.f, m.C
 
 [case testPartialNoneTypeAttributeCrash2]
-# flags: --strict-optional
 class C: pass
 
 class A:
diff --git a/test-data/unit/fine-grained-suggest.test b/test-data/unit/fine-grained-suggest.test
index 47de16b8d765..02373091ad54 100644
--- a/test-data/unit/fine-grained-suggest.test
+++ b/test-data/unit/fine-grained-suggest.test
@@ -62,7 +62,6 @@ foo('3', '4')
 ==
 
 [case testSuggestInferFunc1]
-# flags: --strict-optional
 # suggest: foo.foo
 [file foo.py]
 def foo(arg, lol=None):
@@ -85,7 +84,6 @@ def untyped(x) -> None:
 ==
 
 [case testSuggestInferFunc2]
-# flags: --strict-optional
 # suggest: foo.foo
 [file foo.py]
 def foo(arg):
@@ -222,7 +220,6 @@ Foo('lol')
 ==
 
 [case testSuggestInferMethod1]
-# flags: --strict-optional
 # suggest: --no-any foo.Foo.foo
 [file foo.py]
 class Foo:
@@ -248,7 +245,6 @@ def bar() -> None:
 ==
 
 [case testSuggestInferMethod2]
-# flags: --strict-optional
 # suggest: foo.Foo.foo
 [file foo.py]
 class Foo:
@@ -275,7 +271,6 @@ def bar() -> None:
 ==
 
 [case testSuggestInferMethod3]
-# flags: --strict-optional
 # suggest2: foo.Foo.foo
 [file foo.py]
 class Foo:
@@ -372,7 +367,6 @@ def has_nested(x):
 ==
 
 [case testSuggestInferFunctionUnreachable]
-# flags: --strict-optional
 # suggest: foo.foo
 [file foo.py]
 import sys
@@ -390,7 +384,6 @@ foo('test')
 ==
 
 [case testSuggestInferMethodStep2]
-# flags: --strict-optional
 # suggest2: foo.Foo.foo
 [file foo.py]
 class Foo:
@@ -417,7 +410,6 @@ def bar() -> None:
 (Union[str, int, None], Optional[int]) -> Union[int, str]
 
 [case testSuggestInferNestedMethod]
-# flags: --strict-optional
 # suggest: foo.Foo.Bar.baz
 [file foo.py]
 class Foo:
@@ -435,7 +427,6 @@ def bar() -> None:
 ==
 
 [case testSuggestCallable]
-# flags: --strict-optional
 # suggest: foo.foo
 # suggest: foo.bar
 # suggest: --flex-any=0.9 foo.bar
@@ -483,7 +474,6 @@ No guesses that match criteria!
 ==
 
 [case testSuggestNewSemanal]
-# flags: --strict-optional
 # suggest: foo.Foo.foo
 # suggest: foo.foo
 [file foo.py]
@@ -521,7 +511,6 @@ def baz() -> None:
 ==
 
 [case testSuggestInferFuncDecorator1]
-# flags: --strict-optional
 # suggest: foo.foo
 [file foo.py]
 from typing import TypeVar
@@ -543,7 +532,6 @@ def bar() -> None:
 ==
 
 [case testSuggestInferFuncDecorator2]
-# flags: --strict-optional
 # suggest: foo.foo
 [file foo.py]
 from typing import TypeVar, Callable, Any
@@ -565,7 +553,6 @@ def bar() -> None:
 ==
 
 [case testSuggestInferFuncDecorator3]
-# flags: --strict-optional
 # suggest: foo.foo
 [file foo.py]
 from typing import TypeVar, Callable, Any
@@ -589,7 +576,6 @@ def bar() -> None:
 ==
 
 [case testSuggestInferFuncDecorator4]
-# flags: --strict-optional
 # suggest: foo.foo
 [file dec.py]
 from typing import TypeVar, Callable, Any
@@ -616,7 +602,6 @@ def bar() -> None:
 ==
 
 [case testSuggestFlexAny1]
-# flags: --strict-optional
 # suggest: --flex-any=0.4 m.foo
 # suggest: --flex-any=0.7 m.foo
 # suggest: --flex-any=0.4 m.bar
@@ -661,7 +646,6 @@ No guesses that match criteria!
 
 
 [case testSuggestFlexAny2]
-# flags: --strict-optional
 # suggest: --flex-any=0.5 m.baz
 # suggest: --flex-any=0.0 m.baz
 # suggest: --flex-any=0.5 m.F.foo
@@ -693,7 +677,6 @@ No guesses that match criteria!
 ==
 
 [case testSuggestClassMethod]
-# flags: --strict-optional
 # suggest: foo.F.bar
 # suggest: foo.F.baz
 # suggest: foo.F.eggs
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
index 11a8f03590f7..66c5ee46db2f 100644
--- a/test-data/unit/fine-grained.test
+++ b/test-data/unit/fine-grained.test
@@ -2083,7 +2083,6 @@ a.py:5: error: "list" expects 1 type argument, but 2 given
 ==
 
 [case testPreviousErrorInOverloadedFunction]
-# flags: --strict-optional
 import a
 [file a.py]
 from typing import overload
@@ -3494,7 +3493,6 @@ def foo() -> None:
 b.py:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
 
 [case testNamedTupleUpdateNonRecursiveToRecursiveFine]
-# flags: --strict-optional
 import c
 [file a.py]
 from b import M
@@ -3537,7 +3535,6 @@ c.py:5: error: Incompatible types in assignment (expression has type "Optional[N
 c.py:7: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins.int, fallback=b.M], None], builtins.int, fallback=a.N]"
 
 [case testTupleTypeUpdateNonRecursiveToRecursiveFine]
-# flags: --strict-optional
 import c
 [file a.py]
 from b import M
@@ -3570,7 +3567,6 @@ c.py:4: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins.int
 c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int")
 
 [case testTypeAliasUpdateNonRecursiveToRecursiveFine]
-# flags: --strict-optional
 import c
 [file a.py]
 from b import M
@@ -4699,7 +4695,6 @@ class B:
 main:7: error: Argument 1 to "g" of "B" has incompatible type "Optional[int]"; expected "str"
 
 [case testStrictOptionalModule]
-# flags: --strict-optional
 import a
 a.y = a.x
 [file a.py]
@@ -4712,10 +4707,9 @@ x: Optional[int]
 y: int
 [out]
 ==
-main:3: error: Incompatible types in assignment (expression has type "Optional[int]", variable has type "int")
+main:2: error: Incompatible types in assignment (expression has type "Optional[int]", variable has type "int")
 
 [case testStrictOptionalFunction]
-# flags: --strict-optional
 import a
 from typing import Optional
 def f() -> None:
@@ -4731,10 +4725,9 @@ def g(x: int) -> None:
     pass
 [out]
 ==
-main:6: error: Argument 1 to "g" has incompatible type "Optional[int]"; expected "int"
+main:5: error: Argument 1 to "g" has incompatible type "Optional[int]"; expected "int"
 
 [case testStrictOptionalMethod]
-# flags: --strict-optional
 import a
 from typing import Optional
 class C:
@@ -4753,7 +4746,7 @@ class B:
         pass
 [out]
 ==
-main:7: error: Argument 1 to "g" of "B" has incompatible type "Optional[int]"; expected "int"
+main:6: error: Argument 1 to "g" of "B" has incompatible type "Optional[int]"; expected "int"
 
 [case testPerFileStrictOptionalModule]
 import a
@@ -7953,7 +7946,7 @@ class Foo(a.I):
 ==
 
 [case testImplicitOptionalRefresh1]
-# flags: --strict-optional --implicit-optional
+# flags: --implicit-optional
 from x import f
 def foo(x: int = None) -> None:
     f()
@@ -9793,7 +9786,6 @@ class ExampleClass(Generic[T]):
 [out]
 ==
 [case testStrictNoneAttribute]
-# flags: --strict-optional
 from typing import Generic, TypeVar
 
 T = TypeVar('T', int, str)
@@ -10046,7 +10038,6 @@ class C(B): ...
 main.py:4: note: Revealed type is "def () -> builtins.str"
 
 [case testAbstractBodyTurnsEmpty]
-# flags: --strict-optional
 from b import Base
 
 class Sub(Base):
@@ -10066,10 +10057,9 @@ class Base:
     def meth(self) -> int: ...
 [out]
 ==
-main:6: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe
+main:5: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe
 
 [case testAbstractBodyTurnsEmptyProtocol]
-# flags: --strict-optional
 from b import Base
 
 class Sub(Base):
@@ -10086,7 +10076,7 @@ class Base(Protocol):
     def meth(self) -> int: ...
 [out]
 ==
-main:6: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe
+main:5: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe
 
 [case testPrettyMessageSorting]
 # flags: --pretty
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index 289005b36d9a..754cb21c3ff8 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -1649,7 +1649,6 @@ foo(list((list(""), "")))
 [out]
 
 [case testNarrowTypeForDictKeys]
-# flags: --strict-optional
 from typing import Dict, KeysView, Optional
 
 d: Dict[str, int]
@@ -1667,10 +1666,10 @@ else:
     reveal_type(k)
 
 [out]
-_testNarrowTypeForDictKeys.py:7: note: Revealed type is "builtins.str"
-_testNarrowTypeForDictKeys.py:9: note: Revealed type is "Union[builtins.str, None]"
-_testNarrowTypeForDictKeys.py:14: note: Revealed type is "builtins.str"
-_testNarrowTypeForDictKeys.py:16: note: Revealed type is "Union[builtins.str, None]"
+_testNarrowTypeForDictKeys.py:6: note: Revealed type is "builtins.str"
+_testNarrowTypeForDictKeys.py:8: note: Revealed type is "Union[builtins.str, None]"
+_testNarrowTypeForDictKeys.py:13: note: Revealed type is "builtins.str"
+_testNarrowTypeForDictKeys.py:15: note: Revealed type is "Union[builtins.str, None]"
 
 [case testTypeAliasWithNewStyleUnion]
 # flags: --python-version 3.10

From b6b6624655826985f75dfd970e2c29f7690ce323 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Mon, 17 Jul 2023 15:27:52 -0400
Subject: [PATCH 011/288] tests: skip-path-normalization should be a testcase
 option (#15660)

The "Skip path normalization" option applies to all [out]s of a test
case, so it's more correct for it to be a "case" option rather than an
"out" option.

This also simplifies the parsing of "out" sections' args.
---
 mypy/test/data.py                 | 13 +++++++------
 test-data/unit/check-literal.test |  8 ++++----
 2 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/mypy/test/data.py b/mypy/test/data.py
index 66dafaff775a..de0267daf918 100644
--- a/mypy/test/data.py
+++ b/mypy/test/data.py
@@ -65,7 +65,6 @@ def parse_test_case(case: DataDrivenTestCase) -> None:
         join = posixpath.join
 
     out_section_missing = case.suite.required_out_section
-    normalize_output = True
 
     files: list[tuple[str, str]] = []  # path and contents
     output_files: list[tuple[str, str | Pattern[str]]] = []  # output path and contents
@@ -156,8 +155,6 @@ def _item_fail(msg: str) -> NoReturn:
 
             version_check = True
             for arg in args:
-                if arg == "skip-path-normalization":
-                    normalize_output = False
                 if arg.startswith("version"):
                     compare_op = arg[7:9]
                     if compare_op not in {">=", "=="}:
@@ -185,7 +182,7 @@ def _item_fail(msg: str) -> NoReturn:
                         version_check = sys.version_info[: len(version)] == version
             if version_check:
                 tmp_output = [expand_variables(line) for line in item.data]
-                if os.path.sep == "\\" and normalize_output:
+                if os.path.sep == "\\" and case.normalize_output:
                     tmp_output = [fix_win_path(line) for line in tmp_output]
                 if item.id == "out" or item.id == "out1":
                     output = tmp_output
@@ -239,7 +236,6 @@ def _item_fail(msg: str) -> NoReturn:
     case.expected_rechecked_modules = rechecked_modules
     case.deleted_paths = deleted_paths
     case.triggered = triggered or []
-    case.normalize_output = normalize_output
     case.expected_fine_grained_targets = targets
     case.test_modules = test_modules
 
@@ -269,7 +265,7 @@ class DataDrivenTestCase(pytest.Item):
 
     # Whether or not we should normalize the output to standardize things like
     # forward vs backward slashes in file paths for Windows vs Linux.
-    normalize_output = True
+    normalize_output: bool
 
     # Extra attributes used by some tests.
     last_line: int
@@ -281,10 +277,12 @@ def __init__(
         self,
         parent: DataSuiteCollector,
         suite: DataSuite,
+        *,
         file: str,
         name: str,
         writescache: bool,
         only_when: str,
+        normalize_output: bool,
         platform: str | None,
         skip: bool,
         xfail: bool,
@@ -296,6 +294,7 @@ def __init__(
         self.file = file
         self.writescache = writescache
         self.only_when = only_when
+        self.normalize_output = normalize_output
         if (platform == "windows" and sys.platform != "win32") or (
             platform == "posix" and sys.platform == "win32"
         ):
@@ -651,6 +650,7 @@ def pytest_pycollect_makeitem(collector: Any, name: str, obj: object) -> Any | N
     r"(?P<name>[a-zA-Z_0-9]+)"
     r"(?P<writescache>-writescache)?"
     r"(?P<only_when>-only_when_cache|-only_when_nocache)?"
+    r"(?P<skip_path_normalization>-skip_path_normalization)?"
     r"(-(?P<platform>posix|windows))?"
     r"(?P<skip>-skip)?"
     r"(?P<xfail>-xfail)?"
@@ -694,6 +694,7 @@ def split_test_cases(
             platform=m.group("platform"),
             skip=bool(m.group("skip")),
             xfail=bool(m.group("xfail")),
+            normalize_output=not m.group("skip_path_normalization"),
             data=data,
             line=line_no,
         )
diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test
index f63f4026c4b6..4498b2ddc9cf 100644
--- a/test-data/unit/check-literal.test
+++ b/test-data/unit/check-literal.test
@@ -278,7 +278,7 @@ reveal_type(c_bytes_wrapper_alias)    # N: Revealed type is "__main__.Wrap[Liter
 [builtins fixtures/tuple.pyi]
 [out]
 
-[case testLiteralUnicodeWeirdCharacters]
+[case testLiteralUnicodeWeirdCharacters-skip_path_normalization]
 from typing import Any
 from typing_extensions import Literal
 
@@ -334,7 +334,7 @@ a1 = b3
 a1 = c3  # E: Incompatible types in assignment (expression has type "Literal['¬b ∧ λ(p)']", variable has type "Literal['\x00¬b ∧ λ(p)']")
 [builtins fixtures/tuple.pyi]
 
-[out skip-path-normalization]
+[out]
 
 [case testLiteralRenamingImportWorks]
 from typing_extensions import Literal as Foo
@@ -478,7 +478,7 @@ reveal_type(f5)  # N: Revealed type is "def (x: Literal['foo']) -> Literal['foo'
 [builtins fixtures/tuple.pyi]
 [out]
 
-[case testLiteralBasicStrUsageSlashes]
+[case testLiteralBasicStrUsageSlashes-skip_path_normalization]
 from typing_extensions import Literal
 
 a: Literal[r"foo\nbar"]
@@ -487,7 +487,7 @@ b: Literal["foo\nbar"]
 reveal_type(a)
 reveal_type(b)
 [builtins fixtures/tuple.pyi]
-[out skip-path-normalization]
+[out]
 main:6: note: Revealed type is "Literal['foo\\nbar']"
 main:7: note: Revealed type is "Literal['foo\nbar']"
 

From 89ad125fa5a31a7c82f267e957d3c94da6b52f61 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Wed, 19 Jul 2023 01:07:53 +0100
Subject: [PATCH 012/288] Update commit hashes following typeshed sync (#15690)

Followup to #15681
---
 misc/sync-typeshed.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py
index 3f870d574d38..36967f86262e 100644
--- a/misc/sync-typeshed.py
+++ b/misc/sync-typeshed.py
@@ -179,10 +179,10 @@ def main() -> None:
     print("Created typeshed sync commit.")
 
     commits_to_cherry_pick = [
-        "9f4c0d8af",  # LiteralString reverts
-        "56f434336",  # sum reverts
-        "71c4269df",  # ctypes reverts
-        "186fbb18e",  # ParamSpec for functools.wraps
+        "2f6b6e66c",  # LiteralString reverts
+        "120af30e7",  # sum reverts
+        "1866d28f1",  # ctypes reverts
+        "3240da455",  # ParamSpec for functools.wraps
     ]
     for commit in commits_to_cherry_pick:
         try:

From 88c0c644c78e2f31ed25a523ad1f74727f2d647f Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Thu, 20 Jul 2023 21:06:59 +0300
Subject: [PATCH 013/288] Update LICENSE with copyright year (#15727)

---
 LICENSE | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/LICENSE b/LICENSE
index 991496cb4878..55d01ee19ad8 100644
--- a/LICENSE
+++ b/LICENSE
@@ -4,8 +4,8 @@ Mypy (and mypyc) are licensed under the terms of the MIT license, reproduced bel
 
 The MIT License
 
-Copyright (c) 2012-2022 Jukka Lehtosalo and contributors
-Copyright (c) 2015-2022 Dropbox, Inc.
+Copyright (c) 2012-2023 Jukka Lehtosalo and contributors
+Copyright (c) 2015-2023 Dropbox, Inc.
 
 Permission is hereby granted, free of charge, to any person obtaining a
 copy of this software and associated documentation files (the "Software"),

From 383137baaf36876dcf935c2f5be053b6419097d0 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Thu, 20 Jul 2023 22:38:20 +0300
Subject: [PATCH 014/288] Remove more unused scripts (#15728)

---
 misc/async_matrix.py          | 149 -----------------------
 misc/fix_annotate.py          | 218 ----------------------------------
 misc/remove-eol-whitespace.sh |   8 --
 3 files changed, 375 deletions(-)
 delete mode 100644 misc/async_matrix.py
 delete mode 100644 misc/fix_annotate.py
 delete mode 100644 misc/remove-eol-whitespace.sh

diff --git a/misc/async_matrix.py b/misc/async_matrix.py
deleted file mode 100644
index d4612dd81799..000000000000
--- a/misc/async_matrix.py
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/usr/bin/env python3
-"""Test various combinations of generators/coroutines.
-
-This was used to cross-check the errors in the test case
-testFullCoroutineMatrix in test-data/unit/check-async-await.test.
-"""
-
-from __future__ import annotations
-
-import sys
-from types import coroutine
-from typing import Any, Awaitable, Generator, Iterator
-
-# The various things you might try to use in `await` or `yield from`.
-
-
-def plain_generator() -> Generator[str, None, int]:
-    yield "a"
-    return 1
-
-
-async def plain_coroutine() -> int:
-    return 1
-
-
-@coroutine
-def decorated_generator() -> Generator[str, None, int]:
-    yield "a"
-    return 1
-
-
-@coroutine
-async def decorated_coroutine() -> int:
-    return 1
-
-
-class It(Iterator[str]):
-    stop = False
-
-    def __iter__(self) -> It:
-        return self
-
-    def __next__(self) -> str:
-        if self.stop:
-            raise StopIteration("end")
-        else:
-            self.stop = True
-            return "a"
-
-
-def other_iterator() -> It:
-    return It()
-
-
-class Aw(Awaitable[int]):
-    def __await__(self) -> Generator[str, Any, int]:
-        yield "a"
-        return 1
-
-
-def other_coroutine() -> Aw:
-    return Aw()
-
-
-# The various contexts in which `await` or `yield from` might occur.
-
-
-def plain_host_generator(func) -> Generator[str, None, None]:
-    yield "a"
-    x = 0
-    f = func()
-    try:
-        x = yield from f  # noqa: F841
-    finally:
-        try:
-            f.close()
-        except AttributeError:
-            pass
-
-
-async def plain_host_coroutine(func) -> None:
-    x = 0
-    x = await func()  # noqa: F841
-
-
-@coroutine
-def decorated_host_generator(func) -> Generator[str, None, None]:
-    yield "a"
-    x = 0
-    f = func()
-    try:
-        x = yield from f  # noqa: F841
-    finally:
-        try:
-            f.close()
-        except AttributeError:
-            pass
-
-
-@coroutine
-async def decorated_host_coroutine(func) -> None:
-    x = 0
-    x = await func()  # noqa: F841
-
-
-# Main driver.
-
-
-def main() -> None:
-    verbose = "-v" in sys.argv
-    for host in [
-        plain_host_generator,
-        plain_host_coroutine,
-        decorated_host_generator,
-        decorated_host_coroutine,
-    ]:
-        print()
-        print("==== Host:", host.__name__)
-        for func in [
-            plain_generator,
-            plain_coroutine,
-            decorated_generator,
-            decorated_coroutine,
-            other_iterator,
-            other_coroutine,
-        ]:
-            print("  ---- Func:", func.__name__)
-            try:
-                f = host(func)
-                for i in range(10):
-                    try:
-                        x = f.send(None)
-                        if verbose:
-                            print("    yield:", x)
-                    except StopIteration as e:
-                        if verbose:
-                            print("    stop:", e.value)
-                        break
-                else:
-                    if verbose:
-                        print("    ???? still going")
-            except Exception as e:
-                print("    error:", repr(e))
-
-
-# Run main().
-
-if __name__ == "__main__":
-    main()
diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py
deleted file mode 100644
index fc8ac27466d5..000000000000
--- a/misc/fix_annotate.py
+++ /dev/null
@@ -1,218 +0,0 @@
-"""Fixer for lib2to3 that inserts mypy annotations into all methods.
-
-The simplest way to run this is to copy it into lib2to3's "fixes"
-subdirectory and then run "2to3 -f annotate" over your files.
-
-The fixer transforms e.g.
-
-  def foo(self, bar, baz=12):
-      return bar + baz
-
-into
-
-  def foo(self, bar, baz=12):
-      # type: (Any, int) -> Any
-      return bar + baz
-
-It does not do type inference but it recognizes some basic default
-argument values such as numbers and strings (and assumes their type
-implies the argument type).
-
-It also uses some basic heuristics to decide whether to ignore the
-first argument:
-
-  - always if it's named 'self'
-  - if there's a @classmethod decorator
-
-Finally, it knows that __init__() is supposed to return None.
-"""
-
-from __future__ import annotations
-
-import os
-import re
-from lib2to3.fixer_base import BaseFix
-from lib2to3.fixer_util import syms, token, touch_import
-from lib2to3.patcomp import compile_pattern
-from lib2to3.pytree import Leaf, Node
-
-
-class FixAnnotate(BaseFix):
-    # This fixer is compatible with the bottom matcher.
-    BM_compatible = True
-
-    # This fixer shouldn't run by default.
-    explicit = True
-
-    # The pattern to match.
-    PATTERN = """
-              funcdef< 'def' name=any parameters< '(' [args=any] ')' > ':' suite=any+ >
-              """
-
-    counter = None if not os.getenv("MAXFIXES") else int(os.getenv("MAXFIXES"))
-
-    def transform(self, node, results):
-        if FixAnnotate.counter is not None:
-            if FixAnnotate.counter <= 0:
-                return
-        suite = results["suite"]
-        children = suite[0].children
-
-        # NOTE: I've reverse-engineered the structure of the parse tree.
-        # It's always a list of nodes, the first of which contains the
-        # entire suite.  Its children seem to be:
-        #
-        #   [0] NEWLINE
-        #   [1] INDENT
-        #   [2...n-2] statements (the first may be a docstring)
-        #   [n-1] DEDENT
-        #
-        # Comments before the suite are part of the INDENT's prefix.
-        #
-        # "Compact" functions (e.g. "def foo(x, y): return max(x, y)")
-        # have a different structure that isn't matched by PATTERN.
-        #
-        #   print('-'*60)
-        #   print(node)
-        #   for i, ch in enumerate(children):
-        #       print(i, repr(ch.prefix), repr(ch))
-        #
-        # Check if there's already an annotation.
-        for ch in children:
-            if ch.prefix.lstrip().startswith("# type:"):
-                return  # There's already a # type: comment here; don't change anything.
-
-        # Compute the annotation
-        annot = self.make_annotation(node, results)
-
-        # Insert '# type: {annot}' comment.
-        # For reference, see lib2to3/fixes/fix_tuple_params.py in stdlib.
-        if len(children) >= 2 and children[1].type == token.INDENT:
-            children[1].prefix = "{}# type: {}\n{}".format(
-                children[1].value, annot, children[1].prefix
-            )
-            children[1].changed()
-            if FixAnnotate.counter is not None:
-                FixAnnotate.counter -= 1
-
-        # Also add 'from typing import Any' at the top.
-        if "Any" in annot:
-            touch_import("typing", "Any", node)
-
-    def make_annotation(self, node, results):
-        name = results["name"]
-        assert isinstance(name, Leaf), repr(name)
-        assert name.type == token.NAME, repr(name)
-        decorators = self.get_decorators(node)
-        is_method = self.is_method(node)
-        if name.value == "__init__" or not self.has_return_exprs(node):
-            restype = "None"
-        else:
-            restype = "Any"
-        args = results.get("args")
-        argtypes = []
-        if isinstance(args, Node):
-            children = args.children
-        elif isinstance(args, Leaf):
-            children = [args]
-        else:
-            children = []
-        # Interpret children according to the following grammar:
-        # (('*'|'**')? NAME ['=' expr] ','?)*
-        stars = inferred_type = ""
-        in_default = False
-        at_start = True
-        for child in children:
-            if isinstance(child, Leaf):
-                if child.value in ("*", "**"):
-                    stars += child.value
-                elif child.type == token.NAME and not in_default:
-                    if not is_method or not at_start or "staticmethod" in decorators:
-                        inferred_type = "Any"
-                    else:
-                        # Always skip the first argument if it's named 'self'.
-                        # Always skip the first argument of a class method.
-                        if child.value == "self" or "classmethod" in decorators:
-                            pass
-                        else:
-                            inferred_type = "Any"
-                elif child.value == "=":
-                    in_default = True
-                elif in_default and child.value != ",":
-                    if child.type == token.NUMBER:
-                        if re.match(r"\d+[lL]?$", child.value):
-                            inferred_type = "int"
-                        else:
-                            inferred_type = "float"  # TODO: complex?
-                    elif child.type == token.STRING:
-                        if child.value.startswith(("u", "U")):
-                            inferred_type = "unicode"
-                        else:
-                            inferred_type = "str"
-                    elif child.type == token.NAME and child.value in ("True", "False"):
-                        inferred_type = "bool"
-                elif child.value == ",":
-                    if inferred_type:
-                        argtypes.append(stars + inferred_type)
-                    # Reset
-                    stars = inferred_type = ""
-                    in_default = False
-                    at_start = False
-        if inferred_type:
-            argtypes.append(stars + inferred_type)
-        return "(" + ", ".join(argtypes) + ") -> " + restype
-
-    # The parse tree has a different shape when there is a single
-    # decorator vs. when there are multiple decorators.
-    DECORATED = "decorated< (d=decorator | decorators< dd=decorator+ >) funcdef >"
-    decorated = compile_pattern(DECORATED)
-
-    def get_decorators(self, node):
-        """Return a list of decorators found on a function definition.
-
-        This is a list of strings; only simple decorators
-        (e.g. @staticmethod) are returned.
-
-        If the function is undecorated or only non-simple decorators
-        are found, return [].
-        """
-        if node.parent is None:
-            return []
-        results = {}
-        if not self.decorated.match(node.parent, results):
-            return []
-        decorators = results.get("dd") or [results["d"]]
-        decs = []
-        for d in decorators:
-            for child in d.children:
-                if isinstance(child, Leaf) and child.type == token.NAME:
-                    decs.append(child.value)
-        return decs
-
-    def is_method(self, node):
-        """Return whether the node occurs (directly) inside a class."""
-        node = node.parent
-        while node is not None:
-            if node.type == syms.classdef:
-                return True
-            if node.type == syms.funcdef:
-                return False
-            node = node.parent
-        return False
-
-    RETURN_EXPR = "return_stmt< 'return' any >"
-    return_expr = compile_pattern(RETURN_EXPR)
-
-    def has_return_exprs(self, node):
-        """Traverse the tree below node looking for 'return expr'.
-
-        Return True if at least 'return expr' is found, False if not.
-        (If both 'return' and 'return expr' are found, return True.)
-        """
-        results = {}
-        if self.return_expr.match(node, results):
-            return True
-        return any(
-            child.type not in (syms.funcdef, syms.classdef) and self.has_return_exprs(child)
-            for child in node.children
-        )
diff --git a/misc/remove-eol-whitespace.sh b/misc/remove-eol-whitespace.sh
deleted file mode 100644
index 5cf666997e34..000000000000
--- a/misc/remove-eol-whitespace.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-
-# Remove trailing whitespace from all non-binary files in a git repo.
-
-# From https://gist.github.com/dpaluy/3690668; originally from here:
-# https://unix.stackexchange.com/questions/36233/how-to-skip-file-in-sed-if-it-contains-regex/36240#36240
-
-git grep -I --name-only -z -e '' | xargs -0 sed -i -e 's/[ \t]\+\(\r\?\)$/\1/'

From 14e7768c3bd8d1164e887ce3becba3459ebcfaa4 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Fri, 21 Jul 2023 22:27:36 +0300
Subject: [PATCH 015/288] Raise errors on unbound TypeVars with values (#15732)

Completes a `TODO` item :)

Refs https://github.com/python/mypy/issues/15724

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 mypy/checker.py                           | 7 +++++++
 test-data/unit/check-typevar-unbound.test | 3 +--
 test-data/unit/deps-generics.test         | 2 +-
 3 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index f2873c7d58e4..724a1dd1f7d7 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -1069,6 +1069,7 @@ def check_func_def(
         """Type check a function definition."""
         # Expand type variables with value restrictions to ordinary types.
         expanded = self.expand_typevars(defn, typ)
+        original_typ = typ
         for item, typ in expanded:
             old_binder = self.binder
             self.binder = ConditionalTypeBinder()
@@ -1126,6 +1127,12 @@ def check_func_def(
                             message_registry.RETURN_TYPE_CANNOT_BE_CONTRAVARIANT, typ.ret_type
                         )
                     self.check_unbound_return_typevar(typ)
+                elif (
+                    isinstance(original_typ.ret_type, TypeVarType) and original_typ.ret_type.values
+                ):
+                    # Since type vars with values are expanded, the return type is changed
+                    # to a raw value. This is a hack to get it back.
+                    self.check_unbound_return_typevar(original_typ)
 
                 # Check that Generator functions have the appropriate return type.
                 if defn.is_generator:
diff --git a/test-data/unit/check-typevar-unbound.test b/test-data/unit/check-typevar-unbound.test
index d3e54c75e373..ed6beaa100db 100644
--- a/test-data/unit/check-typevar-unbound.test
+++ b/test-data/unit/check-typevar-unbound.test
@@ -15,8 +15,7 @@ def g() -> U: # E: A function returning TypeVar should receive at least one argu
 
 V = TypeVar('V', int, str)
 
-# TODO: this should also give an error
-def h() -> V:
+def h() -> V:  # E: A function returning TypeVar should receive at least one argument containing the same TypeVar
     ...
 
 [case testInnerFunctionTypeVar]
diff --git a/test-data/unit/deps-generics.test b/test-data/unit/deps-generics.test
index c78f3fad90c0..6baa57266d2f 100644
--- a/test-data/unit/deps-generics.test
+++ b/test-data/unit/deps-generics.test
@@ -159,7 +159,7 @@ class D: pass
 T = TypeVar('T', A, B)
 S = TypeVar('S', C, D)
 
-def f(x: T) -> S:
+def f(x: T, y: S) -> S:
     pass
 [out]
 <m.A> -> <m.T>, <m.f>, m, m.A, m.f

From 6bdcc92002a5e1a6feb1528d0221802f7514c836 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Sat, 22 Jul 2023 21:20:36 +0100
Subject: [PATCH 016/288] stubtest: Fix `__mypy-replace` false positives
 (#15689)

---
 mypy/stubtest.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index f06faa962b07..906a8c923b37 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -496,7 +496,11 @@ def verify_typeinfo(
     )
 
     # Check everything already defined on the stub class itself (i.e. not inherited)
-    to_check = set(stub.names)
+    #
+    # Filter out non-identifier names, as these are (hopefully always?) whacky/fictional things
+    # (like __mypy-replace or __mypy-post_init, etc.) that don't exist at runtime,
+    # and exist purely for internal mypy reasons
+    to_check = {name for name in stub.names if name.isidentifier()}
     # Check all public things on the runtime class
     to_check.update(
         m for m in vars(runtime) if not is_probably_private(m) and m not in IGNORABLE_CLASS_DUNDERS

From d2022a0007c0eb176ccaf37a9aa54c958be7fb10 Mon Sep 17 00:00:00 2001
From: Ali Hamdan <ali.hamdan.dev@gmail.com>
Date: Sun, 23 Jul 2023 01:43:44 +0200
Subject: [PATCH 017/288] Add `__match_args__` to dataclasses with no fields
 (#15749)

---
 mypy/plugins/dataclasses.py           | 1 -
 test-data/unit/check-dataclasses.test | 5 +++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py
index a4babe7faf61..d782acf50af5 100644
--- a/mypy/plugins/dataclasses.py
+++ b/mypy/plugins/dataclasses.py
@@ -365,7 +365,6 @@ def transform(self) -> bool:
             and (
                 "__match_args__" not in info.names or info.names["__match_args__"].plugin_generated
             )
-            and attributes
             and py_version >= (3, 10)
         ):
             str_type = self._api.named_type("builtins.str")
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index 3866442230bf..1e01a72921f7 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -1892,6 +1892,11 @@ class Two:
     bar: int
 t: Two
 reveal_type(t.__match_args__)  # N: Revealed type is "Tuple[Literal['bar']]"
+@dataclass
+class Empty:
+    ...
+e: Empty
+reveal_type(e.__match_args__)  # N: Revealed type is "Tuple[]"
 [builtins fixtures/dataclasses.pyi]
 
 [case testDataclassWithoutMatchArgs]

From 01c6994ac01e3822fe89c5dc46f8bc8b656c8263 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Wed, 26 Jul 2023 03:34:51 -0400
Subject: [PATCH 018/288] Don't flag intentionally empty generators unreachable
 (#15722)

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 mypy/binder.py                             |  8 -------
 mypy/checker.py                            | 25 ++++++++++++++++++++--
 test-data/unit/check-unreachable-code.test | 16 ++++++++++++++
 3 files changed, 39 insertions(+), 10 deletions(-)

diff --git a/mypy/binder.py b/mypy/binder.py
index 37c0b6bb9006..8a68f24f661e 100644
--- a/mypy/binder.py
+++ b/mypy/binder.py
@@ -42,13 +42,6 @@ def __init__(self, id: int, conditional_frame: bool = False) -> None:
         self.types: dict[Key, Type] = {}
         self.unreachable = False
         self.conditional_frame = conditional_frame
-
-        # Should be set only if we're entering a frame where it's not
-        # possible to accurately determine whether or not contained
-        # statements will be unreachable or not.
-        #
-        # Long-term, we should improve mypy to the point where we no longer
-        # need this field.
         self.suppress_unreachable_warnings = False
 
     def __repr__(self) -> str:
@@ -174,7 +167,6 @@ def is_unreachable(self) -> bool:
         return any(f.unreachable for f in self.frames)
 
     def is_unreachable_warning_suppressed(self) -> bool:
-        # TODO: See todo in 'is_unreachable'
         return any(f.suppress_unreachable_warnings for f in self.frames)
 
     def cleanse(self, expr: Expression) -> None:
diff --git a/mypy/checker.py b/mypy/checker.py
index 724a1dd1f7d7..e0cd02e74573 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -132,6 +132,7 @@
     Var,
     WhileStmt,
     WithStmt,
+    YieldExpr,
     is_final_node,
 )
 from mypy.options import Options
@@ -1241,13 +1242,17 @@ def check_func_def(
                             new_frame.types[key] = narrowed_type
                             self.binder.declarations[key] = old_binder.declarations[key]
                 with self.scope.push_function(defn):
-                    # We suppress reachability warnings when we use TypeVars with value
+                    # We suppress reachability warnings for empty generator functions
+                    # (return; yield) which have a "yield" that's unreachable by definition
+                    # since it's only there to promote the function into a generator function.
+                    #
+                    # We also suppress reachability warnings when we use TypeVars with value
                     # restrictions: we only want to report a warning if a certain statement is
                     # marked as being suppressed in *all* of the expansions, but we currently
                     # have no good way of doing this.
                     #
                     # TODO: Find a way of working around this limitation
-                    if len(expanded) >= 2:
+                    if _is_empty_generator_function(item) or len(expanded) >= 2:
                         self.binder.suppress_unreachable_warnings()
                     self.accept(item.body)
                 unreachable = self.binder.is_unreachable()
@@ -6968,6 +6973,22 @@ def is_literal_not_implemented(n: Expression) -> bool:
     return isinstance(n, NameExpr) and n.fullname == "builtins.NotImplemented"
 
 
+def _is_empty_generator_function(func: FuncItem) -> bool:
+    """
+    Checks whether a function's body is 'return; yield' (the yield being added only
+    to promote the function into a generator function).
+    """
+    body = func.body.body
+    return (
+        len(body) == 2
+        and isinstance(ret_stmt := body[0], ReturnStmt)
+        and (ret_stmt.expr is None or is_literal_none(ret_stmt.expr))
+        and isinstance(expr_stmt := body[1], ExpressionStmt)
+        and isinstance(yield_expr := expr_stmt.expr, YieldExpr)
+        and (yield_expr.expr is None or is_literal_none(yield_expr.expr))
+    )
+
+
 def builtin_item_type(tp: Type) -> Type | None:
     """Get the item type of a builtin container.
 
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
index 76ecd9f51e35..7a6c2cbfd1c7 100644
--- a/test-data/unit/check-unreachable-code.test
+++ b/test-data/unit/check-unreachable-code.test
@@ -1446,3 +1446,19 @@ def f() -> None:
     Foo()['a'] = 'a'
     x = 0 # This should not be reported as unreachable
 [builtins fixtures/exception.pyi]
+
+[case testIntentionallyEmptyGeneratorFunction]
+# flags: --warn-unreachable
+from typing import Generator
+
+def f() -> Generator[None, None, None]:
+    return
+    yield
+
+[case testIntentionallyEmptyGeneratorFunction_None]
+# flags: --warn-unreachable
+from typing import Generator
+
+def f() -> Generator[None, None, None]:
+    return None
+    yield None

From b901d21194400b856a88df62a3d7db871936a50d Mon Sep 17 00:00:00 2001
From: Marcel Johannesmann <mj0nez@fn.de>
Date: Wed, 26 Jul 2023 20:50:13 +0200
Subject: [PATCH 019/288] docs: add missing verb (#15765)

---
 docs/source/cheat_sheet_py3.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst
index 297427e72aca..fe5761ca6187 100644
--- a/docs/source/cheat_sheet_py3.rst
+++ b/docs/source/cheat_sheet_py3.rst
@@ -104,7 +104,7 @@ Functions
        print(value + "!" * excitement)
 
    # Note that arguments without a type are dynamically typed (treated as Any)
-   # and that functions without any annotations not checked
+   # and that functions without any annotations are not checked
    def untyped(x):
        x.anything() + 1 + "string"  # no errors
 

From a8467c43fb6423cc3f7f330f361e6b5af0bf284f Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Fri, 28 Jul 2023 14:59:18 +0300
Subject: [PATCH 020/288] [stubgen] Add required `...` rhs to `NamedTuple`
 fields with default values (#15680)

Closes https://github.com/python/mypy/issues/15638
---
 mypy/stubgen.py             | 19 ++++++++++++-
 test-data/unit/stubgen.test | 56 +++++++++++++++++++++++++++++++++++++
 2 files changed, 74 insertions(+), 1 deletion(-)

diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index 9084da2053cf..a77ee738d56f 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -102,6 +102,7 @@
     OverloadedFuncDef,
     Statement,
     StrExpr,
+    TempNode,
     TupleExpr,
     TypeInfo,
     UnaryExpr,
@@ -637,6 +638,7 @@ def __init__(
         self._state = EMPTY
         self._toplevel_names: list[str] = []
         self._include_private = include_private
+        self._current_class: ClassDef | None = None
         self.import_tracker = ImportTracker()
         # Was the tree semantically analysed before?
         self.analyzed = analyzed
@@ -886,6 +888,7 @@ def get_fullname(self, expr: Expression) -> str:
         return resolved_name
 
     def visit_class_def(self, o: ClassDef) -> None:
+        self._current_class = o
         self.method_names = find_method_names(o.defs.body)
         sep: int | None = None
         if not self._indent and self._state != EMPTY:
@@ -922,6 +925,7 @@ def visit_class_def(self, o: ClassDef) -> None:
         else:
             self._state = CLASS
         self.method_names = set()
+        self._current_class = None
 
     def get_base_types(self, cdef: ClassDef) -> list[str]:
         """Get list of base classes for a class."""
@@ -1330,7 +1334,20 @@ def get_init(
                 typename += f"[{final_arg}]"
         else:
             typename = self.get_str_type_of_node(rvalue)
-        return f"{self._indent}{lvalue}: {typename}\n"
+        initializer = self.get_assign_initializer(rvalue)
+        return f"{self._indent}{lvalue}: {typename}{initializer}\n"
+
+    def get_assign_initializer(self, rvalue: Expression) -> str:
+        """Does this rvalue need some special initializer value?"""
+        if self._current_class and self._current_class.info:
+            # Current rules
+            # 1. Return `...` if we are dealing with `NamedTuple` and it has an existing default value
+            if self._current_class.info.is_named_tuple and not isinstance(rvalue, TempNode):
+                return " = ..."
+            # TODO: support other possible cases, where initializer is important
+
+        # By default, no initializer is required:
+        return ""
 
     def add(self, string: str) -> None:
         """Add text to generated stub."""
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index b387aa840dc9..f6b71a994153 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -698,6 +698,62 @@ class Y(NamedTuple):
     a: int
     b: str
 
+[case testNamedTupleClassSyntax_semanal]
+from typing import NamedTuple
+
+class A(NamedTuple):
+    x: int
+    y: str = 'a'
+
+class B(A):
+    z1: str
+    z2 = 1
+    z3: str = 'b'
+
+class RegularClass:
+    x: int
+    y: str = 'a'
+    class NestedNamedTuple(NamedTuple):
+        x: int
+        y: str = 'a'
+    z: str = 'b'
+[out]
+from typing import NamedTuple
+
+class A(NamedTuple):
+    x: int
+    y: str = ...
+
+class B(A):
+    z1: str
+    z2: int
+    z3: str
+
+class RegularClass:
+    x: int
+    y: str
+    class NestedNamedTuple(NamedTuple):
+        x: int
+        y: str = ...
+    z: str
+
+
+[case testNestedClassInNamedTuple_semanal-xfail]
+from typing import NamedTuple
+
+# TODO: make sure that nested classes in `NamedTuple` are supported:
+class NamedTupleWithNestedClass(NamedTuple):
+    class Nested:
+        x: int
+        y: str = 'a'
+[out]
+from typing import NamedTuple
+
+class NamedTupleWithNestedClass(NamedTuple):
+    class Nested:
+        x: int
+        y: str
+
 [case testEmptyNamedtuple]
 import collections, typing
 X = collections.namedtuple('X', [])

From da1853ff7f764157511ece4305a11369f63353f5 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sat, 29 Jul 2023 11:05:41 +0300
Subject: [PATCH 021/288] Correctly narrow types for `tuple[type[X], ...]`
 (#15691)

`flatten_types` forgot about the second way we represent `tuple` inside.

Closes https://github.com/python/mypy/issues/15443

---------

Co-authored-by: Ilya Priven <ilya.konstantinov@gmail.com>
---
 mypy/checker.py                     |  2 ++
 test-data/unit/check-narrowing.test | 47 +++++++++++++++++++++++++++++
 2 files changed, 49 insertions(+)

diff --git a/mypy/checker.py b/mypy/checker.py
index e0cd02e74573..30dbdd01d972 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -7146,6 +7146,8 @@ def flatten_types(t: Type) -> list[Type]:
     t = get_proper_type(t)
     if isinstance(t, TupleType):
         return [b for a in t.items for b in flatten_types(a)]
+    elif is_named_instance(t, "builtins.tuple"):
+        return [t.args[0]]
     else:
         return [t]
 
diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test
index 22014d4c645c..b763e0ff3b68 100644
--- a/test-data/unit/check-narrowing.test
+++ b/test-data/unit/check-narrowing.test
@@ -1261,3 +1261,50 @@ def g() -> None:
         def foo(): ...
     foo()
 [builtins fixtures/dict.pyi]
+
+
+[case testNarrowingWithTupleOfTypes]
+from typing import Tuple, Type
+
+class Base: ...
+
+class Impl1(Base): ...
+class Impl2(Base): ...
+
+impls: Tuple[Type[Base], ...] = (Impl1, Impl2)
+some: object
+
+if isinstance(some, impls):
+    reveal_type(some)  # N: Revealed type is "__main__.Base"
+else:
+    reveal_type(some)  # N: Revealed type is "builtins.object"
+
+raw: Tuple[type, ...]
+if isinstance(some, raw):
+    reveal_type(some)  # N: Revealed type is "builtins.object"
+else:
+    reveal_type(some)  # N: Revealed type is "builtins.object"
+[builtins fixtures/dict.pyi]
+
+
+[case testNarrowingWithTupleOfTypesPy310Plus]
+# flags: --python-version 3.10
+class Base: ...
+
+class Impl1(Base): ...
+class Impl2(Base): ...
+
+some: int | Base
+
+impls: tuple[type[Base], ...] = (Impl1, Impl2)
+if isinstance(some, impls):
+    reveal_type(some)  # N: Revealed type is "__main__.Base"
+else:
+    reveal_type(some)  # N: Revealed type is "Union[builtins.int, __main__.Base]"
+
+raw: tuple[type, ...]
+if isinstance(some, raw):
+    reveal_type(some)  # N: Revealed type is "Union[builtins.int, __main__.Base]"
+else:
+    reveal_type(some)  # N: Revealed type is "Union[builtins.int, __main__.Base]"
+[builtins fixtures/dict.pyi]

From 14efdf2f1ec098b59b65796b3a37bd84210eca85 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sat, 29 Jul 2023 14:47:02 +0300
Subject: [PATCH 022/288] [stubtest] Test `NamedTuple` definitions with default
 fields (#15774)

This is a test case for https://github.com/python/mypy/pull/15680 from
`stubtest`'s point of view.
---
 mypy/test/teststubtest.py | 68 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 68 insertions(+)

diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py
index 661d46e9fd8a..cd72bd9300d1 100644
--- a/mypy/test/teststubtest.py
+++ b/mypy/test/teststubtest.py
@@ -68,6 +68,7 @@ class Mapping(Generic[_K, _V]): ...
 class Match(Generic[AnyStr]): ...
 class Sequence(Iterable[_T_co]): ...
 class Tuple(Sequence[_T_co]): ...
+class NamedTuple(tuple[Any, ...]): ...
 def overload(func: _T) -> _T: ...
 """
 
@@ -82,6 +83,7 @@ def overload(func: _T) -> _T: ...
 class object:
     __module__: str
     def __init__(self) -> None: pass
+    def __repr__(self) -> str: pass
 class type: ...
 
 class tuple(Sequence[T_co], Generic[T_co]): ...
@@ -1599,6 +1601,72 @@ class Y(TypedDict):
             error=None,
         )
 
+    @collect_cases
+    def test_named_tuple(self) -> Iterator[Case]:
+        yield Case(
+            stub="from typing import NamedTuple",
+            runtime="from typing import NamedTuple",
+            error=None,
+        )
+        yield Case(
+            stub="""
+            class X1(NamedTuple):
+                bar: int
+                foo: str = ...
+            """,
+            runtime="""
+            class X1(NamedTuple):
+                bar: int
+                foo: str = 'a'
+            """,
+            error=None,
+        )
+        yield Case(
+            stub="""
+            class X2(NamedTuple):
+                bar: int
+                foo: str
+            """,
+            runtime="""
+            class X2(NamedTuple):
+                bar: int
+                foo: str = 'a'
+            """,
+            # `__new__` will miss a default value for a `foo` parameter,
+            # but we don't generate special errors for `foo` missing `...` part.
+            error="X2.__new__",
+        )
+
+    @collect_cases
+    def test_named_tuple_typing_and_collections(self) -> Iterator[Case]:
+        yield Case(
+            stub="from typing import NamedTuple",
+            runtime="from collections import namedtuple",
+            error=None,
+        )
+        yield Case(
+            stub="""
+            class X1(NamedTuple):
+                bar: int
+                foo: str = ...
+            """,
+            runtime="""
+            X1 = namedtuple('X1', ['bar', 'foo'], defaults=['a'])
+            """,
+            error=None,
+        )
+        yield Case(
+            stub="""
+            class X2(NamedTuple):
+                bar: int
+                foo: str
+            """,
+            runtime="""
+            X2 = namedtuple('X1', ['bar', 'foo'], defaults=['a'])
+            """,
+            error="X2.__new__",
+        )
+
     @collect_cases
     def test_type_var(self) -> Iterator[Case]:
         yield Case(

From 6040b237e31978b7f6764266a3d162acb68c7884 Mon Sep 17 00:00:00 2001
From: Mark Byrne <31762852+mbyrnepr2@users.noreply.github.com>
Date: Sat, 29 Jul 2023 18:55:19 +0200
Subject: [PATCH 023/288] Remove the Python 37 environment from the `tox.ini`
 (#15693)

Remove the Python 37 environment from the `tox.ini` since Python 3.7 is
now end of life.

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index b5314114570b..8fc76aed7d0e 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,10 +2,10 @@
 minversion = 4.4.4
 skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True}
 envlist =
-    py37,
     py38,
     py39,
     py310,
+    py311,
     docs,
     lint,
     type,

From 8792ff1b81c98644c2563d6526dcba633fba719c Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sat, 29 Jul 2023 22:33:04 +0300
Subject: [PATCH 024/288] Raise `RuntimeError` with better error messages
 (#15778)

While working on https://github.com/python/mypy/pull/15776 I've noticed
that some `RuntimeError` do not have enough metadata to understand what
is going on.
CI:
https://github.com/python/mypy/actions/runs/5700479199/job/15450345887

This PR adds more context to error messages.
---
 mypy/erasetype.py       | 2 +-
 mypy/nodes.py           | 6 +++---
 mypy/patterns.py        | 2 +-
 mypy/server/astmerge.py | 4 ++--
 mypy/types.py           | 2 +-
 5 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/mypy/erasetype.py b/mypy/erasetype.py
index 6533d0c4e0f9..fbbb4f80b578 100644
--- a/mypy/erasetype.py
+++ b/mypy/erasetype.py
@@ -71,7 +71,7 @@ def visit_erased_type(self, t: ErasedType) -> ProperType:
 
     def visit_partial_type(self, t: PartialType) -> ProperType:
         # Should not get here.
-        raise RuntimeError()
+        raise RuntimeError("Cannot erase partial types")
 
     def visit_deleted_type(self, t: DeletedType) -> ProperType:
         return t
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 2d763fc482d3..ebd222f4f253 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -203,7 +203,7 @@ def str_with_options(self, options: Options) -> str:
         return ans
 
     def accept(self, visitor: NodeVisitor[T]) -> T:
-        raise RuntimeError("Not implemented")
+        raise RuntimeError("Not implemented", type(self))
 
 
 @trait
@@ -213,7 +213,7 @@ class Statement(Node):
     __slots__ = ()
 
     def accept(self, visitor: StatementVisitor[T]) -> T:
-        raise RuntimeError("Not implemented")
+        raise RuntimeError("Not implemented", type(self))
 
 
 @trait
@@ -223,7 +223,7 @@ class Expression(Node):
     __slots__ = ()
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
-        raise RuntimeError("Not implemented")
+        raise RuntimeError("Not implemented", type(self))
 
 
 class FakeExpression(Expression):
diff --git a/mypy/patterns.py b/mypy/patterns.py
index 32c27d2a5b3c..839864ef5879 100644
--- a/mypy/patterns.py
+++ b/mypy/patterns.py
@@ -19,7 +19,7 @@ class Pattern(Node):
     __slots__ = ()
 
     def accept(self, visitor: PatternVisitor[T]) -> T:
-        raise RuntimeError("Not implemented")
+        raise RuntimeError("Not implemented", type(self))
 
 
 class AsPattern(Pattern):
diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py
index 5e3759227c7b..f58a4eedabc8 100644
--- a/mypy/server/astmerge.py
+++ b/mypy/server/astmerge.py
@@ -467,13 +467,13 @@ def visit_overloaded(self, t: Overloaded) -> None:
 
     def visit_erased_type(self, t: ErasedType) -> None:
         # This type should exist only temporarily during type inference
-        raise RuntimeError
+        raise RuntimeError("Cannot handle erased type")
 
     def visit_deleted_type(self, typ: DeletedType) -> None:
         pass
 
     def visit_partial_type(self, typ: PartialType) -> None:
-        raise RuntimeError
+        raise RuntimeError("Cannot handle partial type")
 
     def visit_tuple_type(self, typ: TupleType) -> None:
         for item in typ.items:
diff --git a/mypy/types.py b/mypy/types.py
index ba629a3553cf..9eeaa2cc4c3f 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -260,7 +260,7 @@ def can_be_false_default(self) -> bool:
         return True
 
     def accept(self, visitor: TypeVisitor[T]) -> T:
-        raise RuntimeError("Not implemented")
+        raise RuntimeError("Not implemented", type(self))
 
     def __repr__(self) -> str:
         return self.accept(TypeStrVisitor(options=Options()))

From 710ad44916fa89b430407c02a62a6df98f3a06f8 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sun, 30 Jul 2023 00:50:37 +0300
Subject: [PATCH 025/288] Better `tox` configuration (#15777)

It solves two problems:
1. `fix_annotate` and `async_matrix` were removed in
https://github.com/python/mypy/pull/15728
2. It is better to reuse stuff like `runtests.py` not to rewrite the
same command we already have

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 runtests.py | 12 +++++++++++-
 tox.ini     |  4 ++--
 2 files changed, 13 insertions(+), 3 deletions(-)

diff --git a/runtests.py b/runtests.py
index 66fade81ffab..80ef8d814ee1 100755
--- a/runtests.py
+++ b/runtests.py
@@ -48,7 +48,17 @@
 # time to run.
 cmds = {
     # Self type check
-    "self": [executable, "-m", "mypy", "--config-file", "mypy_self_check.ini", "-p", "mypy"],
+    "self": [
+        executable,
+        "-m",
+        "mypy",
+        "--config-file",
+        "mypy_self_check.ini",
+        "-p",
+        "mypy",
+        "-p",
+        "mypyc",
+    ],
     # Lint
     "lint": ["pre-commit", "run", "--all-files"],
     # Fast test cases only (this is the bulk of the test suite)
diff --git a/tox.ini b/tox.ini
index 8fc76aed7d0e..5a728e27fec4 100644
--- a/tox.ini
+++ b/tox.ini
@@ -53,5 +53,5 @@ passenv =
     MYPY_FORCE_COLOR
     MYPY_FORCE_TERMINAL_WIDTH
 commands =
-    python -m mypy --config-file mypy_self_check.ini -p mypy -p mypyc
-    python -m mypy --config-file mypy_self_check.ini misc --exclude misc/fix_annotate.py --exclude misc/async_matrix.py --exclude misc/sync-typeshed.py
+    python runtests.py self
+    python -m mypy --config-file mypy_self_check.ini misc --exclude misc/sync-typeshed.py

From 002502a0111852c360f2255830951473bcfec4a7 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 29 Jul 2023 15:13:21 -0700
Subject: [PATCH 026/288] Fix inference for attrs.fields (#15688)

---
 mypy/checker.py                          |  5 ++++-
 test-data/unit/check-plugin-attrs.test   |  3 +++
 test-data/unit/fixtures/plugin_attrs.pyi | 11 +++++++++--
 3 files changed, 16 insertions(+), 3 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 30dbdd01d972..a8cb2b862fbc 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -4632,7 +4632,10 @@ def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]:
         if int_type:
             return iterator, int_type
 
-        if isinstance(iterable, TupleType):
+        if (
+            isinstance(iterable, TupleType)
+            and iterable.partial_fallback.type.fullname == "builtins.tuple"
+        ):
             joined: Type = UninhabitedType()
             for item in iterable.items:
                 joined = join_types(joined, item)
diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test
index 913584224764..7a7bcb65fe98 100644
--- a/test-data/unit/check-plugin-attrs.test
+++ b/test-data/unit/check-plugin-attrs.test
@@ -1568,6 +1568,9 @@ reveal_type(f(A)[0])  # N: Revealed type is "attr.Attribute[builtins.int]"
 reveal_type(f(A).b)  # N: Revealed type is "attr.Attribute[builtins.int]"
 f(A).x  # E: "____main___A_AttrsAttributes__" has no attribute "x"
 
+for ff in f(A):
+    reveal_type(ff)  # N: Revealed type is "attr.Attribute[Any]"
+
 [builtins fixtures/plugin_attrs.pyi]
 
 [case testAttrsGenericFields]
diff --git a/test-data/unit/fixtures/plugin_attrs.pyi b/test-data/unit/fixtures/plugin_attrs.pyi
index f62104809e74..57e5ecd1b2bc 100644
--- a/test-data/unit/fixtures/plugin_attrs.pyi
+++ b/test-data/unit/fixtures/plugin_attrs.pyi
@@ -1,5 +1,5 @@
 # Builtins stub used to support attrs plugin tests.
-from typing import Union, overload
+from typing import Union, overload, Generic, Sequence, TypeVar, Type, Iterable, Iterator
 
 class object:
     def __init__(self) -> None: pass
@@ -24,6 +24,13 @@ class complex:
 
 class str: pass
 class ellipsis: pass
-class tuple: pass
 class list: pass
 class dict: pass
+
+T = TypeVar("T")
+Tco = TypeVar('Tco', covariant=True)
+class tuple(Sequence[Tco], Generic[Tco]):
+    def __new__(cls: Type[T], iterable: Iterable[Tco] = ...) -> T: ...
+    def __iter__(self) -> Iterator[Tco]: pass
+    def __contains__(self, item: object) -> bool: pass
+    def __getitem__(self, x: int) -> Tco: pass

From d71afbf89437bdf34566f50923759ead2736d93a Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sun, 30 Jul 2023 16:53:47 +0300
Subject: [PATCH 027/288] Change `tuple[]` repr to `tuple[()]` (#15783)

Closes https://github.com/python/mypy/issues/15782
---
 mypy/messages.py                        |  5 +++--
 mypy/test/testtypes.py                  |  4 ++--
 mypy/types.py                           |  2 +-
 test-data/unit/check-async-await.test   |  2 +-
 test-data/unit/check-dataclasses.test   |  2 +-
 test-data/unit/check-namedtuple.test    | 12 +++++++++++-
 test-data/unit/check-overloading.test   |  2 +-
 test-data/unit/check-python310.test     |  4 ++--
 test-data/unit/check-tuples.test        | 21 +++++++++++++++------
 test-data/unit/check-type-aliases.test  |  8 ++++----
 test-data/unit/check-typevar-tuple.test |  2 +-
 test-data/unit/fine-grained.test        |  2 +-
 test-data/unit/typexport-basic.test     |  8 ++++----
 13 files changed, 47 insertions(+), 27 deletions(-)

diff --git a/mypy/messages.py b/mypy/messages.py
index 8b88cc1678a4..c9bf26f8952e 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -2507,10 +2507,11 @@ def format_literal_value(typ: LiteralType) -> str:
         # Prefer the name of the fallback class (if not tuple), as it's more informative.
         if typ.partial_fallback.type.fullname != "builtins.tuple":
             return format(typ.partial_fallback)
+        type_items = format_list(typ.items) or "()"
         if options.use_lowercase_names():
-            s = f"tuple[{format_list(typ.items)}]"
+            s = f"tuple[{type_items}]"
         else:
-            s = f"Tuple[{format_list(typ.items)}]"
+            s = f"Tuple[{type_items}]"
         return s
     elif isinstance(typ, TypedDictType):
         # If the TypedDictType is named, return the name
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
index b1f21b3be79b..59457dfa5d3b 100644
--- a/mypy/test/testtypes.py
+++ b/mypy/test/testtypes.py
@@ -129,10 +129,10 @@ def test_callable_type_with_var_args(self) -> None:
         )
         assert_equal(str(c3), "def (X? =, *Y?) -> Any")
 
-    def test_tuple_type(self) -> None:
+    def test_tuple_type_upper(self) -> None:
         options = Options()
         options.force_uppercase_builtins = True
-        assert_equal(TupleType([], self.fx.std_tuple).str_with_options(options), "Tuple[]")
+        assert_equal(TupleType([], self.fx.std_tuple).str_with_options(options), "Tuple[()]")
         assert_equal(TupleType([self.x], self.fx.std_tuple).str_with_options(options), "Tuple[X?]")
         assert_equal(
             TupleType(
diff --git a/mypy/types.py b/mypy/types.py
index 9eeaa2cc4c3f..d13cff00c06d 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -3197,7 +3197,7 @@ def visit_overloaded(self, t: Overloaded) -> str:
         return f"Overload({', '.join(a)})"
 
     def visit_tuple_type(self, t: TupleType) -> str:
-        s = self.list_str(t.items)
+        s = self.list_str(t.items) or "()"
         tuple_name = "tuple" if self.options.use_lowercase_names() else "Tuple"
         if t.partial_fallback and t.partial_fallback.type:
             fallback_name = t.partial_fallback.type.fullname
diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test
index 3b7ef53b6bd6..af6c31624b96 100644
--- a/test-data/unit/check-async-await.test
+++ b/test-data/unit/check-async-await.test
@@ -475,7 +475,7 @@ async def gen() -> AsyncGenerator[int, str]:
 
 async def h() -> None:
     g = gen()
-    await g.asend(())  # E: Argument 1 to "asend" of "AsyncGenerator" has incompatible type "Tuple[]"; expected "str"
+    await g.asend(())  # E: Argument 1 to "asend" of "AsyncGenerator" has incompatible type "Tuple[()]"; expected "str"
     reveal_type(await g.asend('hello'))  # N: Revealed type is "builtins.int"
 
 [builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index 1e01a72921f7..7881dfbcf1bb 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -1896,7 +1896,7 @@ reveal_type(t.__match_args__)  # N: Revealed type is "Tuple[Literal['bar']]"
 class Empty:
     ...
 e: Empty
-reveal_type(e.__match_args__)  # N: Revealed type is "Tuple[]"
+reveal_type(e.__match_args__)  # N: Revealed type is "Tuple[()]"
 [builtins fixtures/dataclasses.pyi]
 
 [case testDataclassWithoutMatchArgs]
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
index d69b924971e1..6e3628060617 100644
--- a/test-data/unit/check-namedtuple.test
+++ b/test-data/unit/check-namedtuple.test
@@ -931,6 +931,16 @@ reveal_type(A().b)  # N: Revealed type is "typing.NamedTuple"
 [builtins fixtures/tuple.pyi]
 [typing fixtures/typing-namedtuple.pyi]
 
+
+[case testEmptyNamedTupleTypeRepr]
+from typing import NamedTuple
+
+N = NamedTuple('N', [])
+n: N
+reveal_type(N)  # N: Revealed type is "def () -> Tuple[(), fallback=__main__.N]"
+reveal_type(n)  # N: Revealed type is "Tuple[(), fallback=__main__.N]"
+[builtins fixtures/tuple.pyi]
+
 [case testNamedTupleWrongfile]
 from typing import NamedTuple
 from b import Type1
@@ -1036,7 +1046,7 @@ def good6() -> NamedTuple:
 def bad1() -> NamedTuple:
     return 1  # E: Incompatible return value type (got "int", expected "NamedTuple")
 def bad2() -> NamedTuple:
-    return ()  # E: Incompatible return value type (got "Tuple[]", expected "NamedTuple")
+    return ()  # E: Incompatible return value type (got "Tuple[()]", expected "NamedTuple")
 def bad3() -> NamedTuple:
     return (1, 2)  # E: Incompatible return value type (got "Tuple[int, int]", expected "NamedTuple")
 
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index f49a15ada85c..89e5aea210b4 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -1145,7 +1145,7 @@ def f(x: str) -> None: pass
 f(1.1)
 f('')
 f(1)
-f(()) # E: No overload variant of "f" matches argument type "Tuple[]" \
+f(()) # E: No overload variant of "f" matches argument type "Tuple[()]" \
       # N: Possible overload variants: \
       # N:     def f(x: float) -> None \
       # N:     def f(x: str) -> None
diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test
index c07a90b49e63..75293ce9d193 100644
--- a/test-data/unit/check-python310.test
+++ b/test-data/unit/check-python310.test
@@ -1568,8 +1568,8 @@ class AnnAssign(stmt):
    value: str
    simple: int
 
-reveal_type(AST.__match_args__)  # N: Revealed type is "Tuple[]"
-reveal_type(stmt.__match_args__)  # N: Revealed type is "Tuple[]"
+reveal_type(AST.__match_args__)  # N: Revealed type is "Tuple[()]"
+reveal_type(stmt.__match_args__)  # N: Revealed type is "Tuple[()]"
 reveal_type(AnnAssign.__match_args__)  # N: Revealed type is "Tuple[Literal['target']?, Literal['annotation']?, Literal['value']?, Literal['simple']?]"
 
 AnnAssign.__match_args__ = ('a', 'b', 'c', 'd')  # E: Cannot assign to "__match_args__"
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index f64d24a4ed6b..cff261774663 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -143,7 +143,7 @@ t3 = None # type: Tuple[A, B]
 a, b, c = None, None, None # type: (A, B, C)
 
 if int():
-    t2 = ()        # E: Incompatible types in assignment (expression has type "Tuple[]", variable has type "Tuple[A]")
+    t2 = ()        # E: Incompatible types in assignment (expression has type "Tuple[()]", variable has type "Tuple[A]")
 if int():
     t2 = (a, a)    # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]")
 if int():
@@ -1244,9 +1244,9 @@ f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[Any, .
 from typing import Tuple
 def f(a: Tuple[()]) -> None: pass
 f(())
-f((1,))  # E: Argument 1 to "f" has incompatible type "Tuple[int]"; expected "Tuple[]"
-f(('', ''))  # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[]"
-f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[]"
+f((1,))  # E: Argument 1 to "f" has incompatible type "Tuple[int]"; expected "Tuple[()]"
+f(('', ''))  # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[()]"
+f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[()]"
 [builtins fixtures/tuple.pyi]
 
 [case testNonliteralTupleIndex]
@@ -1467,7 +1467,7 @@ from typing import Tuple
 t = ('',) * 2
 reveal_type(t)  # N: Revealed type is "Tuple[builtins.str, builtins.str]"
 t2 = ('',) * -1
-reveal_type(t2)  # N: Revealed type is "Tuple[]"
+reveal_type(t2)  # N: Revealed type is "Tuple[()]"
 t3 = ('', 1) * 2
 reveal_type(t3)  # N: Revealed type is "Tuple[builtins.str, builtins.int, builtins.str, builtins.int]"
 def f() -> Tuple[str, ...]:
@@ -1475,12 +1475,21 @@ def f() -> Tuple[str, ...]:
 reveal_type(f() * 2)  # N: Revealed type is "builtins.tuple[builtins.str, ...]"
 [builtins fixtures/tuple.pyi]
 
+[case testEmptyTupleTypeRepr]
+from typing import Tuple
+
+def f() -> Tuple[()]: ...
+
+reveal_type(f)    # N: Revealed type is "def () -> Tuple[()]"
+reveal_type(f())  # N: Revealed type is "Tuple[()]"
+[builtins fixtures/tuple.pyi]
+
 [case testMultiplyTupleByIntegerLiteralReverse]
 from typing import Tuple
 t = 2 * ('',)
 reveal_type(t)  # N: Revealed type is "Tuple[builtins.str, builtins.str]"
 t2 = -1 * ('',)
-reveal_type(t2)  # N: Revealed type is "Tuple[]"
+reveal_type(t2)  # N: Revealed type is "Tuple[()]"
 t3 = 2 * ('', 1)
 reveal_type(t3)  # N: Revealed type is "Tuple[builtins.str, builtins.int, builtins.str, builtins.int]"
 def f() -> Tuple[str, ...]:
diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test
index 42f22e89d6b7..3ca0c5ef0a4b 100644
--- a/test-data/unit/check-type-aliases.test
+++ b/test-data/unit/check-type-aliases.test
@@ -12,7 +12,7 @@ U = Union[int, str]
 def f(x: U) -> None: pass
 f(1)
 f('')
-f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]"
+f(()) # E: Argument 1 to "f" has incompatible type "Tuple[()]"; expected "Union[int, str]"
 [targets __main__, __main__.f]
 [builtins fixtures/tuple.pyi]
 
@@ -64,7 +64,7 @@ from _m import U
 def f(x: U) -> None: pass
 f(1)
 f('x')
-f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]"
+f(()) # E: Argument 1 to "f" has incompatible type "Tuple[()]"; expected "Union[int, str]"
 [file _m.py]
 from typing import Union
 U = Union[int, str]
@@ -170,11 +170,11 @@ f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str"
 from typing import Tuple, Callable
 EmptyTuple = Tuple[()]
 x: EmptyTuple
-reveal_type(x)  # N: Revealed type is "Tuple[]"
+reveal_type(x)  # N: Revealed type is "Tuple[()]"
 
 EmptyTupleCallable = Callable[[Tuple[()]], None]
 f: EmptyTupleCallable
-reveal_type(f)  # N: Revealed type is "def (Tuple[])"
+reveal_type(f)  # N: Revealed type is "def (Tuple[()])"
 [builtins fixtures/list.pyi]
 
 [case testForwardTypeAlias]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 1024f90ee6b7..e822cea9304f 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -58,7 +58,7 @@ f_args3: Tuple[int, str, bool]
 reveal_type(f(f_args))  # N: Revealed type is "Tuple[builtins.str, builtins.str]"
 reveal_type(f(f_args2))  # N: Revealed type is "Tuple[builtins.str]"
 reveal_type(f(f_args3))  # N: Revealed type is "Tuple[builtins.str, builtins.str, builtins.bool]"
-f(empty)  # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Tuple[int]"
+f(empty)  # E: Argument 1 to "f" has incompatible type "Tuple[()]"; expected "Tuple[int]"
 f(bad_args)  # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[int, str]"
 # TODO: This hits a crash where we assert len(templates.items) == 1. See visit_tuple_type
 # in mypy/constraints.py.
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
index 66c5ee46db2f..68f72a2aa992 100644
--- a/test-data/unit/fine-grained.test
+++ b/test-data/unit/fine-grained.test
@@ -8026,7 +8026,7 @@ A = NamedTuple('A', F)  # type: ignore
 [builtins fixtures/list.pyi]
 [out]
 ==
-b.py:3: note: Revealed type is "Tuple[, fallback=a.A]"
+b.py:3: note: Revealed type is "Tuple[(), fallback=a.A]"
 
 [case testImportOnTopOfAlias1]
 from a import A
diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test
index 0dcd0098f177..cd2afe2c1c75 100644
--- a/test-data/unit/typexport-basic.test
+++ b/test-data/unit/typexport-basic.test
@@ -294,8 +294,8 @@ import typing
 x = ()
 [builtins fixtures/primitives.pyi]
 [out]
-NameExpr(2) : Tuple[]
-TupleExpr(2) : Tuple[]
+NameExpr(2) : Tuple[()]
+TupleExpr(2) : Tuple[()]
 
 [case testInferTwoTypes]
 ## NameExpr
@@ -313,8 +313,8 @@ def f() -> None:
     x = ()
 [builtins fixtures/primitives.pyi]
 [out]
-NameExpr(3) : Tuple[]
-TupleExpr(3) : Tuple[]
+NameExpr(3) : Tuple[()]
+TupleExpr(3) : Tuple[()]
 
 
 -- Basic generics

From cb813259c3b9dff6aaa8686793cf6a0634cf1f69 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sun, 30 Jul 2023 18:48:12 +0300
Subject: [PATCH 028/288] Update pre-commit deps (#15784)

Closes https://github.com/python/mypy/pull/15526
---
 .pre-commit-config.yaml | 4 ++--
 test-requirements.txt   | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a56e1af938b8..7a4aada8d593 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,11 +6,11 @@ repos:
       - id: trailing-whitespace
       - id: end-of-file-fixer
   - repo: https://github.com/hauntsaninja/black-pre-commit-mirror
-    rev: 23.3.0  # must match test-requirements.txt
+    rev: 23.7.0  # must match test-requirements.txt
     hooks:
       - id: black
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.272  # must match test-requirements.txt
+    rev: v0.0.280  # must match test-requirements.txt
     hooks:
       - id: ruff
         args: [--exit-non-zero-on-fix]
diff --git a/test-requirements.txt b/test-requirements.txt
index 5340973a4de1..6f7bec0375ad 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,7 +1,7 @@
 -r mypy-requirements.txt
 -r build-requirements.txt
 attrs>=18.0
-black==23.3.0  # must match version in .pre-commit-config.yaml
+black==23.7.0  # must match version in .pre-commit-config.yaml
 filelock>=3.3.0
 # lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses manylinux2014
 lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12'
@@ -11,6 +11,6 @@ psutil>=4.0
 pytest>=7.4.0
 pytest-xdist>=1.34.0
 pytest-cov>=2.10.0
-ruff==0.0.272  # must match version in .pre-commit-config.yaml
+ruff==0.0.280  # must match version in .pre-commit-config.yaml
 setuptools>=65.5.1
 tomli>=1.1.0  # needed even on py311+ so the self check passes with --python-version 3.7

From 54bc37ccade0476a1738b33cd34b6eb35d7124e1 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Tue, 1 Aug 2023 15:59:34 +0100
Subject: [PATCH 029/288] reduce frequency of pre-commit.ci autoupdate PRs
 (#15798)

---
 .pre-commit-config.yaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7a4aada8d593..8ee89cbb912f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -14,3 +14,5 @@ repos:
     hooks:
       - id: ruff
         args: [--exit-non-zero-on-fix]
+ci:
+  autoupdate_schedule: quarterly

From 2b613e5ba1ada5a44f88a90528af834bf9f770a7 Mon Sep 17 00:00:00 2001
From: Marti Raudsepp <marti@juffo.org>
Date: Thu, 3 Aug 2023 02:31:00 +0300
Subject: [PATCH 030/288] Fix type narrowing of `== None` and `in (None,)`
 conditions (#15760)

---
 mypy/checker.py                        | 10 +++++-----
 mypy/checkexpr.py                      |  9 +++++++--
 mypy/plugins/common.py                 |  4 ++--
 mypy/suggestions.py                    |  6 +++---
 mypy/types_utils.py                    |  6 +++---
 test-data/unit/check-narrowing.test    | 26 ++++++++++++++++++++++++++
 test-data/unit/fixtures/primitives.pyi |  3 ++-
 7 files changed, 48 insertions(+), 16 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index a8cb2b862fbc..0c27da8b5ac8 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -216,7 +216,7 @@
     is_literal_type,
     is_named_instance,
 )
-from mypy.types_utils import is_optional, remove_optional, store_argument_type, strip_type
+from mypy.types_utils import is_overlapping_none, remove_optional, store_argument_type, strip_type
 from mypy.typetraverser import TypeTraverserVisitor
 from mypy.typevars import fill_typevars, fill_typevars_with_any, has_no_typevars
 from mypy.util import is_dunder, is_sunder, is_typeshed_file
@@ -5660,13 +5660,13 @@ def has_no_custom_eq_checks(t: Type) -> bool:
 
                     if left_index in narrowable_operand_index_to_hash:
                         # We only try and narrow away 'None' for now
-                        if is_optional(item_type):
+                        if is_overlapping_none(item_type):
                             collection_item_type = get_proper_type(
                                 builtin_item_type(iterable_type)
                             )
                             if (
                                 collection_item_type is not None
-                                and not is_optional(collection_item_type)
+                                and not is_overlapping_none(collection_item_type)
                                 and not (
                                     isinstance(collection_item_type, Instance)
                                     and collection_item_type.type.fullname == "builtins.object"
@@ -6073,7 +6073,7 @@ def refine_away_none_in_comparison(
         non_optional_types = []
         for i in chain_indices:
             typ = operand_types[i]
-            if not is_optional(typ):
+            if not is_overlapping_none(typ):
                 non_optional_types.append(typ)
 
         # Make sure we have a mixture of optional and non-optional types.
@@ -6083,7 +6083,7 @@ def refine_away_none_in_comparison(
         if_map = {}
         for i in narrowable_operand_indices:
             expr_type = operand_types[i]
-            if not is_optional(expr_type):
+            if not is_overlapping_none(expr_type):
                 continue
             if any(is_overlapping_erased_types(expr_type, t) for t in non_optional_types):
                 if_map[operands[i]] = remove_optional(expr_type)
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 62e2298ba59d..114cde8327e0 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -169,7 +169,12 @@
     is_named_instance,
     split_with_prefix_and_suffix,
 )
-from mypy.types_utils import is_generic_instance, is_optional, is_self_type_like, remove_optional
+from mypy.types_utils import (
+    is_generic_instance,
+    is_overlapping_none,
+    is_self_type_like,
+    remove_optional,
+)
 from mypy.typestate import type_state
 from mypy.typevars import fill_typevars
 from mypy.typevartuples import find_unpack_in_list
@@ -1809,7 +1814,7 @@ def infer_function_type_arguments_using_context(
         # valid results.
         erased_ctx = replace_meta_vars(ctx, ErasedType())
         ret_type = callable.ret_type
-        if is_optional(ret_type) and is_optional(ctx):
+        if is_overlapping_none(ret_type) and is_overlapping_none(ctx):
             # If both the context and the return type are optional, unwrap the optional,
             # since in 99% cases this is what a user expects. In other words, we replace
             #     Optional[T] <: Optional[int]
diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py
index 65d967577bea..55f2870cadb4 100644
--- a/mypy/plugins/common.py
+++ b/mypy/plugins/common.py
@@ -43,7 +43,7 @@
     deserialize_type,
     get_proper_type,
 )
-from mypy.types_utils import is_optional
+from mypy.types_utils import is_overlapping_none
 from mypy.typevars import fill_typevars
 from mypy.util import get_unique_redefinition_name
 
@@ -141,7 +141,7 @@ def find_shallow_matching_overload_item(overload: Overloaded, call: CallExpr) ->
                         break
                 elif (
                     arg_none
-                    and not is_optional(arg_type)
+                    and not is_overlapping_none(arg_type)
                     and not (
                         isinstance(arg_type, Instance)
                         and arg_type.type.fullname == "builtins.object"
diff --git a/mypy/suggestions.py b/mypy/suggestions.py
index 8e1225f00a2f..268f3032fc9b 100644
--- a/mypy/suggestions.py
+++ b/mypy/suggestions.py
@@ -79,7 +79,7 @@
     UnionType,
     get_proper_type,
 )
-from mypy.types_utils import is_optional, remove_optional
+from mypy.types_utils import is_overlapping_none, remove_optional
 from mypy.util import split_target
 
 
@@ -752,7 +752,7 @@ def score_type(self, t: Type, arg_pos: bool) -> int:
                 return 20
             if any(has_any_type(x) for x in t.items):
                 return 15
-            if not is_optional(t):
+            if not is_overlapping_none(t):
                 return 10
         if isinstance(t, CallableType) and (has_any_type(t) or is_tricky_callable(t)):
             return 10
@@ -868,7 +868,7 @@ def visit_typeddict_type(self, t: TypedDictType) -> str:
         return t.fallback.accept(self)
 
     def visit_union_type(self, t: UnionType) -> str:
-        if len(t.items) == 2 and is_optional(t):
+        if len(t.items) == 2 and is_overlapping_none(t):
             return f"Optional[{remove_optional(t).accept(self)}]"
         else:
             return super().visit_union_type(t)
diff --git a/mypy/types_utils.py b/mypy/types_utils.py
index 43bca05d6bf9..7f2e38ef3753 100644
--- a/mypy/types_utils.py
+++ b/mypy/types_utils.py
@@ -101,10 +101,10 @@ def is_generic_instance(tp: Type) -> bool:
     return isinstance(tp, Instance) and bool(tp.args)
 
 
-def is_optional(t: Type) -> bool:
+def is_overlapping_none(t: Type) -> bool:
     t = get_proper_type(t)
-    return isinstance(t, UnionType) and any(
-        isinstance(get_proper_type(e), NoneType) for e in t.items
+    return isinstance(t, NoneType) or (
+        isinstance(t, UnionType) and any(isinstance(get_proper_type(e), NoneType) for e in t.items)
     )
 
 
diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test
index b763e0ff3b68..291f73a45230 100644
--- a/test-data/unit/check-narrowing.test
+++ b/test-data/unit/check-narrowing.test
@@ -1263,6 +1263,32 @@ def g() -> None:
 [builtins fixtures/dict.pyi]
 
 
+[case testNarrowingOptionalEqualsNone]
+from typing import Optional
+
+class A: ...
+
+val: Optional[A]
+
+if val == None:
+    reveal_type(val)  # N: Revealed type is "Union[__main__.A, None]"
+else:
+    reveal_type(val)  # N: Revealed type is "Union[__main__.A, None]"
+if val != None:
+    reveal_type(val)  # N: Revealed type is "Union[__main__.A, None]"
+else:
+    reveal_type(val)  # N: Revealed type is "Union[__main__.A, None]"
+
+if val in (None,):
+    reveal_type(val)  # N: Revealed type is "Union[__main__.A, None]"
+else:
+    reveal_type(val)  # N: Revealed type is "Union[__main__.A, None]"
+if val not in (None,):
+    reveal_type(val)  # N: Revealed type is "Union[__main__.A, None]"
+else:
+    reveal_type(val)  # N: Revealed type is "Union[__main__.A, None]"
+[builtins fixtures/primitives.pyi]
+
 [case testNarrowingWithTupleOfTypes]
 from typing import Tuple, Type
 
diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi
index b74252857d6f..c9b1e3f4e983 100644
--- a/test-data/unit/fixtures/primitives.pyi
+++ b/test-data/unit/fixtures/primitives.pyi
@@ -45,7 +45,8 @@ class memoryview(Sequence[int]):
     def __iter__(self) -> Iterator[int]: pass
     def __contains__(self, other: object) -> bool: pass
     def __getitem__(self, item: int) -> int: pass
-class tuple(Generic[T]): pass
+class tuple(Generic[T]):
+    def __contains__(self, other: object) -> bool: pass
 class list(Sequence[T]):
     def __iter__(self) -> Iterator[T]: pass
     def __contains__(self, other: object) -> bool: pass

From 0d708cb9c9d5291c1c988ef90a1b77307ed5315c Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 4 Aug 2023 00:17:42 +0100
Subject: [PATCH 031/288] New type inference: complete transitive closure
 (#15754)

This is a first follow-up for #15287 (I like how my PR titles sound like
research paper titles, LOL)

This PR completes the new type inference foundations by switching to a
complete and well-founded algorithm [1] for transitive closure (which
replaces the more ad hoc initial algorithm that covered 80% of cases and
was good for experimenting with the new inference scheme). In particular the
algorithm in this PR covers two important edge cases (see tests). Some
comments:
* I don't intend to switch the default for `--new-type-inference`, I
just want to see the effect of the switch on `mypy_primer`, I will
switch back to false before merging
* This flag is still not ready to be publicly announced, I am going to
make another 2-3 PRs from the list in #15287 before making this public.
* I am not adding yet the unit tests as discussed in previous PR. This
PR is already quite big, and the next one (support for upper bounds and
values) should be much smaller. I am going to add unit tests only for
`transitive_closure()` which is the core of new logic.
* While working on this I fixed a couple of bugs exposed in `TypeVarTuple`
support: one is a rare technical corner case; the other is serious —
template and actual were swapped during constraint inference,
effectively causing the outer/return context to be completely ignored for
instances.
* It is better to review the PR with "ignore whitespace" option turned
on (there is big chunk in solve.py that is just change of indentation).
* There is one questionable design choice I am making in this PR: I am
adding `extra_tvars` as an attribute of the `Constraint` class, while it
logically should not be attributed to any individual constraint, but
rather to the full list of constraints. However, doing this properly
would require changing the return type of `infer_constraints()` and all
related functions, which would be a really big refactoring.

[1] Definition 7.1 in https://inria.hal.science/inria-00073205/document

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 mypy/checker.py                       |  72 ++---
 mypy/checkexpr.py                     |  40 ++-
 mypy/constraints.py                   |  63 +++--
 mypy/expandtype.py                    |   5 +
 mypy/infer.py                         |  10 +-
 mypy/solve.py                         | 387 +++++++++++++-------------
 mypy/subtypes.py                      |   3 +-
 mypy/test/testconstraints.py          |   3 -
 mypy/test/testsolve.py                |  50 ++--
 mypy/typeops.py                       |   6 +-
 mypy_self_check.ini                   |   1 +
 test-data/unit/check-generics.test    |  20 +-
 test-data/unit/check-overloading.test |  15 +
 13 files changed, 356 insertions(+), 319 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 0c27da8b5ac8..b786155079e5 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -734,8 +734,10 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None:
                     #     def foo(x: str) -> str: ...
                     #
                     # See Python 2's map function for a concrete example of this kind of overload.
+                    current_class = self.scope.active_class()
+                    type_vars = current_class.defn.type_vars if current_class else []
                     with state.strict_optional_set(True):
-                        if is_unsafe_overlapping_overload_signatures(sig1, sig2):
+                        if is_unsafe_overlapping_overload_signatures(sig1, sig2, type_vars):
                             self.msg.overloaded_signatures_overlap(i + 1, i + j + 2, item.func)
 
             if impl_type is not None:
@@ -1702,7 +1704,9 @@ def is_unsafe_overlapping_op(
             first = forward_tweaked
             second = reverse_tweaked
 
-        return is_unsafe_overlapping_overload_signatures(first, second)
+        current_class = self.scope.active_class()
+        type_vars = current_class.defn.type_vars if current_class else []
+        return is_unsafe_overlapping_overload_signatures(first, second, type_vars)
 
     def check_inplace_operator_method(self, defn: FuncBase) -> None:
         """Check an inplace operator method such as __iadd__.
@@ -3918,11 +3922,12 @@ def is_valid_defaultdict_partial_value_type(self, t: ProperType) -> bool:
             return True
         if len(t.args) == 1:
             arg = get_proper_type(t.args[0])
-            # TODO: This is too permissive -- we only allow TypeVarType since
-            #       they leak in cases like defaultdict(list) due to a bug.
-            #       This can result in incorrect types being inferred, but only
-            #       in rare cases.
-            if isinstance(arg, (TypeVarType, UninhabitedType, NoneType)):
+            if self.options.new_type_inference:
+                allowed = isinstance(arg, (UninhabitedType, NoneType))
+            else:
+                # Allow leaked TypeVars for legacy inference logic.
+                allowed = isinstance(arg, (UninhabitedType, NoneType, TypeVarType))
+            if allowed:
                 return True
         return False
 
@@ -7179,7 +7184,7 @@ def are_argument_counts_overlapping(t: CallableType, s: CallableType) -> bool:
 
 
 def is_unsafe_overlapping_overload_signatures(
-    signature: CallableType, other: CallableType
+    signature: CallableType, other: CallableType, class_type_vars: list[TypeVarLikeType]
 ) -> bool:
     """Check if two overloaded signatures are unsafely overlapping or partially overlapping.
 
@@ -7198,8 +7203,8 @@ def is_unsafe_overlapping_overload_signatures(
     # This lets us identify cases where the two signatures use completely
     # incompatible types -- e.g. see the testOverloadingInferUnionReturnWithMixedTypevars
     # test case.
-    signature = detach_callable(signature)
-    other = detach_callable(other)
+    signature = detach_callable(signature, class_type_vars)
+    other = detach_callable(other, class_type_vars)
 
     # Note: We repeat this check twice in both directions due to a slight
     # asymmetry in 'is_callable_compatible'. When checking for partial overlaps,
@@ -7230,7 +7235,7 @@ def is_unsafe_overlapping_overload_signatures(
     )
 
 
-def detach_callable(typ: CallableType) -> CallableType:
+def detach_callable(typ: CallableType, class_type_vars: list[TypeVarLikeType]) -> CallableType:
     """Ensures that the callable's type variables are 'detached' and independent of the context.
 
     A callable normally keeps track of the type variables it uses within its 'variables' field.
@@ -7240,42 +7245,17 @@ def detach_callable(typ: CallableType) -> CallableType:
     This function will traverse the callable and find all used type vars and add them to the
     variables field if it isn't already present.
 
-    The caller can then unify on all type variables whether or not the callable is originally
-    from a class or not."""
-    type_list = typ.arg_types + [typ.ret_type]
-
-    appear_map: dict[str, list[int]] = {}
-    for i, inner_type in enumerate(type_list):
-        typevars_available = get_type_vars(inner_type)
-        for var in typevars_available:
-            if var.fullname not in appear_map:
-                appear_map[var.fullname] = []
-            appear_map[var.fullname].append(i)
-
-    used_type_var_names = set()
-    for var_name, appearances in appear_map.items():
-        used_type_var_names.add(var_name)
-
-    all_type_vars = get_type_vars(typ)
-    new_variables = []
-    for var in set(all_type_vars):
-        if var.fullname not in used_type_var_names:
-            continue
-        new_variables.append(
-            TypeVarType(
-                name=var.name,
-                fullname=var.fullname,
-                id=var.id,
-                values=var.values,
-                upper_bound=var.upper_bound,
-                default=var.default,
-                variance=var.variance,
-            )
-        )
-    out = typ.copy_modified(
-        variables=new_variables, arg_types=type_list[:-1], ret_type=type_list[-1]
+    The caller can then unify on all type variables whether the callable is originally from
+    the class or not."""
+    if not class_type_vars:
+        # Fast path, nothing to update.
+        return typ
+    seen_type_vars = set()
+    for t in typ.arg_types + [typ.ret_type]:
+        seen_type_vars |= set(get_type_vars(t))
+    return typ.copy_modified(
+        variables=list(typ.variables) + [tv for tv in class_type_vars if tv in seen_type_vars]
     )
-    return out
 
 
 def overload_can_never_match(signature: CallableType, other: CallableType) -> bool:
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 114cde8327e0..9e46d9ee39cb 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -1857,7 +1857,7 @@ def infer_function_type_arguments_using_context(
             #        expects_literal(identity(3))  # Should type-check
             if not is_generic_instance(ctx) and not is_literal_type_like(ctx):
                 return callable.copy_modified()
-        args = infer_type_arguments(callable.type_var_ids(), ret_type, erased_ctx)
+        args = infer_type_arguments(callable.variables, ret_type, erased_ctx)
         # Only substitute non-Uninhabited and non-erased types.
         new_args: list[Type | None] = []
         for arg in args:
@@ -1906,7 +1906,7 @@ def infer_function_type_arguments(
                 else:
                     pass1_args.append(arg)
 
-            inferred_args = infer_function_type_arguments(
+            inferred_args, _ = infer_function_type_arguments(
                 callee_type,
                 pass1_args,
                 arg_kinds,
@@ -1948,7 +1948,7 @@ def infer_function_type_arguments(
                 # variables while allowing for polymorphic solutions, i.e. for solutions
                 # potentially involving free variables.
                 # TODO: support the similar inference for return type context.
-                poly_inferred_args = infer_function_type_arguments(
+                poly_inferred_args, free_vars = infer_function_type_arguments(
                     callee_type,
                     arg_types,
                     arg_kinds,
@@ -1957,30 +1957,28 @@ def infer_function_type_arguments(
                     strict=self.chk.in_checked_function(),
                     allow_polymorphic=True,
                 )
-                for i, pa in enumerate(get_proper_types(poly_inferred_args)):
-                    if isinstance(pa, (NoneType, UninhabitedType)) or has_erased_component(pa):
-                        # Indicate that free variables should not be applied in the call below.
-                        poly_inferred_args[i] = None
                 poly_callee_type = self.apply_generic_arguments(
                     callee_type, poly_inferred_args, context
                 )
-                yes_vars = poly_callee_type.variables
-                no_vars = {v for v in callee_type.variables if v not in poly_callee_type.variables}
-                if not set(get_type_vars(poly_callee_type)) & no_vars:
-                    # Try applying inferred polymorphic type if possible, e.g. Callable[[T], T] can
-                    # be interpreted as def [T] (T) -> T, but dict[T, T] cannot be expressed.
-                    applied = apply_poly(poly_callee_type, yes_vars)
-                    if applied is not None and poly_inferred_args != [UninhabitedType()] * len(
-                        poly_inferred_args
-                    ):
-                        freeze_all_type_vars(applied)
-                        return applied
+                # Try applying inferred polymorphic type if possible, e.g. Callable[[T], T] can
+                # be interpreted as def [T] (T) -> T, but dict[T, T] cannot be expressed.
+                applied = apply_poly(poly_callee_type, free_vars)
+                if applied is not None and all(
+                    a is not None and not isinstance(get_proper_type(a), UninhabitedType)
+                    for a in poly_inferred_args
+                ):
+                    freeze_all_type_vars(applied)
+                    return applied
                 # If it didn't work, erase free variables as <nothing>, to avoid confusing errors.
+                unknown = UninhabitedType()
+                unknown.ambiguous = True
                 inferred_args = [
-                    expand_type(a, {v.id: UninhabitedType() for v in callee_type.variables})
+                    expand_type(
+                        a, {v.id: unknown for v in list(callee_type.variables) + free_vars}
+                    )
                     if a is not None
                     else None
-                    for a in inferred_args
+                    for a in poly_inferred_args
                 ]
         else:
             # In dynamically typed functions use implicit 'Any' types for
@@ -2019,7 +2017,7 @@ def infer_function_type_arguments_pass2(
 
         arg_types = self.infer_arg_types_in_context(callee_type, args, arg_kinds, formal_to_actual)
 
-        inferred_args = infer_function_type_arguments(
+        inferred_args, _ = infer_function_type_arguments(
             callee_type,
             arg_types,
             arg_kinds,
diff --git a/mypy/constraints.py b/mypy/constraints.py
index f9124630a706..299c6292a259 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -73,6 +73,10 @@ def __init__(self, type_var: TypeVarLikeType, op: int, target: Type) -> None:
         self.op = op
         self.target = target
         self.origin_type_var = type_var
+        # These are additional type variables that should be solved for together with type_var.
+        # TODO: A cleaner solution may be to modify the return type of infer_constraints()
+        # to include these instead, but this is a rather big refactoring.
+        self.extra_tvars: list[TypeVarLikeType] = []
 
     def __repr__(self) -> str:
         op_str = "<:"
@@ -168,7 +172,9 @@ def infer_constraints_for_callable(
     return constraints
 
 
-def infer_constraints(template: Type, actual: Type, direction: int) -> list[Constraint]:
+def infer_constraints(
+    template: Type, actual: Type, direction: int, skip_neg_op: bool = False
+) -> list[Constraint]:
     """Infer type constraints.
 
     Match a template type, which may contain type variable references,
@@ -187,7 +193,9 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons
       ((T, S), (X, Y))  -->  T :> X and S :> Y
       (X[T], Any)       -->  T <: Any and T :> Any
 
-    The constraints are represented as Constraint objects.
+    The constraints are represented as Constraint objects. If skip_neg_op == True,
+    then skip adding reverse (polymorphic) constraints (since this is already a call
+    to infer such constraints).
     """
     if any(
         get_proper_type(template) == get_proper_type(t)
@@ -202,13 +210,15 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons
             # Return early on an empty branch.
             return []
         type_state.inferring.append((template, actual))
-        res = _infer_constraints(template, actual, direction)
+        res = _infer_constraints(template, actual, direction, skip_neg_op)
         type_state.inferring.pop()
         return res
-    return _infer_constraints(template, actual, direction)
+    return _infer_constraints(template, actual, direction, skip_neg_op)
 
 
-def _infer_constraints(template: Type, actual: Type, direction: int) -> list[Constraint]:
+def _infer_constraints(
+    template: Type, actual: Type, direction: int, skip_neg_op: bool
+) -> list[Constraint]:
     orig_template = template
     template = get_proper_type(template)
     actual = get_proper_type(actual)
@@ -284,7 +294,7 @@ def _infer_constraints(template: Type, actual: Type, direction: int) -> list[Con
         return []
 
     # Remaining cases are handled by ConstraintBuilderVisitor.
-    return template.accept(ConstraintBuilderVisitor(actual, direction))
+    return template.accept(ConstraintBuilderVisitor(actual, direction, skip_neg_op))
 
 
 def infer_constraints_if_possible(
@@ -510,10 +520,14 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
     # TODO: The value may be None. Is that actually correct?
     actual: ProperType
 
-    def __init__(self, actual: ProperType, direction: int) -> None:
+    def __init__(self, actual: ProperType, direction: int, skip_neg_op: bool) -> None:
         # Direction must be SUBTYPE_OF or SUPERTYPE_OF.
         self.actual = actual
         self.direction = direction
+        # Whether to skip polymorphic inference (involves inference in opposite direction)
+        # this is used to prevent infinite recursion when both template and actual are
+        # generic callables.
+        self.skip_neg_op = skip_neg_op
 
     # Trivial leaf types
 
@@ -648,13 +662,13 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                     assert mapped.type.type_var_tuple_prefix is not None
                     assert mapped.type.type_var_tuple_suffix is not None
 
-                    unpack_constraints, mapped_args, instance_args = build_constraints_for_unpack(
-                        mapped.args,
-                        mapped.type.type_var_tuple_prefix,
-                        mapped.type.type_var_tuple_suffix,
+                    unpack_constraints, instance_args, mapped_args = build_constraints_for_unpack(
                         instance.args,
                         instance.type.type_var_tuple_prefix,
                         instance.type.type_var_tuple_suffix,
+                        mapped.args,
+                        mapped.type.type_var_tuple_prefix,
+                        mapped.type.type_var_tuple_suffix,
                         self.direction,
                     )
                     res.extend(unpack_constraints)
@@ -879,6 +893,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
         # Note that non-normalized callables can be created in annotations
         # using e.g. callback protocols.
         template = template.with_unpacked_kwargs()
+        extra_tvars = False
         if isinstance(self.actual, CallableType):
             res: list[Constraint] = []
             cactual = self.actual.with_unpacked_kwargs()
@@ -890,6 +905,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                     type_state.infer_polymorphic
                     and cactual.variables
                     and cactual.param_spec() is None
+                    and not self.skip_neg_op
                     # Technically, the correct inferred type for application of e.g.
                     # Callable[..., T] -> Callable[..., T] (with literal ellipsis), to a generic
                     # like U -> U, should be Callable[..., Any], but if U is a self-type, we can
@@ -897,18 +913,15 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                     # depends on this old behaviour.
                     and not any(tv.id.raw_id == 0 for tv in cactual.variables)
                 ):
-                    # If actual is generic, unify it with template. Note: this is
-                    # not an ideal solution (which would be adding the generic variables
-                    # to the constraint inference set), but it's a good first approximation,
-                    # and this will prevent leaking these variables in the solutions.
-                    # Note: this may infer constraints like T <: S or T <: List[S]
-                    # that contain variables in the target.
-                    unified = mypy.subtypes.unify_generic_callable(
-                        cactual, template, ignore_return=True
+                    # If the actual callable is generic, infer constraints in the opposite
+                    # direction, and indicate to the solver there are extra type variables
+                    # to solve for (see more details in mypy/solve.py).
+                    res.extend(
+                        infer_constraints(
+                            cactual, template, neg_op(self.direction), skip_neg_op=True
+                        )
                     )
-                    if unified is not None:
-                        cactual = unified
-                        res.extend(infer_constraints(cactual, template, neg_op(self.direction)))
+                    extra_tvars = True
 
                 # We can't infer constraints from arguments if the template is Callable[..., T]
                 # (with literal '...').
@@ -978,6 +991,9 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                 cactual_ret_type = cactual.type_guard
 
             res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction))
+            if extra_tvars:
+                for c in res:
+                    c.extra_tvars = list(cactual.variables)
             return res
         elif isinstance(self.actual, AnyType):
             param_spec = template.param_spec()
@@ -1205,6 +1221,9 @@ def find_and_build_constraints_for_unpack(
 
 
 def build_constraints_for_unpack(
+    # TODO: this naming is misleading, these should be "actual", not "mapped"
+    # both template and actual can be mapped before, depending on direction.
+    # Also the convention is to put template related args first.
     mapped: tuple[Type, ...],
     mapped_prefix_len: int | None,
     mapped_suffix_len: int | None,
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 83d9bf4c8725..b599b49e4c12 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -272,6 +272,11 @@ def visit_param_spec(self, t: ParamSpecType) -> Type:
             return repl
 
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type:
+        # Sometimes solver may need to expand a type variable with (a copy of) itself
+        # (usually together with other TypeVars, but it is hard to filter out TypeVarTuples).
+        repl = self.variables[t.id]
+        if isinstance(repl, TypeVarTupleType):
+            return repl
         raise NotImplementedError
 
     def visit_unpack_type(self, t: UnpackType) -> Type:
diff --git a/mypy/infer.py b/mypy/infer.py
index 66ca4169e2ff..f34087910e4b 100644
--- a/mypy/infer.py
+++ b/mypy/infer.py
@@ -12,7 +12,7 @@
 )
 from mypy.nodes import ArgKind
 from mypy.solve import solve_constraints
-from mypy.types import CallableType, Instance, Type, TypeVarId
+from mypy.types import CallableType, Instance, Type, TypeVarLikeType
 
 
 class ArgumentInferContext(NamedTuple):
@@ -37,7 +37,7 @@ def infer_function_type_arguments(
     context: ArgumentInferContext,
     strict: bool = True,
     allow_polymorphic: bool = False,
-) -> list[Type | None]:
+) -> tuple[list[Type | None], list[TypeVarLikeType]]:
     """Infer the type arguments of a generic function.
 
     Return an array of lower bound types for the type variables -1 (at
@@ -57,14 +57,14 @@ def infer_function_type_arguments(
     )
 
     # Solve constraints.
-    type_vars = callee_type.type_var_ids()
+    type_vars = callee_type.variables
     return solve_constraints(type_vars, constraints, strict, allow_polymorphic)
 
 
 def infer_type_arguments(
-    type_var_ids: list[TypeVarId], template: Type, actual: Type, is_supertype: bool = False
+    type_vars: Sequence[TypeVarLikeType], template: Type, actual: Type, is_supertype: bool = False
 ) -> list[Type | None]:
     # Like infer_function_type_arguments, but only match a single type
     # against a generic type.
     constraints = infer_constraints(template, actual, SUPERTYPE_OF if is_supertype else SUBTYPE_OF)
-    return solve_constraints(type_var_ids, constraints)
+    return solve_constraints(type_vars, constraints)[0]
diff --git a/mypy/solve.py b/mypy/solve.py
index 6693d66f3479..02df90aff1e1 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -2,9 +2,11 @@
 
 from __future__ import annotations
 
-from typing import Iterable
+from collections import defaultdict
+from typing import Iterable, Sequence
+from typing_extensions import TypeAlias as _TypeAlias
 
-from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, neg_op
+from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints, neg_op
 from mypy.expandtype import expand_type
 from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort
 from mypy.join import join_types
@@ -17,6 +19,7 @@
     Type,
     TypeOfAny,
     TypeVarId,
+    TypeVarLikeType,
     TypeVarType,
     UninhabitedType,
     UnionType,
@@ -25,45 +28,72 @@
 )
 from mypy.typestate import type_state
 
+Bounds: _TypeAlias = "dict[TypeVarId, set[Type]]"
+Graph: _TypeAlias = "set[tuple[TypeVarId, TypeVarId]]"
+Solutions: _TypeAlias = "dict[TypeVarId, Type | None]"
+
 
 def solve_constraints(
-    vars: list[TypeVarId],
+    original_vars: Sequence[TypeVarLikeType],
     constraints: list[Constraint],
     strict: bool = True,
     allow_polymorphic: bool = False,
-) -> list[Type | None]:
+) -> tuple[list[Type | None], list[TypeVarLikeType]]:
     """Solve type constraints.
 
-    Return the best type(s) for type variables; each type can be None if the value of the variable
-    could not be solved.
+    Return the best type(s) for type variables; each type can be None if the value of
+    the variable could not be solved.
 
     If a variable has no constraints, if strict=True then arbitrarily
-    pick NoneType as the value of the type variable.  If strict=False,
-    pick AnyType.
+    pick UninhabitedType as the value of the type variable. If strict=False, pick AnyType.
+    If allow_polymorphic=True, then use the full algorithm that can potentially return
+    free type variables in solutions (these require special care when applying). Otherwise,
+    use a simplified algorithm that just solves each type variable individually if possible.
     """
+    vars = [tv.id for tv in original_vars]
     if not vars:
-        return []
+        return [], []
+
+    originals = {tv.id: tv for tv in original_vars}
+    extra_vars: list[TypeVarId] = []
+    # Get additional type variables from generic actuals.
+    for c in constraints:
+        extra_vars.extend([v.id for v in c.extra_tvars if v.id not in vars + extra_vars])
+        originals.update({v.id: v for v in c.extra_tvars if v.id not in originals})
     if allow_polymorphic:
         # Constraints like T :> S and S <: T are semantically the same, but they are
         # represented differently. Normalize the constraint list w.r.t this equivalence.
-        constraints = normalize_constraints(constraints, vars)
+        constraints = normalize_constraints(constraints, vars + extra_vars)
 
     # Collect a list of constraints for each type variable.
-    cmap: dict[TypeVarId, list[Constraint]] = {tv: [] for tv in vars}
+    cmap: dict[TypeVarId, list[Constraint]] = {tv: [] for tv in vars + extra_vars}
     for con in constraints:
-        if con.type_var in vars:
+        if con.type_var in vars + extra_vars:
             cmap[con.type_var].append(con)
 
     if allow_polymorphic:
-        solutions = solve_non_linear(vars, constraints, cmap)
+        if constraints:
+            solutions, free_vars = solve_with_dependent(
+                vars + extra_vars, constraints, vars, originals
+            )
+        else:
+            solutions = {}
+            free_vars = []
     else:
         solutions = {}
+        free_vars = []
         for tv, cs in cmap.items():
             if not cs:
                 continue
             lowers = [c.target for c in cs if c.op == SUPERTYPE_OF]
             uppers = [c.target for c in cs if c.op == SUBTYPE_OF]
-            solutions[tv] = solve_one(lowers, uppers, [])
+            solution = solve_one(lowers, uppers)
+
+            # Do not leak type variables in non-polymorphic solutions.
+            if solution is None or not get_vars(
+                solution, [tv for tv in extra_vars if tv not in vars]
+            ):
+                solutions[tv] = solution
 
     res: list[Type | None] = []
     for v in vars:
@@ -78,129 +108,128 @@ def solve_constraints(
             else:
                 candidate = AnyType(TypeOfAny.special_form)
             res.append(candidate)
-    return res
+    return res, [originals[tv] for tv in free_vars]
 
 
-def solve_non_linear(
-    vars: list[TypeVarId], constraints: list[Constraint], cmap: dict[TypeVarId, list[Constraint]]
-) -> dict[TypeVarId, Type | None]:
-    """Solve set of constraints that may include non-linear ones, like T <: List[S].
+def solve_with_dependent(
+    vars: list[TypeVarId],
+    constraints: list[Constraint],
+    original_vars: list[TypeVarId],
+    originals: dict[TypeVarId, TypeVarLikeType],
+) -> tuple[Solutions, list[TypeVarId]]:
+    """Solve set of constraints that may depend on each other, like T <: List[S].
 
     The whole algorithm consists of five steps:
-      * Propagate via linear constraints to get all possible constraints for each variable
+      * Propagate via linear constraints and use secondary constraints to get transitive closure
       * Find dependencies between type variables, group them in SCCs, and sort topologically
-      * Check all SCC are intrinsically linear, we can't solve (express) T <: List[T]
+      * Check that all SCCs are intrinsically linear; we can't solve (express) T <: List[T]
       * Variables in leaf SCCs that don't have constant bounds are free (choose one per SCC)
-      * Solve constraints iteratively starting from leafs, updating targets after each step.
+      * Solve constraints iteratively starting from leafs, updating bounds after each step.
     """
-    extra_constraints = []
-    for tvar in vars:
-        extra_constraints.extend(propagate_constraints_for(tvar, SUBTYPE_OF, cmap))
-        extra_constraints.extend(propagate_constraints_for(tvar, SUPERTYPE_OF, cmap))
-    constraints += remove_dups(extra_constraints)
-
-    # Recompute constraint map after propagating.
-    cmap = {tv: [] for tv in vars}
-    for con in constraints:
-        if con.type_var in vars:
-            cmap[con.type_var].append(con)
+    graph, lowers, uppers = transitive_closure(vars, constraints)
 
-    dmap = compute_dependencies(cmap)
+    dmap = compute_dependencies(vars, graph, lowers, uppers)
     sccs = list(strongly_connected_components(set(vars), dmap))
-    if all(check_linear(scc, cmap) for scc in sccs):
-        raw_batches = list(topsort(prepare_sccs(sccs, dmap)))
-        leafs = raw_batches[0]
-        free_vars = []
-        for scc in leafs:
-            # If all constrain targets in this SCC are type variables within the
-            # same SCC then the only meaningful solution we can express, is that
-            # each variable is equal to a new free variable. For example if we
-            # have T <: S, S <: U, we deduce: T = S = U = <free>.
-            if all(
-                isinstance(c.target, TypeVarType) and c.target.id in vars
-                for tv in scc
-                for c in cmap[tv]
-            ):
-                # For convenience with current type application machinery, we randomly
-                # choose one of the existing type variables in SCC and designate it as free
-                # instead of defining a new type variable as a common solution.
-                # TODO: be careful about upper bounds (or values) when introducing free vars.
-                free_vars.append(sorted(scc, key=lambda x: x.raw_id)[0])
-
-        # Flatten the SCCs that are independent, we can solve them together,
-        # since we don't need to update any targets in between.
-        batches = []
-        for batch in raw_batches:
-            next_bc = []
-            for scc in batch:
-                next_bc.extend(list(scc))
-            batches.append(next_bc)
-
-        solutions: dict[TypeVarId, Type | None] = {}
-        for flat_batch in batches:
-            solutions.update(solve_iteratively(flat_batch, cmap, free_vars))
-        # We remove the solutions like T = T for free variables. This will indicate
-        # to the apply function, that they should not be touched.
-        # TODO: return list of free type variables explicitly, this logic is fragile
-        # (but if we do, we need to be careful everything works in incremental modes).
-        for tv in free_vars:
-            if tv in solutions:
-                del solutions[tv]
-        return solutions
-    return {}
+    if not all(check_linear(scc, lowers, uppers) for scc in sccs):
+        return {}, []
+    raw_batches = list(topsort(prepare_sccs(sccs, dmap)))
+
+    free_vars = []
+    for scc in raw_batches[0]:
+        # If there are no bounds on this SCC, then the only meaningful solution we can
+        # express, is that each variable is equal to a new free variable. For example,
+        # if we have T <: S, S <: U, we deduce: T = S = U = <free>.
+        if all(not lowers[tv] and not uppers[tv] for tv in scc):
+            # For convenience with current type application machinery, we use a stable
+            # choice that prefers the original type variables (not polymorphic ones) in SCC.
+            # TODO: be careful about upper bounds (or values) when introducing free vars.
+            free_vars.append(sorted(scc, key=lambda x: (x not in original_vars, x.raw_id))[0])
+
+    # Update lowers/uppers with free vars, so these can now be used
+    # as valid solutions.
+    for l, u in graph.copy():
+        if l in free_vars:
+            lowers[u].add(originals[l])
+        if u in free_vars:
+            uppers[l].add(originals[u])
+
+    # Flatten the SCCs that are independent, we can solve them together,
+    # since we don't need to update any targets in between.
+    batches = []
+    for batch in raw_batches:
+        next_bc = []
+        for scc in batch:
+            next_bc.extend(list(scc))
+        batches.append(next_bc)
+
+    solutions: dict[TypeVarId, Type | None] = {}
+    for flat_batch in batches:
+        res = solve_iteratively(flat_batch, graph, lowers, uppers)
+        solutions.update(res)
+    return solutions, free_vars
 
 
 def solve_iteratively(
-    batch: list[TypeVarId], cmap: dict[TypeVarId, list[Constraint]], free_vars: list[TypeVarId]
-) -> dict[TypeVarId, Type | None]:
-    """Solve constraints sequentially, updating constraint targets after each step.
-
-    We solve for type variables that appear in `batch`. If a constraint target is not constant
-    (i.e. constraint looks like T :> F[S, ...]), we substitute solutions found so far in
-    the target F[S, ...].  This way we can gradually solve for all variables in the batch taking
-    one solvable variable at a time (i.e. such a variable that has at least one constant bound).
-
-    Importantly, variables in free_vars are considered constants, so for example if we have just
-    one initial constraint T <: List[S], we will have two SCCs {T} and {S}, then we first
-    designate S as free, and therefore T = List[S] is a valid solution for T.
+    batch: list[TypeVarId], graph: Graph, lowers: Bounds, uppers: Bounds
+) -> Solutions:
+    """Solve transitive closure sequentially, updating upper/lower bounds after each step.
+
+    Transitive closure is represented as a linear graph plus lower/upper bounds for each
+    type variable, see transitive_closure() docstring for details.
+
+    We solve for type variables that appear in `batch`. If a bound is not constant (i.e. it
+    looks like T :> F[S, ...]), we substitute solutions found so far in the target F[S, ...]
+    after solving the batch.
+
+    Importantly, after solving each variable in a batch, we move it from linear graph to
+    upper/lower bounds; this way we can guarantee consistency of solutions (see comment below
+    for an example when this is important).
     """
     solutions = {}
-    relevant_constraints = []
-    for tv in batch:
-        relevant_constraints.extend(cmap.get(tv, []))
-    lowers, uppers = transitive_closure(batch, relevant_constraints)
     s_batch = set(batch)
-    not_allowed_vars = [v for v in batch if v not in free_vars]
     while s_batch:
-        for tv in s_batch:
-            if any(not get_vars(l, not_allowed_vars) for l in lowers[tv]) or any(
-                not get_vars(u, not_allowed_vars) for u in uppers[tv]
-            ):
+        for tv in sorted(s_batch, key=lambda x: x.raw_id):
+            if lowers[tv] or uppers[tv]:
                 solvable_tv = tv
                 break
         else:
             break
         # Solve each solvable type variable separately.
         s_batch.remove(solvable_tv)
-        result = solve_one(lowers[solvable_tv], uppers[solvable_tv], not_allowed_vars)
+        result = solve_one(lowers[solvable_tv], uppers[solvable_tv])
         solutions[solvable_tv] = result
         if result is None:
-            # TODO: support backtracking lower/upper bound choices
+            # TODO: support backtracking lower/upper bound choices and order within SCCs.
             # (will require switching this function from iterative to recursive).
             continue
-        # Update the (transitive) constraints if there is a solution.
-        subs = {solvable_tv: result}
-        lowers = {tv: {expand_type(l, subs) for l in lowers[tv]} for tv in lowers}
-        uppers = {tv: {expand_type(u, subs) for u in uppers[tv]} for tv in uppers}
-        for v in cmap:
-            for c in cmap[v]:
-                c.target = expand_type(c.target, subs)
+
+        # Update the (transitive) bounds from graph if there is a solution.
+        # This is needed to guarantee solutions will never contradict the initial
+        # constraints. For example, consider {T <: S, T <: A, S :> B} with A :> B.
+        # If we would not update the uppers/lowers from graph, we would infer T = A, S = B
+        # which is not correct.
+        for l, u in graph.copy():
+            if l == u:
+                continue
+            if l == solvable_tv:
+                lowers[u].add(result)
+                graph.remove((l, u))
+            if u == solvable_tv:
+                uppers[l].add(result)
+                graph.remove((l, u))
+
+    # We can update uppers/lowers only once after solving the whole SCC,
+    # since uppers/lowers can't depend on type variables in the SCC
+    # (and we would reject such an SCC as non-linear and therefore not solvable).
+    subs = {tv: s for (tv, s) in solutions.items() if s is not None}
+    for tv in lowers:
+        lowers[tv] = {expand_type(lt, subs) for lt in lowers[tv]}
+    for tv in uppers:
+        uppers[tv] = {expand_type(ut, subs) for ut in uppers[tv]}
     return solutions
 
 
-def solve_one(
-    lowers: Iterable[Type], uppers: Iterable[Type], not_allowed_vars: list[TypeVarId]
-) -> Type | None:
+def solve_one(lowers: Iterable[Type], uppers: Iterable[Type]) -> Type | None:
     """Solve constraints by finding by using meets of upper bounds, and joins of lower bounds."""
     bottom: Type | None = None
     top: Type | None = None
@@ -210,10 +239,6 @@ def solve_one(
     # bounds based on constraints. Note that we assume that the constraint
     # targets do not have constraint references.
     for target in lowers:
-        # There may be multiple steps needed to solve all vars within a
-        # (linear) SCC. We ignore targets pointing to not yet solved vars.
-        if get_vars(target, not_allowed_vars):
-            continue
         if bottom is None:
             bottom = target
         else:
@@ -225,9 +250,6 @@ def solve_one(
                 bottom = join_types(bottom, target)
 
     for target in uppers:
-        # Same as above.
-        if get_vars(target, not_allowed_vars):
-            continue
         if top is None:
             top = target
         else:
@@ -262,6 +284,7 @@ def normalize_constraints(
     This includes two things currently:
       * Complement T :> S by S <: T
       * Remove strict duplicates
+      * Remove constraints for unrelated variables
     """
     res = constraints.copy()
     for c in constraints:
@@ -270,96 +293,81 @@ def normalize_constraints(
     return [c for c in remove_dups(constraints) if c.type_var in vars]
 
 
-def propagate_constraints_for(
-    var: TypeVarId, direction: int, cmap: dict[TypeVarId, list[Constraint]]
-) -> list[Constraint]:
-    """Propagate via linear constraints to get additional constraints for `var`.
-
-    For example if we have constraints:
-        [T <: int, S <: T, S :> str]
-    we can add two more
-        [S <: int, T :> str]
-    """
-    extra_constraints = []
-    seen = set()
-    front = [var]
-    if cmap[var]:
-        var_def = cmap[var][0].origin_type_var
-    else:
-        return []
-    while front:
-        tv = front.pop(0)
-        for c in cmap[tv]:
-            if (
-                isinstance(c.target, TypeVarType)
-                and c.target.id not in seen
-                and c.target.id in cmap
-                and c.op == direction
-            ):
-                front.append(c.target.id)
-                seen.add(c.target.id)
-            elif c.op == direction:
-                new_c = Constraint(var_def, direction, c.target)
-                if new_c not in cmap[var]:
-                    extra_constraints.append(new_c)
-    return extra_constraints
-
-
 def transitive_closure(
     tvars: list[TypeVarId], constraints: list[Constraint]
-) -> tuple[dict[TypeVarId, set[Type]], dict[TypeVarId, set[Type]]]:
+) -> tuple[Graph, Bounds, Bounds]:
     """Find transitive closure for given constraints on type variables.
 
     Transitive closure gives maximal set of lower/upper bounds for each type variable,
     such that we cannot deduce any further bounds by chaining other existing bounds.
 
+    The transitive closure is represented by:
+      * A set of lower and upper bounds for each type variable, where only constant and
+        non-linear terms are included in the bounds.
+      * A graph of linear constraints between type variables (represented as a set of pairs)
+    Such separation simplifies reasoning, and allows an efficient and simple incremental
+    transitive closure algorithm that we use here.
+
     For example if we have initial constraints [T <: S, S <: U, U <: int], the transitive
     closure is given by:
-      * {} <: T <: {S, U, int}
-      * {T} <: S <: {U, int}
-      * {T, S} <: U <: {int}
+      * {} <: T <: {int}
+      * {} <: S <: {int}
+      * {} <: U <: {int}
+      * {T <: S, S <: U, T <: U}
     """
-    # TODO: merge propagate_constraints_for() into this function.
-    # TODO: add secondary constraints here to make the algorithm complete.
-    uppers: dict[TypeVarId, set[Type]] = {tv: set() for tv in tvars}
-    lowers: dict[TypeVarId, set[Type]] = {tv: set() for tv in tvars}
-    graph: set[tuple[TypeVarId, TypeVarId]] = set()
+    uppers: Bounds = defaultdict(set)
+    lowers: Bounds = defaultdict(set)
+    graph: Graph = {(tv, tv) for tv in tvars}
 
-    # Prime the closure with the initial trivial values.
-    for c in constraints:
-        if isinstance(c.target, TypeVarType) and c.target.id in tvars:
-            if c.op == SUBTYPE_OF:
-                graph.add((c.type_var, c.target.id))
-            else:
-                graph.add((c.target.id, c.type_var))
-        if c.op == SUBTYPE_OF:
-            uppers[c.type_var].add(c.target)
-        else:
-            lowers[c.type_var].add(c.target)
-
-    # At this stage we know that constant bounds have been propagated already, so we
-    # only need to propagate linear constraints.
-    for c in constraints:
+    remaining = set(constraints)
+    while remaining:
+        c = remaining.pop()
         if isinstance(c.target, TypeVarType) and c.target.id in tvars:
             if c.op == SUBTYPE_OF:
                 lower, upper = c.type_var, c.target.id
             else:
                 lower, upper = c.target.id, c.type_var
-            extras = {
+            if (lower, upper) in graph:
+                continue
+            graph |= {
                 (l, u) for l in tvars for u in tvars if (l, lower) in graph and (upper, u) in graph
             }
-            graph |= extras
             for u in tvars:
                 if (upper, u) in graph:
                     lowers[u] |= lowers[lower]
             for l in tvars:
                 if (l, lower) in graph:
                     uppers[l] |= uppers[upper]
-    return lowers, uppers
+            for lt in lowers[lower]:
+                for ut in uppers[upper]:
+                    # TODO: what if secondary constraints result in inference
+                    # against polymorphic actual (also in below branches)?
+                    remaining |= set(infer_constraints(lt, ut, SUBTYPE_OF))
+                    remaining |= set(infer_constraints(ut, lt, SUPERTYPE_OF))
+        elif c.op == SUBTYPE_OF:
+            if c.target in uppers[c.type_var]:
+                continue
+            for l in tvars:
+                if (l, c.type_var) in graph:
+                    uppers[l].add(c.target)
+            for lt in lowers[c.type_var]:
+                remaining |= set(infer_constraints(lt, c.target, SUBTYPE_OF))
+                remaining |= set(infer_constraints(c.target, lt, SUPERTYPE_OF))
+        else:
+            assert c.op == SUPERTYPE_OF
+            if c.target in lowers[c.type_var]:
+                continue
+            for u in tvars:
+                if (c.type_var, u) in graph:
+                    lowers[u].add(c.target)
+            for ut in uppers[c.type_var]:
+                remaining |= set(infer_constraints(ut, c.target, SUPERTYPE_OF))
+                remaining |= set(infer_constraints(c.target, ut, SUBTYPE_OF))
+    return graph, lowers, uppers
 
 
 def compute_dependencies(
-    cmap: dict[TypeVarId, list[Constraint]]
+    tvars: list[TypeVarId], graph: Graph, lowers: Bounds, uppers: Bounds
 ) -> dict[TypeVarId, list[TypeVarId]]:
     """Compute dependencies between type variables induced by constraints.
 
@@ -367,25 +375,30 @@ def compute_dependencies(
     we will need to solve for S first before we can solve for T.
     """
     res = {}
-    vars = list(cmap.keys())
-    for tv in cmap:
+    for tv in tvars:
         deps = set()
-        for c in cmap[tv]:
-            deps |= get_vars(c.target, vars)
+        for lt in lowers[tv]:
+            deps |= get_vars(lt, tvars)
+        for ut in uppers[tv]:
+            deps |= get_vars(ut, tvars)
+        for other in tvars:
+            if other == tv:
+                continue
+            if (tv, other) in graph or (other, tv) in graph:
+                deps.add(other)
         res[tv] = list(deps)
     return res
 
 
-def check_linear(scc: set[TypeVarId], cmap: dict[TypeVarId, list[Constraint]]) -> bool:
+def check_linear(scc: set[TypeVarId], lowers: Bounds, uppers: Bounds) -> bool:
     """Check there are only linear constraints between type variables in SCC.
 
     Linear are constraints like T <: S (while T <: F[S] are non-linear).
     """
     for tv in scc:
-        if any(
-            get_vars(c.target, list(scc)) and not isinstance(c.target, TypeVarType)
-            for c in cmap[tv]
-        ):
+        if any(get_vars(lt, list(scc)) for lt in lowers[tv]):
+            return False
+        if any(get_vars(ut, list(scc)) for ut in uppers[tv]):
             return False
     return True
 
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index a6dc071f92b0..5712d7375e50 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -1708,8 +1708,7 @@ def unify_generic_callable(
             type.ret_type, target.ret_type, return_constraint_direction
         )
         constraints.extend(c)
-    type_var_ids = [tvar.id for tvar in type.variables]
-    inferred_vars = mypy.solve.solve_constraints(type_var_ids, constraints)
+    inferred_vars, _ = mypy.solve.solve_constraints(type.variables, constraints)
     if None in inferred_vars:
         return None
     non_none_inferred_vars = cast(List[Type], inferred_vars)
diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py
index b46f31327150..f40996145cba 100644
--- a/mypy/test/testconstraints.py
+++ b/mypy/test/testconstraints.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-import pytest
-
 from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints
 from mypy.test.helpers import Suite
 from mypy.test.typefixture import TypeFixture
@@ -22,7 +20,6 @@ def test_basic_type_variable(self) -> None:
                 Constraint(type_var=fx.t, op=direction, target=fx.a)
             ]
 
-    @pytest.mark.xfail
     def test_basic_type_var_tuple_subtype(self) -> None:
         fx = self.fx
         assert infer_constraints(
diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py
index d6c585ef4aaa..5d67203dbbf5 100644
--- a/mypy/test/testsolve.py
+++ b/mypy/test/testsolve.py
@@ -6,7 +6,7 @@
 from mypy.solve import solve_constraints
 from mypy.test.helpers import Suite, assert_equal
 from mypy.test.typefixture import TypeFixture
-from mypy.types import Type, TypeVarId, TypeVarType
+from mypy.types import Type, TypeVarLikeType, TypeVarType
 
 
 class SolveSuite(Suite):
@@ -17,26 +17,24 @@ def test_empty_input(self) -> None:
         self.assert_solve([], [], [])
 
     def test_simple_supertype_constraints(self) -> None:
+        self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.a)], [(self.fx.a, self.fx.o)])
         self.assert_solve(
-            [self.fx.t.id], [self.supc(self.fx.t, self.fx.a)], [(self.fx.a, self.fx.o)]
-        )
-        self.assert_solve(
-            [self.fx.t.id],
+            [self.fx.t],
             [self.supc(self.fx.t, self.fx.a), self.supc(self.fx.t, self.fx.b)],
             [(self.fx.a, self.fx.o)],
         )
 
     def test_simple_subtype_constraints(self) -> None:
-        self.assert_solve([self.fx.t.id], [self.subc(self.fx.t, self.fx.a)], [self.fx.a])
+        self.assert_solve([self.fx.t], [self.subc(self.fx.t, self.fx.a)], [self.fx.a])
         self.assert_solve(
-            [self.fx.t.id],
+            [self.fx.t],
             [self.subc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)],
             [self.fx.b],
         )
 
     def test_both_kinds_of_constraints(self) -> None:
         self.assert_solve(
-            [self.fx.t.id],
+            [self.fx.t],
             [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.a)],
             [(self.fx.b, self.fx.a)],
         )
@@ -44,21 +42,19 @@ def test_both_kinds_of_constraints(self) -> None:
     def test_unsatisfiable_constraints(self) -> None:
         # The constraints are impossible to satisfy.
         self.assert_solve(
-            [self.fx.t.id],
-            [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)],
-            [None],
+            [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], [None]
         )
 
     def test_exactly_specified_result(self) -> None:
         self.assert_solve(
-            [self.fx.t.id],
+            [self.fx.t],
             [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.b)],
             [(self.fx.b, self.fx.b)],
         )
 
     def test_multiple_variables(self) -> None:
         self.assert_solve(
-            [self.fx.t.id, self.fx.s.id],
+            [self.fx.t, self.fx.s],
             [
                 self.supc(self.fx.t, self.fx.b),
                 self.supc(self.fx.s, self.fx.c),
@@ -68,40 +64,38 @@ def test_multiple_variables(self) -> None:
         )
 
     def test_no_constraints_for_var(self) -> None:
-        self.assert_solve([self.fx.t.id], [], [self.fx.uninhabited])
-        self.assert_solve(
-            [self.fx.t.id, self.fx.s.id], [], [self.fx.uninhabited, self.fx.uninhabited]
-        )
+        self.assert_solve([self.fx.t], [], [self.fx.uninhabited])
+        self.assert_solve([self.fx.t, self.fx.s], [], [self.fx.uninhabited, self.fx.uninhabited])
         self.assert_solve(
-            [self.fx.t.id, self.fx.s.id],
+            [self.fx.t, self.fx.s],
             [self.supc(self.fx.s, self.fx.a)],
             [self.fx.uninhabited, (self.fx.a, self.fx.o)],
         )
 
     def test_simple_constraints_with_dynamic_type(self) -> None:
         self.assert_solve(
-            [self.fx.t.id], [self.supc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]
+            [self.fx.t], [self.supc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]
         )
         self.assert_solve(
-            [self.fx.t.id],
+            [self.fx.t],
             [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.anyt)],
             [(self.fx.anyt, self.fx.anyt)],
         )
         self.assert_solve(
-            [self.fx.t.id],
+            [self.fx.t],
             [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.a)],
             [(self.fx.anyt, self.fx.anyt)],
         )
 
         self.assert_solve(
-            [self.fx.t.id], [self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]
+            [self.fx.t], [self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]
         )
         self.assert_solve(
-            [self.fx.t.id],
+            [self.fx.t],
             [self.subc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.anyt)],
             [(self.fx.anyt, self.fx.anyt)],
         )
-        # self.assert_solve([self.fx.t.id],
+        # self.assert_solve([self.fx.t],
         #                   [self.subc(self.fx.t, self.fx.anyt),
         #                    self.subc(self.fx.t, self.fx.a)],
         #                   [(self.fx.anyt, self.fx.anyt)])
@@ -111,20 +105,20 @@ def test_both_normal_and_any_types_in_results(self) -> None:
         # If one of the bounds is any, we promote the other bound to
         # any as well, since otherwise the type range does not make sense.
         self.assert_solve(
-            [self.fx.t.id],
+            [self.fx.t],
             [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.anyt)],
             [(self.fx.anyt, self.fx.anyt)],
         )
 
         self.assert_solve(
-            [self.fx.t.id],
+            [self.fx.t],
             [self.supc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.a)],
             [(self.fx.anyt, self.fx.anyt)],
         )
 
     def assert_solve(
         self,
-        vars: list[TypeVarId],
+        vars: list[TypeVarLikeType],
         constraints: list[Constraint],
         results: list[None | Type | tuple[Type, Type]],
     ) -> None:
@@ -134,7 +128,7 @@ def assert_solve(
                 res.append(r[0])
             else:
                 res.append(r)
-        actual = solve_constraints(vars, constraints)
+        actual, _ = solve_constraints(vars, constraints)
         assert_equal(str(actual), str(res))
 
     def supc(self, type_var: TypeVarType, bound: Type) -> Constraint:
diff --git a/mypy/typeops.py b/mypy/typeops.py
index 519d3de995f5..65ab4340403c 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -313,7 +313,9 @@ class B(A): pass
         original_type = get_proper_type(original_type)
 
         all_ids = func.type_var_ids()
-        typeargs = infer_type_arguments(all_ids, self_param_type, original_type, is_supertype=True)
+        typeargs = infer_type_arguments(
+            func.variables, self_param_type, original_type, is_supertype=True
+        )
         if (
             is_classmethod
             # TODO: why do we need the extra guards here?
@@ -322,7 +324,7 @@ class B(A): pass
         ):
             # In case we call a classmethod through an instance x, fallback to type(x)
             typeargs = infer_type_arguments(
-                all_ids, self_param_type, TypeType(original_type), is_supertype=True
+                func.variables, self_param_type, TypeType(original_type), is_supertype=True
             )
 
         ids = [tid for tid in all_ids if any(tid == t.id for t in get_type_vars(self_param_type))]
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
index 7413e6407d4f..fcdbe641d6d6 100644
--- a/mypy_self_check.ini
+++ b/mypy_self_check.ini
@@ -8,6 +8,7 @@ always_false = MYPYC
 plugins = misc/proper_plugin.py
 python_version = 3.8
 exclude = mypy/typeshed/|mypyc/test-data/|mypyc/lib-rt/
+new_type_inference = True
 enable_error_code = ignore-without-code,redundant-expr
 show_error_code_links = True
 
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 34588bfceb3d..5c510a11b970 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -2911,9 +2911,23 @@ def test1(x: V) -> V: ...
 def test2(x: V, y: V) -> V: ...
 
 reveal_type(dec1(test1))  # N: Revealed type is "def () -> def [T] (T`1) -> T`1"
-# TODO: support this situation
-reveal_type(dec2(test2))  # N: Revealed type is "def (builtins.object) -> def (builtins.object) -> builtins.object"
-[builtins fixtures/paramspec.pyi]
+reveal_type(dec2(test2))  # N: Revealed type is "def [T] (T`3) -> def (T`3) -> T`3"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericCallableNewVariable]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List
+
+S = TypeVar('S')
+T = TypeVar('T')
+U = TypeVar('U')
+
+def dec(f: Callable[[S], T]) -> Callable[[S], T]:
+    ...
+def test(x: List[U]) -> List[U]:
+    ...
+reveal_type(dec(test))  # N: Revealed type is "def [U] (builtins.list[U`-1]) -> builtins.list[U`-1]"
+[builtins fixtures/list.pyi]
 
 [case testInferenceAgainstGenericCallableGenericAlias]
 # flags: --new-type-inference
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index 89e5aea210b4..50acd7d77c8c 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -6554,3 +6554,18 @@ class Snafu(object):
 reveal_type(Snafu().snafu('123'))  # N: Revealed type is "builtins.str"
 reveal_type(Snafu.snafu('123'))  # N: Revealed type is "builtins.str"
 [builtins fixtures/staticmethod.pyi]
+
+[case testOverloadedWithInternalTypeVars]
+# flags: --new-type-inference
+import m
+
+[file m.pyi]
+from typing import Callable, TypeVar, overload
+
+T = TypeVar("T")
+S = TypeVar("S", bound=str)
+
+@overload
+def foo(x: int = ...) -> Callable[[T], T]: ...
+@overload
+def foo(x: S = ...) -> Callable[[T], T]: ...

From 5617cdd03d12ff73622c8d4b496979e0377b1675 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Fri, 4 Aug 2023 16:39:11 +0200
Subject: [PATCH 032/288] Update black pre-commit mirror link (#15815)

The black pre-commit mirror is now hosted at:
https://github.com/psf/black-pre-commit-mirror
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8ee89cbb912f..f2367f63bb3d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ repos:
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
-  - repo: https://github.com/hauntsaninja/black-pre-commit-mirror
+  - repo: https://github.com/psf/black-pre-commit-mirror
     rev: 23.7.0  # must match test-requirements.txt
     hooks:
       - id: black

From 2aaeda4b84a863004a6694a7d562462fbe531ece Mon Sep 17 00:00:00 2001
From: EXPLOSION <git@helvetica.moe>
Date: Wed, 9 Aug 2023 15:17:13 +0900
Subject: [PATCH 033/288] Reconsider constraints involving parameter
 specifications (#15272)

- Fixes https://github.com/python/mypy/issues/15037
- Fixes https://github.com/python/mypy/issues/15065
- Fixes https://github.com/python/mypy/issues/15073
- Fixes https://github.com/python/mypy/issues/15388
- Fixes https://github.com/python/mypy/issues/15086

Yet another part of https://github.com/python/mypy/pull/14903 that's
finally been extracted!
---
 mypy/constraints.py                           | 129 ++++++++++++++----
 mypy/test/testconstraints.py                  |  62 +++++++++
 mypy/test/typefixture.py                      |  42 ++++++
 .../unit/check-parameter-specification.test   |  32 ++++-
 4 files changed, 241 insertions(+), 24 deletions(-)

diff --git a/mypy/constraints.py b/mypy/constraints.py
index 299c6292a259..9c55b56dd70e 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -82,15 +82,19 @@ def __repr__(self) -> str:
         op_str = "<:"
         if self.op == SUPERTYPE_OF:
             op_str = ":>"
-        return f"{self.type_var} {op_str} {self.target}"
+        return f"{self.origin_type_var} {op_str} {self.target}"
 
     def __hash__(self) -> int:
-        return hash((self.type_var, self.op, self.target))
+        return hash((self.origin_type_var, self.op, self.target))
 
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, Constraint):
             return False
-        return (self.type_var, self.op, self.target) == (other.type_var, other.op, other.target)
+        return (self.origin_type_var, self.op, self.target) == (
+            other.origin_type_var,
+            other.op,
+            other.target,
+        )
 
 
 def infer_constraints_for_callable(
@@ -698,25 +702,54 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                             )
                     elif isinstance(tvar, ParamSpecType) and isinstance(mapped_arg, ParamSpecType):
                         suffix = get_proper_type(instance_arg)
+                        prefix = mapped_arg.prefix
+                        length = len(prefix.arg_types)
 
                         if isinstance(suffix, CallableType):
-                            prefix = mapped_arg.prefix
                             from_concat = bool(prefix.arg_types) or suffix.from_concatenate
                             suffix = suffix.copy_modified(from_concatenate=from_concat)
 
                         if isinstance(suffix, (Parameters, CallableType)):
                             # no such thing as variance for ParamSpecs
                             # TODO: is there a case I am missing?
-                            # TODO: constraints between prefixes
-                            prefix = mapped_arg.prefix
-                            suffix = suffix.copy_modified(
-                                suffix.arg_types[len(prefix.arg_types) :],
-                                suffix.arg_kinds[len(prefix.arg_kinds) :],
-                                suffix.arg_names[len(prefix.arg_names) :],
+                            length = min(length, len(suffix.arg_types))
+
+                            constrained_to = suffix.copy_modified(
+                                suffix.arg_types[length:],
+                                suffix.arg_kinds[length:],
+                                suffix.arg_names[length:],
+                            )
+                            constrained_from = mapped_arg.copy_modified(
+                                prefix=prefix.copy_modified(
+                                    prefix.arg_types[length:],
+                                    prefix.arg_kinds[length:],
+                                    prefix.arg_names[length:],
+                                )
                             )
-                            res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix))
+
+                            res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained_to))
+                            res.append(Constraint(constrained_from, SUBTYPE_OF, constrained_to))
                         elif isinstance(suffix, ParamSpecType):
-                            res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix))
+                            suffix_prefix = suffix.prefix
+                            length = min(length, len(suffix_prefix.arg_types))
+
+                            constrained = suffix.copy_modified(
+                                prefix=suffix_prefix.copy_modified(
+                                    suffix_prefix.arg_types[length:],
+                                    suffix_prefix.arg_kinds[length:],
+                                    suffix_prefix.arg_names[length:],
+                                )
+                            )
+                            constrained_from = mapped_arg.copy_modified(
+                                prefix=prefix.copy_modified(
+                                    prefix.arg_types[length:],
+                                    prefix.arg_kinds[length:],
+                                    prefix.arg_names[length:],
+                                )
+                            )
+
+                            res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained))
+                            res.append(Constraint(constrained_from, SUBTYPE_OF, constrained))
                     else:
                         # This case should have been handled above.
                         assert not isinstance(tvar, TypeVarTupleType)
@@ -768,26 +801,56 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                         template_arg, ParamSpecType
                     ):
                         suffix = get_proper_type(mapped_arg)
+                        prefix = template_arg.prefix
+                        length = len(prefix.arg_types)
 
                         if isinstance(suffix, CallableType):
                             prefix = template_arg.prefix
                             from_concat = bool(prefix.arg_types) or suffix.from_concatenate
                             suffix = suffix.copy_modified(from_concatenate=from_concat)
 
+                        # TODO: this is almost a copy-paste of code above: make this into a function
                         if isinstance(suffix, (Parameters, CallableType)):
                             # no such thing as variance for ParamSpecs
                             # TODO: is there a case I am missing?
-                            # TODO: constraints between prefixes
-                            prefix = template_arg.prefix
+                            length = min(length, len(suffix.arg_types))
 
-                            suffix = suffix.copy_modified(
-                                suffix.arg_types[len(prefix.arg_types) :],
-                                suffix.arg_kinds[len(prefix.arg_kinds) :],
-                                suffix.arg_names[len(prefix.arg_names) :],
+                            constrained_to = suffix.copy_modified(
+                                suffix.arg_types[length:],
+                                suffix.arg_kinds[length:],
+                                suffix.arg_names[length:],
                             )
-                            res.append(Constraint(template_arg, SUPERTYPE_OF, suffix))
+                            constrained_from = template_arg.copy_modified(
+                                prefix=prefix.copy_modified(
+                                    prefix.arg_types[length:],
+                                    prefix.arg_kinds[length:],
+                                    prefix.arg_names[length:],
+                                )
+                            )
+
+                            res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained_to))
+                            res.append(Constraint(constrained_from, SUBTYPE_OF, constrained_to))
                         elif isinstance(suffix, ParamSpecType):
-                            res.append(Constraint(template_arg, SUPERTYPE_OF, suffix))
+                            suffix_prefix = suffix.prefix
+                            length = min(length, len(suffix_prefix.arg_types))
+
+                            constrained = suffix.copy_modified(
+                                prefix=suffix_prefix.copy_modified(
+                                    suffix_prefix.arg_types[length:],
+                                    suffix_prefix.arg_kinds[length:],
+                                    suffix_prefix.arg_names[length:],
+                                )
+                            )
+                            constrained_from = template_arg.copy_modified(
+                                prefix=prefix.copy_modified(
+                                    prefix.arg_types[length:],
+                                    prefix.arg_kinds[length:],
+                                    prefix.arg_names[length:],
+                                )
+                            )
+
+                            res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained))
+                            res.append(Constraint(constrained_from, SUBTYPE_OF, constrained))
                     else:
                         # This case should have been handled above.
                         assert not isinstance(tvar, TypeVarTupleType)
@@ -954,9 +1017,19 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                 prefix_len = len(prefix.arg_types)
                 cactual_ps = cactual.param_spec()
 
+                cactual_prefix: Parameters | CallableType
+                if cactual_ps:
+                    cactual_prefix = cactual_ps.prefix
+                else:
+                    cactual_prefix = cactual
+
+                max_prefix_len = len(
+                    [k for k in cactual_prefix.arg_kinds if k in (ARG_POS, ARG_OPT)]
+                )
+                prefix_len = min(prefix_len, max_prefix_len)
+
+                # we could check the prefixes match here, but that should be caught elsewhere.
                 if not cactual_ps:
-                    max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)])
-                    prefix_len = min(prefix_len, max_prefix_len)
                     res.append(
                         Constraint(
                             param_spec,
@@ -970,7 +1043,17 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                         )
                     )
                 else:
-                    res.append(Constraint(param_spec, SUBTYPE_OF, cactual_ps))
+                    # earlier, cactual_prefix = cactual_ps.prefix. thus, this is guaranteed
+                    assert isinstance(cactual_prefix, Parameters)
+
+                    constrained_by = cactual_ps.copy_modified(
+                        prefix=cactual_prefix.copy_modified(
+                            cactual_prefix.arg_types[prefix_len:],
+                            cactual_prefix.arg_kinds[prefix_len:],
+                            cactual_prefix.arg_names[prefix_len:],
+                        )
+                    )
+                    res.append(Constraint(param_spec, SUBTYPE_OF, constrained_by))
 
                 # compare prefixes
                 cactual_prefix = cactual.copy_modified(
diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py
index f40996145cba..be1d435f9cca 100644
--- a/mypy/test/testconstraints.py
+++ b/mypy/test/testconstraints.py
@@ -156,3 +156,65 @@ def test_var_length_tuple_with_fixed_length_tuple(self) -> None:
             Instance(fx.std_tuplei, [fx.a]),
             SUPERTYPE_OF,
         )
+
+    def test_paramspec_constrained_with_concatenate(self) -> None:
+        # for legibility (and my own understanding), `Tester.normal()` is `Tester[P]`
+        #  and `Tester.concatenate()` is `Tester[Concatenate[A, P]]`
+        #  ... and 2nd arg to infer_constraints ends up on LHS of equality
+        fx = self.fx
+
+        # I don't think we can parametrize...
+        for direction in (SUPERTYPE_OF, SUBTYPE_OF):
+            print(f"direction is {direction}")
+            # equiv to: x: Tester[Q] = Tester.normal()
+            assert set(
+                infer_constraints(Instance(fx.gpsi, [fx.p]), Instance(fx.gpsi, [fx.q]), direction)
+            ) == {
+                Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q),
+                Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q),
+            }
+
+            # equiv to: x: Tester[Q] = Tester.concatenate()
+            assert set(
+                infer_constraints(
+                    Instance(fx.gpsi, [fx.p_concatenate]), Instance(fx.gpsi, [fx.q]), direction
+                )
+            ) == {
+                Constraint(type_var=fx.p_concatenate, op=SUPERTYPE_OF, target=fx.q),
+                Constraint(type_var=fx.p_concatenate, op=SUBTYPE_OF, target=fx.q),
+            }
+
+            # equiv to: x: Tester[Concatenate[B, Q]] = Tester.normal()
+            assert set(
+                infer_constraints(
+                    Instance(fx.gpsi, [fx.p]), Instance(fx.gpsi, [fx.q_concatenate]), direction
+                )
+            ) == {
+                Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q_concatenate),
+                Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q_concatenate),
+            }
+
+            # equiv to: x: Tester[Concatenate[B, Q]] = Tester.concatenate()
+            assert set(
+                infer_constraints(
+                    Instance(fx.gpsi, [fx.p_concatenate]),
+                    Instance(fx.gpsi, [fx.q_concatenate]),
+                    direction,
+                )
+            ) == {
+                # this is correct as we assume other parts of mypy will warn that [B] != [A]
+                Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q),
+                Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q),
+            }
+
+            # equiv to: x: Tester[Concatenate[A, Q]] = Tester.concatenate()
+            assert set(
+                infer_constraints(
+                    Instance(fx.gpsi, [fx.p_concatenate]),
+                    Instance(fx.gpsi, [fx.q_concatenate]),
+                    direction,
+                )
+            ) == {
+                Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q),
+                Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q),
+            }
diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py
index bf1500a3cdec..df78eeb62956 100644
--- a/mypy/test/typefixture.py
+++ b/mypy/test/typefixture.py
@@ -5,6 +5,8 @@
 
 from __future__ import annotations
 
+from typing import Sequence
+
 from mypy.nodes import (
     ARG_OPT,
     ARG_POS,
@@ -26,6 +28,9 @@
     Instance,
     LiteralType,
     NoneType,
+    Parameters,
+    ParamSpecFlavor,
+    ParamSpecType,
     Type,
     TypeAliasType,
     TypeOfAny,
@@ -238,6 +243,31 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy
             "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1
         )
 
+        def make_parameter_specification(
+            name: str, id: int, concatenate: Sequence[Type]
+        ) -> ParamSpecType:
+            return ParamSpecType(
+                name,
+                name,
+                id,
+                ParamSpecFlavor.BARE,
+                self.o,
+                AnyType(TypeOfAny.from_omitted_generics),
+                prefix=Parameters(
+                    concatenate, [ARG_POS for _ in concatenate], [None for _ in concatenate]
+                ),
+            )
+
+        self.p = make_parameter_specification("P", 1, [])
+        self.p_concatenate = make_parameter_specification("P", 1, [self.a])
+        self.q = make_parameter_specification("Q", 2, [])
+        self.q_concatenate = make_parameter_specification("Q", 2, [self.b])
+        self.q_concatenate_a = make_parameter_specification("Q", 2, [self.a])
+
+        self.gpsi = self.make_type_info(
+            "GPS", mro=[self.oi], typevars=["P"], paramspec_indexes={0}
+        )
+
     def _add_bool_dunder(self, type_info: TypeInfo) -> None:
         signature = CallableType([], [], [], Instance(self.bool_type_info, []), self.function)
         bool_func = FuncDef("__bool__", [], Block([]))
@@ -299,6 +329,7 @@ def make_type_info(
         bases: list[Instance] | None = None,
         typevars: list[str] | None = None,
         typevar_tuple_index: int | None = None,
+        paramspec_indexes: set[int] | None = None,
         variances: list[int] | None = None,
     ) -> TypeInfo:
         """Make a TypeInfo suitable for use in unit tests."""
@@ -326,6 +357,17 @@ def make_type_info(
                             AnyType(TypeOfAny.from_omitted_generics),
                         )
                     )
+                elif paramspec_indexes is not None and id - 1 in paramspec_indexes:
+                    v.append(
+                        ParamSpecType(
+                            n,
+                            n,
+                            id,
+                            ParamSpecFlavor.BARE,
+                            self.o,
+                            AnyType(TypeOfAny.from_omitted_generics),
+                        )
+                    )
                 else:
                     if variances:
                         variance = variances[id - 1]
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index 114fe1f8438a..f11b9aa599ed 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -776,7 +776,7 @@ _P = ParamSpec("_P")
 
 class Job(Generic[_P]):
     def __init__(self, target: Callable[_P, None]) -> None:
-        self.target = target
+        ...
 
 def func(
     action: Union[Job[int], Callable[[int], None]],
@@ -1535,6 +1535,36 @@ def identity(func: Callable[P, None]) -> Callable[P, None]: ...
 def f(f: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ...
 [builtins fixtures/paramspec.pyi]
 
+[case testComplicatedParamSpecReturnType]
+# regression test for https://github.com/python/mypy/issues/15073
+from typing import TypeVar, Callable
+from typing_extensions import ParamSpec, Concatenate
+
+R = TypeVar("R")
+P = ParamSpec("P")
+
+def f(
+) -> Callable[[Callable[Concatenate[Callable[P, R], P], R]], Callable[P, R]]:
+    def r(fn: Callable[Concatenate[Callable[P, R], P], R]) -> Callable[P, R]: ...
+    return r
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecToParamSpecAssignment]
+# minimized from https://github.com/python/mypy/issues/15037
+# ~ the same as https://github.com/python/mypy/issues/15065
+from typing import Callable
+from typing_extensions import Concatenate, ParamSpec
+
+P = ParamSpec("P")
+
+def f(f: Callable[Concatenate[int, P], None]) -> Callable[P, None]: ...
+
+x: Callable[
+    [Callable[Concatenate[int, P], None]],
+    Callable[P, None],
+] = f
+[builtins fixtures/paramspec.pyi]
+
 [case testParamSpecDecoratorAppliedToGeneric]
 # flags: --new-type-inference
 from typing import Callable, List, TypeVar

From a7c48520560c3adf7176b91d16f4d0750ab8dfa9 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 9 Aug 2023 08:23:03 +0100
Subject: [PATCH 034/288] =?UTF-8?q?Revert=20"Reconsider=20constraints=20in?=
 =?UTF-8?q?volving=20parameter=20specifications=20(#1=E2=80=A6=20(#15832)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

…5272)"

This reverts commit 2aaeda4b84a863004a6694a7d562462fbe531ece.

<!-- If this pull request fixes an issue, add "Fixes #NNN" with the
issue number. -->

Reverts #15272 ("Reconsider constraints involving parameter specifications"),
restoring mypy/constraints.py, the constraint test fixtures, and the
parameter-specification test cases to their state before that change.

<!--
Checklist:
- Read the [Contributing
Guidelines](https://github.com/python/mypy/blob/master/CONTRIBUTING.md)
- Add tests for all changed behaviour.
- If you can't add a test, please explain why and how you verified your
changes work.
- Make sure CI passes.
- Please do not force push to the PR once it has been reviewed.
-->
---
 mypy/constraints.py                           | 129 ++++--------------
 mypy/test/testconstraints.py                  |  62 ---------
 mypy/test/typefixture.py                      |  42 ------
 .../unit/check-parameter-specification.test   |  32 +----
 4 files changed, 24 insertions(+), 241 deletions(-)

diff --git a/mypy/constraints.py b/mypy/constraints.py
index 9c55b56dd70e..299c6292a259 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -82,19 +82,15 @@ def __repr__(self) -> str:
         op_str = "<:"
         if self.op == SUPERTYPE_OF:
             op_str = ":>"
-        return f"{self.origin_type_var} {op_str} {self.target}"
+        return f"{self.type_var} {op_str} {self.target}"
 
     def __hash__(self) -> int:
-        return hash((self.origin_type_var, self.op, self.target))
+        return hash((self.type_var, self.op, self.target))
 
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, Constraint):
             return False
-        return (self.origin_type_var, self.op, self.target) == (
-            other.origin_type_var,
-            other.op,
-            other.target,
-        )
+        return (self.type_var, self.op, self.target) == (other.type_var, other.op, other.target)
 
 
 def infer_constraints_for_callable(
@@ -702,54 +698,25 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                             )
                     elif isinstance(tvar, ParamSpecType) and isinstance(mapped_arg, ParamSpecType):
                         suffix = get_proper_type(instance_arg)
-                        prefix = mapped_arg.prefix
-                        length = len(prefix.arg_types)
 
                         if isinstance(suffix, CallableType):
+                            prefix = mapped_arg.prefix
                             from_concat = bool(prefix.arg_types) or suffix.from_concatenate
                             suffix = suffix.copy_modified(from_concatenate=from_concat)
 
                         if isinstance(suffix, (Parameters, CallableType)):
                             # no such thing as variance for ParamSpecs
                             # TODO: is there a case I am missing?
-                            length = min(length, len(suffix.arg_types))
-
-                            constrained_to = suffix.copy_modified(
-                                suffix.arg_types[length:],
-                                suffix.arg_kinds[length:],
-                                suffix.arg_names[length:],
-                            )
-                            constrained_from = mapped_arg.copy_modified(
-                                prefix=prefix.copy_modified(
-                                    prefix.arg_types[length:],
-                                    prefix.arg_kinds[length:],
-                                    prefix.arg_names[length:],
-                                )
+                            # TODO: constraints between prefixes
+                            prefix = mapped_arg.prefix
+                            suffix = suffix.copy_modified(
+                                suffix.arg_types[len(prefix.arg_types) :],
+                                suffix.arg_kinds[len(prefix.arg_kinds) :],
+                                suffix.arg_names[len(prefix.arg_names) :],
                             )
-
-                            res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained_to))
-                            res.append(Constraint(constrained_from, SUBTYPE_OF, constrained_to))
+                            res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix))
                         elif isinstance(suffix, ParamSpecType):
-                            suffix_prefix = suffix.prefix
-                            length = min(length, len(suffix_prefix.arg_types))
-
-                            constrained = suffix.copy_modified(
-                                prefix=suffix_prefix.copy_modified(
-                                    suffix_prefix.arg_types[length:],
-                                    suffix_prefix.arg_kinds[length:],
-                                    suffix_prefix.arg_names[length:],
-                                )
-                            )
-                            constrained_from = mapped_arg.copy_modified(
-                                prefix=prefix.copy_modified(
-                                    prefix.arg_types[length:],
-                                    prefix.arg_kinds[length:],
-                                    prefix.arg_names[length:],
-                                )
-                            )
-
-                            res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained))
-                            res.append(Constraint(constrained_from, SUBTYPE_OF, constrained))
+                            res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix))
                     else:
                         # This case should have been handled above.
                         assert not isinstance(tvar, TypeVarTupleType)
@@ -801,56 +768,26 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                         template_arg, ParamSpecType
                     ):
                         suffix = get_proper_type(mapped_arg)
-                        prefix = template_arg.prefix
-                        length = len(prefix.arg_types)
 
                         if isinstance(suffix, CallableType):
                             prefix = template_arg.prefix
                             from_concat = bool(prefix.arg_types) or suffix.from_concatenate
                             suffix = suffix.copy_modified(from_concatenate=from_concat)
 
-                        # TODO: this is almost a copy-paste of code above: make this into a function
                         if isinstance(suffix, (Parameters, CallableType)):
                             # no such thing as variance for ParamSpecs
                             # TODO: is there a case I am missing?
-                            length = min(length, len(suffix.arg_types))
+                            # TODO: constraints between prefixes
+                            prefix = template_arg.prefix
 
-                            constrained_to = suffix.copy_modified(
-                                suffix.arg_types[length:],
-                                suffix.arg_kinds[length:],
-                                suffix.arg_names[length:],
+                            suffix = suffix.copy_modified(
+                                suffix.arg_types[len(prefix.arg_types) :],
+                                suffix.arg_kinds[len(prefix.arg_kinds) :],
+                                suffix.arg_names[len(prefix.arg_names) :],
                             )
-                            constrained_from = template_arg.copy_modified(
-                                prefix=prefix.copy_modified(
-                                    prefix.arg_types[length:],
-                                    prefix.arg_kinds[length:],
-                                    prefix.arg_names[length:],
-                                )
-                            )
-
-                            res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained_to))
-                            res.append(Constraint(constrained_from, SUBTYPE_OF, constrained_to))
+                            res.append(Constraint(template_arg, SUPERTYPE_OF, suffix))
                         elif isinstance(suffix, ParamSpecType):
-                            suffix_prefix = suffix.prefix
-                            length = min(length, len(suffix_prefix.arg_types))
-
-                            constrained = suffix.copy_modified(
-                                prefix=suffix_prefix.copy_modified(
-                                    suffix_prefix.arg_types[length:],
-                                    suffix_prefix.arg_kinds[length:],
-                                    suffix_prefix.arg_names[length:],
-                                )
-                            )
-                            constrained_from = template_arg.copy_modified(
-                                prefix=prefix.copy_modified(
-                                    prefix.arg_types[length:],
-                                    prefix.arg_kinds[length:],
-                                    prefix.arg_names[length:],
-                                )
-                            )
-
-                            res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained))
-                            res.append(Constraint(constrained_from, SUBTYPE_OF, constrained))
+                            res.append(Constraint(template_arg, SUPERTYPE_OF, suffix))
                     else:
                         # This case should have been handled above.
                         assert not isinstance(tvar, TypeVarTupleType)
@@ -1017,19 +954,9 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                 prefix_len = len(prefix.arg_types)
                 cactual_ps = cactual.param_spec()
 
-                cactual_prefix: Parameters | CallableType
-                if cactual_ps:
-                    cactual_prefix = cactual_ps.prefix
-                else:
-                    cactual_prefix = cactual
-
-                max_prefix_len = len(
-                    [k for k in cactual_prefix.arg_kinds if k in (ARG_POS, ARG_OPT)]
-                )
-                prefix_len = min(prefix_len, max_prefix_len)
-
-                # we could check the prefixes match here, but that should be caught elsewhere.
                 if not cactual_ps:
+                    max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)])
+                    prefix_len = min(prefix_len, max_prefix_len)
                     res.append(
                         Constraint(
                             param_spec,
@@ -1043,17 +970,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                         )
                     )
                 else:
-                    # earlier, cactual_prefix = cactual_ps.prefix. thus, this is guaranteed
-                    assert isinstance(cactual_prefix, Parameters)
-
-                    constrained_by = cactual_ps.copy_modified(
-                        prefix=cactual_prefix.copy_modified(
-                            cactual_prefix.arg_types[prefix_len:],
-                            cactual_prefix.arg_kinds[prefix_len:],
-                            cactual_prefix.arg_names[prefix_len:],
-                        )
-                    )
-                    res.append(Constraint(param_spec, SUBTYPE_OF, constrained_by))
+                    res.append(Constraint(param_spec, SUBTYPE_OF, cactual_ps))
 
                 # compare prefixes
                 cactual_prefix = cactual.copy_modified(
diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py
index be1d435f9cca..f40996145cba 100644
--- a/mypy/test/testconstraints.py
+++ b/mypy/test/testconstraints.py
@@ -156,65 +156,3 @@ def test_var_length_tuple_with_fixed_length_tuple(self) -> None:
             Instance(fx.std_tuplei, [fx.a]),
             SUPERTYPE_OF,
         )
-
-    def test_paramspec_constrained_with_concatenate(self) -> None:
-        # for legibility (and my own understanding), `Tester.normal()` is `Tester[P]`
-        #  and `Tester.concatenate()` is `Tester[Concatenate[A, P]]`
-        #  ... and 2nd arg to infer_constraints ends up on LHS of equality
-        fx = self.fx
-
-        # I don't think we can parametrize...
-        for direction in (SUPERTYPE_OF, SUBTYPE_OF):
-            print(f"direction is {direction}")
-            # equiv to: x: Tester[Q] = Tester.normal()
-            assert set(
-                infer_constraints(Instance(fx.gpsi, [fx.p]), Instance(fx.gpsi, [fx.q]), direction)
-            ) == {
-                Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q),
-                Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q),
-            }
-
-            # equiv to: x: Tester[Q] = Tester.concatenate()
-            assert set(
-                infer_constraints(
-                    Instance(fx.gpsi, [fx.p_concatenate]), Instance(fx.gpsi, [fx.q]), direction
-                )
-            ) == {
-                Constraint(type_var=fx.p_concatenate, op=SUPERTYPE_OF, target=fx.q),
-                Constraint(type_var=fx.p_concatenate, op=SUBTYPE_OF, target=fx.q),
-            }
-
-            # equiv to: x: Tester[Concatenate[B, Q]] = Tester.normal()
-            assert set(
-                infer_constraints(
-                    Instance(fx.gpsi, [fx.p]), Instance(fx.gpsi, [fx.q_concatenate]), direction
-                )
-            ) == {
-                Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q_concatenate),
-                Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q_concatenate),
-            }
-
-            # equiv to: x: Tester[Concatenate[B, Q]] = Tester.concatenate()
-            assert set(
-                infer_constraints(
-                    Instance(fx.gpsi, [fx.p_concatenate]),
-                    Instance(fx.gpsi, [fx.q_concatenate]),
-                    direction,
-                )
-            ) == {
-                # this is correct as we assume other parts of mypy will warn that [B] != [A]
-                Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q),
-                Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q),
-            }
-
-            # equiv to: x: Tester[Concatenate[A, Q]] = Tester.concatenate()
-            assert set(
-                infer_constraints(
-                    Instance(fx.gpsi, [fx.p_concatenate]),
-                    Instance(fx.gpsi, [fx.q_concatenate]),
-                    direction,
-                )
-            ) == {
-                Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q),
-                Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q),
-            }
diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py
index df78eeb62956..bf1500a3cdec 100644
--- a/mypy/test/typefixture.py
+++ b/mypy/test/typefixture.py
@@ -5,8 +5,6 @@
 
 from __future__ import annotations
 
-from typing import Sequence
-
 from mypy.nodes import (
     ARG_OPT,
     ARG_POS,
@@ -28,9 +26,6 @@
     Instance,
     LiteralType,
     NoneType,
-    Parameters,
-    ParamSpecFlavor,
-    ParamSpecType,
     Type,
     TypeAliasType,
     TypeOfAny,
@@ -243,31 +238,6 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy
             "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1
         )
 
-        def make_parameter_specification(
-            name: str, id: int, concatenate: Sequence[Type]
-        ) -> ParamSpecType:
-            return ParamSpecType(
-                name,
-                name,
-                id,
-                ParamSpecFlavor.BARE,
-                self.o,
-                AnyType(TypeOfAny.from_omitted_generics),
-                prefix=Parameters(
-                    concatenate, [ARG_POS for _ in concatenate], [None for _ in concatenate]
-                ),
-            )
-
-        self.p = make_parameter_specification("P", 1, [])
-        self.p_concatenate = make_parameter_specification("P", 1, [self.a])
-        self.q = make_parameter_specification("Q", 2, [])
-        self.q_concatenate = make_parameter_specification("Q", 2, [self.b])
-        self.q_concatenate_a = make_parameter_specification("Q", 2, [self.a])
-
-        self.gpsi = self.make_type_info(
-            "GPS", mro=[self.oi], typevars=["P"], paramspec_indexes={0}
-        )
-
     def _add_bool_dunder(self, type_info: TypeInfo) -> None:
         signature = CallableType([], [], [], Instance(self.bool_type_info, []), self.function)
         bool_func = FuncDef("__bool__", [], Block([]))
@@ -329,7 +299,6 @@ def make_type_info(
         bases: list[Instance] | None = None,
         typevars: list[str] | None = None,
         typevar_tuple_index: int | None = None,
-        paramspec_indexes: set[int] | None = None,
         variances: list[int] | None = None,
     ) -> TypeInfo:
         """Make a TypeInfo suitable for use in unit tests."""
@@ -357,17 +326,6 @@ def make_type_info(
                             AnyType(TypeOfAny.from_omitted_generics),
                         )
                     )
-                elif paramspec_indexes is not None and id - 1 in paramspec_indexes:
-                    v.append(
-                        ParamSpecType(
-                            n,
-                            n,
-                            id,
-                            ParamSpecFlavor.BARE,
-                            self.o,
-                            AnyType(TypeOfAny.from_omitted_generics),
-                        )
-                    )
                 else:
                     if variances:
                         variance = variances[id - 1]
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index f11b9aa599ed..114fe1f8438a 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -776,7 +776,7 @@ _P = ParamSpec("_P")
 
 class Job(Generic[_P]):
     def __init__(self, target: Callable[_P, None]) -> None:
-        ...
+        self.target = target
 
 def func(
     action: Union[Job[int], Callable[[int], None]],
@@ -1535,36 +1535,6 @@ def identity(func: Callable[P, None]) -> Callable[P, None]: ...
 def f(f: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ...
 [builtins fixtures/paramspec.pyi]
 
-[case testComplicatedParamSpecReturnType]
-# regression test for https://github.com/python/mypy/issues/15073
-from typing import TypeVar, Callable
-from typing_extensions import ParamSpec, Concatenate
-
-R = TypeVar("R")
-P = ParamSpec("P")
-
-def f(
-) -> Callable[[Callable[Concatenate[Callable[P, R], P], R]], Callable[P, R]]:
-    def r(fn: Callable[Concatenate[Callable[P, R], P], R]) -> Callable[P, R]: ...
-    return r
-[builtins fixtures/paramspec.pyi]
-
-[case testParamSpecToParamSpecAssignment]
-# minimized from https://github.com/python/mypy/issues/15037
-# ~ the same as https://github.com/python/mypy/issues/15065
-from typing import Callable
-from typing_extensions import Concatenate, ParamSpec
-
-P = ParamSpec("P")
-
-def f(f: Callable[Concatenate[int, P], None]) -> Callable[P, None]: ...
-
-x: Callable[
-    [Callable[Concatenate[int, P], None]],
-    Callable[P, None],
-] = f
-[builtins fixtures/paramspec.pyi]
-
 [case testParamSpecDecoratorAppliedToGeneric]
 # flags: --new-type-inference
 from typing import Callable, List, TypeVar

From 8c219539380208bf5b8d189aafd4dec10f941f98 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 9 Aug 2023 16:33:38 +0100
Subject: [PATCH 035/288] New type inference: add support for upper bounds and
 values (#15813)

This is the third PR in a series, following
https://github.com/python/mypy/pull/15287 and
https://github.com/python/mypy/pull/15754. This one is quite simple: I
just add basic support for polymorphic inference involving type
variables with upper bounds and values. Complete support would be
quite complicated, and it would be a corner case of an already rare
situation. Finally, it is written in a way that is easy to tune in the
future.

I also use this PR to add some unit tests for all three PRs so far; the
other two PRs only added integration tests (and I clean up existing unit
tests as well).
---
 mypy/solve.py                      |  80 +++++++++--
 mypy/test/testsolve.py             | 205 +++++++++++++++++++++++++----
 mypy/test/typefixture.py           |   4 +
 test-data/unit/check-generics.test |  28 ++++
 4 files changed, 277 insertions(+), 40 deletions(-)

diff --git a/mypy/solve.py b/mypy/solve.py
index 02df90aff1e1..72b3d6f26618 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -10,11 +10,13 @@
 from mypy.expandtype import expand_type
 from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort
 from mypy.join import join_types
-from mypy.meet import meet_types
+from mypy.meet import meet_type_list, meet_types
 from mypy.subtypes import is_subtype
 from mypy.typeops import get_type_vars
 from mypy.types import (
     AnyType,
+    Instance,
+    NoneType,
     ProperType,
     Type,
     TypeOfAny,
@@ -108,7 +110,7 @@ def solve_constraints(
             else:
                 candidate = AnyType(TypeOfAny.special_form)
             res.append(candidate)
-    return res, [originals[tv] for tv in free_vars]
+    return res, free_vars
 
 
 def solve_with_dependent(
@@ -116,7 +118,7 @@ def solve_with_dependent(
     constraints: list[Constraint],
     original_vars: list[TypeVarId],
     originals: dict[TypeVarId, TypeVarLikeType],
-) -> tuple[Solutions, list[TypeVarId]]:
+) -> tuple[Solutions, list[TypeVarLikeType]]:
     """Solve set of constraints that may depend on each other, like T <: List[S].
 
     The whole algorithm consists of five steps:
@@ -135,23 +137,24 @@ def solve_with_dependent(
     raw_batches = list(topsort(prepare_sccs(sccs, dmap)))
 
     free_vars = []
+    free_solutions = {}
     for scc in raw_batches[0]:
         # If there are no bounds on this SCC, then the only meaningful solution we can
         # express, is that each variable is equal to a new free variable. For example,
         # if we have T <: S, S <: U, we deduce: T = S = U = <free>.
         if all(not lowers[tv] and not uppers[tv] for tv in scc):
-            # For convenience with current type application machinery, we use a stable
-            # choice that prefers the original type variables (not polymorphic ones) in SCC.
-            # TODO: be careful about upper bounds (or values) when introducing free vars.
-            free_vars.append(sorted(scc, key=lambda x: (x not in original_vars, x.raw_id))[0])
+            best_free = choose_free([originals[tv] for tv in scc], original_vars)
+            if best_free:
+                free_vars.append(best_free.id)
+                free_solutions[best_free.id] = best_free
 
     # Update lowers/uppers with free vars, so these can now be used
     # as valid solutions.
-    for l, u in graph.copy():
+    for l, u in graph:
         if l in free_vars:
-            lowers[u].add(originals[l])
+            lowers[u].add(free_solutions[l])
         if u in free_vars:
-            uppers[l].add(originals[u])
+            uppers[l].add(free_solutions[u])
 
     # Flatten the SCCs that are independent, we can solve them together,
     # since we don't need to update any targets in between.
@@ -166,7 +169,7 @@ def solve_with_dependent(
     for flat_batch in batches:
         res = solve_iteratively(flat_batch, graph, lowers, uppers)
         solutions.update(res)
-    return solutions, free_vars
+    return solutions, [free_solutions[tv] for tv in free_vars]
 
 
 def solve_iteratively(
@@ -276,6 +279,61 @@ def solve_one(lowers: Iterable[Type], uppers: Iterable[Type]) -> Type | None:
     return candidate
 
 
+def choose_free(
+    scc: list[TypeVarLikeType], original_vars: list[TypeVarId]
+) -> TypeVarLikeType | None:
+    """Choose the best solution for an SCC containing only type variables.
+
+    This is needed to preserve e.g. the upper bound in a situation like this:
+        def dec(f: Callable[[T], S]) -> Callable[[T], S]: ...
+
+        @dec
+        def test(x: U) -> U: ...
+
+    where U <: A.
+    """
+
+    if len(scc) == 1:
+        # Fast path, choice is trivial.
+        return scc[0]
+
+    common_upper_bound = meet_type_list([t.upper_bound for t in scc])
+    common_upper_bound_p = get_proper_type(common_upper_bound)
+    # We include None for when strict-optional is disabled.
+    if isinstance(common_upper_bound_p, (UninhabitedType, NoneType)):
+        # This will cause to infer <nothing>, which is better than a free TypeVar
+        # that has an upper bound <nothing>.
+        return None
+
+    values: list[Type] = []
+    for tv in scc:
+        if isinstance(tv, TypeVarType) and tv.values:
+            if values:
+                # It is too tricky to support multiple TypeVars with values
+                # within the same SCC.
+                return None
+            values = tv.values.copy()
+
+    if values and not is_trivial_bound(common_upper_bound_p):
+        # If there are both values and upper bound present, we give up,
+        # since type variables having both are not supported.
+        return None
+
+    # For convenience with current type application machinery, we use a stable
+    # choice that prefers the original type variables (not polymorphic ones) in SCC.
+    best = sorted(scc, key=lambda x: (x.id not in original_vars, x.id.raw_id))[0]
+    if isinstance(best, TypeVarType):
+        return best.copy_modified(values=values, upper_bound=common_upper_bound)
+    if is_trivial_bound(common_upper_bound_p):
+        # TODO: support more cases for ParamSpecs/TypeVarTuples
+        return best
+    return None
+
+
+def is_trivial_bound(tp: ProperType) -> bool:
+    return isinstance(tp, Instance) and tp.type.fullname == "builtins.object"
+
+
 def normalize_constraints(
     constraints: list[Constraint], vars: list[TypeVarId]
 ) -> list[Constraint]:
diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py
index 5d67203dbbf5..6566b03ef5e9 100644
--- a/mypy/test/testsolve.py
+++ b/mypy/test/testsolve.py
@@ -3,10 +3,10 @@
 from __future__ import annotations
 
 from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint
-from mypy.solve import solve_constraints
+from mypy.solve import Bounds, Graph, solve_constraints, transitive_closure
 from mypy.test.helpers import Suite, assert_equal
 from mypy.test.typefixture import TypeFixture
-from mypy.types import Type, TypeVarLikeType, TypeVarType
+from mypy.types import Type, TypeVarId, TypeVarLikeType, TypeVarType
 
 
 class SolveSuite(Suite):
@@ -17,11 +17,11 @@ def test_empty_input(self) -> None:
         self.assert_solve([], [], [])
 
     def test_simple_supertype_constraints(self) -> None:
-        self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.a)], [(self.fx.a, self.fx.o)])
+        self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.a)], [self.fx.a])
         self.assert_solve(
             [self.fx.t],
             [self.supc(self.fx.t, self.fx.a), self.supc(self.fx.t, self.fx.b)],
-            [(self.fx.a, self.fx.o)],
+            [self.fx.a],
         )
 
     def test_simple_subtype_constraints(self) -> None:
@@ -36,7 +36,7 @@ def test_both_kinds_of_constraints(self) -> None:
         self.assert_solve(
             [self.fx.t],
             [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.a)],
-            [(self.fx.b, self.fx.a)],
+            [self.fx.b],
         )
 
     def test_unsatisfiable_constraints(self) -> None:
@@ -49,7 +49,7 @@ def test_exactly_specified_result(self) -> None:
         self.assert_solve(
             [self.fx.t],
             [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.b)],
-            [(self.fx.b, self.fx.b)],
+            [self.fx.b],
         )
 
     def test_multiple_variables(self) -> None:
@@ -60,7 +60,7 @@ def test_multiple_variables(self) -> None:
                 self.supc(self.fx.s, self.fx.c),
                 self.subc(self.fx.t, self.fx.a),
             ],
-            [(self.fx.b, self.fx.a), (self.fx.c, self.fx.o)],
+            [self.fx.b, self.fx.c],
         )
 
     def test_no_constraints_for_var(self) -> None:
@@ -69,36 +69,32 @@ def test_no_constraints_for_var(self) -> None:
         self.assert_solve(
             [self.fx.t, self.fx.s],
             [self.supc(self.fx.s, self.fx.a)],
-            [self.fx.uninhabited, (self.fx.a, self.fx.o)],
+            [self.fx.uninhabited, self.fx.a],
         )
 
     def test_simple_constraints_with_dynamic_type(self) -> None:
-        self.assert_solve(
-            [self.fx.t], [self.supc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]
-        )
+        self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.anyt)], [self.fx.anyt])
         self.assert_solve(
             [self.fx.t],
             [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.anyt)],
-            [(self.fx.anyt, self.fx.anyt)],
+            [self.fx.anyt],
         )
         self.assert_solve(
             [self.fx.t],
             [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.a)],
-            [(self.fx.anyt, self.fx.anyt)],
+            [self.fx.anyt],
         )
 
-        self.assert_solve(
-            [self.fx.t], [self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)]
-        )
+        self.assert_solve([self.fx.t], [self.subc(self.fx.t, self.fx.anyt)], [self.fx.anyt])
         self.assert_solve(
             [self.fx.t],
             [self.subc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.anyt)],
-            [(self.fx.anyt, self.fx.anyt)],
+            [self.fx.anyt],
         )
         # self.assert_solve([self.fx.t],
         #                   [self.subc(self.fx.t, self.fx.anyt),
         #                    self.subc(self.fx.t, self.fx.a)],
-        #                   [(self.fx.anyt, self.fx.anyt)])
+        #                   [self.fx.anyt])
         # TODO: figure out what this should be after changes to meet(any, X)
 
     def test_both_normal_and_any_types_in_results(self) -> None:
@@ -107,29 +103,180 @@ def test_both_normal_and_any_types_in_results(self) -> None:
         self.assert_solve(
             [self.fx.t],
             [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.anyt)],
-            [(self.fx.anyt, self.fx.anyt)],
+            [self.fx.anyt],
         )
 
         self.assert_solve(
             [self.fx.t],
             [self.supc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.a)],
-            [(self.fx.anyt, self.fx.anyt)],
+            [self.fx.anyt],
+        )
+
+    def test_poly_no_constraints(self) -> None:
+        self.assert_solve(
+            [self.fx.t, self.fx.u],
+            [],
+            [self.fx.uninhabited, self.fx.uninhabited],
+            allow_polymorphic=True,
+        )
+
+    def test_poly_trivial_free(self) -> None:
+        self.assert_solve(
+            [self.fx.t, self.fx.u],
+            [self.subc(self.fx.t, self.fx.a)],
+            [self.fx.a, self.fx.u],
+            [self.fx.u],
+            allow_polymorphic=True,
+        )
+
+    def test_poly_free_pair(self) -> None:
+        self.assert_solve(
+            [self.fx.t, self.fx.u],
+            [self.subc(self.fx.t, self.fx.u)],
+            [self.fx.t, self.fx.t],
+            [self.fx.t],
+            allow_polymorphic=True,
+        )
+
+    def test_poly_free_pair_with_bounds(self) -> None:
+        t_prime = self.fx.t.copy_modified(upper_bound=self.fx.b)
+        self.assert_solve(
+            [self.fx.t, self.fx.ub],
+            [self.subc(self.fx.t, self.fx.ub)],
+            [t_prime, t_prime],
+            [t_prime],
+            allow_polymorphic=True,
+        )
+
+    def test_poly_free_pair_with_bounds_uninhabited(self) -> None:
+        self.assert_solve(
+            [self.fx.ub, self.fx.uc],
+            [self.subc(self.fx.ub, self.fx.uc)],
+            [self.fx.uninhabited, self.fx.uninhabited],
+            [],
+            allow_polymorphic=True,
+        )
+
+    def test_poly_bounded_chain(self) -> None:
+        # B <: T <: U <: S <: A
+        self.assert_solve(
+            [self.fx.t, self.fx.u, self.fx.s],
+            [
+                self.supc(self.fx.t, self.fx.b),
+                self.subc(self.fx.t, self.fx.u),
+                self.subc(self.fx.u, self.fx.s),
+                self.subc(self.fx.s, self.fx.a),
+            ],
+            [self.fx.b, self.fx.b, self.fx.b],
+            allow_polymorphic=True,
+        )
+
+    def test_poly_reverse_overlapping_chain(self) -> None:
+        # A :> T <: S :> B
+        self.assert_solve(
+            [self.fx.t, self.fx.s],
+            [
+                self.subc(self.fx.t, self.fx.s),
+                self.subc(self.fx.t, self.fx.a),
+                self.supc(self.fx.s, self.fx.b),
+            ],
+            [self.fx.a, self.fx.a],
+            allow_polymorphic=True,
+        )
+
+    def test_poly_reverse_split_chain(self) -> None:
+        # B :> T <: S :> A
+        self.assert_solve(
+            [self.fx.t, self.fx.s],
+            [
+                self.subc(self.fx.t, self.fx.s),
+                self.subc(self.fx.t, self.fx.b),
+                self.supc(self.fx.s, self.fx.a),
+            ],
+            [self.fx.b, self.fx.a],
+            allow_polymorphic=True,
+        )
+
+    def test_poly_unsolvable_chain(self) -> None:
+        # A <: T <: U <: S <: B
+        self.assert_solve(
+            [self.fx.t, self.fx.u, self.fx.s],
+            [
+                self.supc(self.fx.t, self.fx.a),
+                self.subc(self.fx.t, self.fx.u),
+                self.subc(self.fx.u, self.fx.s),
+                self.subc(self.fx.s, self.fx.b),
+            ],
+            [None, None, None],
+            allow_polymorphic=True,
+        )
+
+    def test_simple_chain_closure(self) -> None:
+        self.assert_transitive_closure(
+            [self.fx.t.id, self.fx.s.id],
+            [
+                self.supc(self.fx.t, self.fx.b),
+                self.subc(self.fx.t, self.fx.s),
+                self.subc(self.fx.s, self.fx.a),
+            ],
+            {(self.fx.t.id, self.fx.s.id)},
+            {self.fx.t.id: {self.fx.b}, self.fx.s.id: {self.fx.b}},
+            {self.fx.t.id: {self.fx.a}, self.fx.s.id: {self.fx.a}},
+        )
+
+    def test_reverse_chain_closure(self) -> None:
+        self.assert_transitive_closure(
+            [self.fx.t.id, self.fx.s.id],
+            [
+                self.subc(self.fx.t, self.fx.s),
+                self.subc(self.fx.t, self.fx.a),
+                self.supc(self.fx.s, self.fx.b),
+            ],
+            {(self.fx.t.id, self.fx.s.id)},
+            {self.fx.t.id: set(), self.fx.s.id: {self.fx.b}},
+            {self.fx.t.id: {self.fx.a}, self.fx.s.id: set()},
+        )
+
+    def test_secondary_constraint_closure(self) -> None:
+        self.assert_transitive_closure(
+            [self.fx.t.id, self.fx.s.id],
+            [self.supc(self.fx.s, self.fx.gt), self.subc(self.fx.s, self.fx.ga)],
+            set(),
+            {self.fx.t.id: set(), self.fx.s.id: {self.fx.gt}},
+            {self.fx.t.id: {self.fx.a}, self.fx.s.id: {self.fx.ga}},
         )
 
     def assert_solve(
         self,
         vars: list[TypeVarLikeType],
         constraints: list[Constraint],
-        results: list[None | Type | tuple[Type, Type]],
+        results: list[None | Type],
+        free_vars: list[TypeVarLikeType] | None = None,
+        allow_polymorphic: bool = False,
+    ) -> None:
+        if free_vars is None:
+            free_vars = []
+        actual, actual_free = solve_constraints(
+            vars, constraints, allow_polymorphic=allow_polymorphic
+        )
+        assert_equal(actual, results)
+        assert_equal(actual_free, free_vars)
+
+    def assert_transitive_closure(
+        self,
+        vars: list[TypeVarId],
+        constraints: list[Constraint],
+        graph: Graph,
+        lowers: Bounds,
+        uppers: Bounds,
     ) -> None:
-        res: list[Type | None] = []
-        for r in results:
-            if isinstance(r, tuple):
-                res.append(r[0])
-            else:
-                res.append(r)
-        actual, _ = solve_constraints(vars, constraints)
-        assert_equal(str(actual), str(res))
+        actual_graph, actual_lowers, actual_uppers = transitive_closure(vars, constraints)
+        # Add trivial elements.
+        for v in vars:
+            graph.add((v, v))
+        assert_equal(actual_graph, graph)
+        assert_equal(dict(actual_lowers), lowers)
+        assert_equal(dict(actual_uppers), uppers)
 
     def supc(self, type_var: TypeVarType, bound: Type) -> Constraint:
         return Constraint(type_var, SUPERTYPE_OF, bound)
diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py
index bf1500a3cdec..81af765f8585 100644
--- a/mypy/test/typefixture.py
+++ b/mypy/test/typefixture.py
@@ -219,6 +219,10 @@ def make_type_var(
         self._add_bool_dunder(self.bool_type_info)
         self._add_bool_dunder(self.ai)
 
+        # TypeVars with non-trivial bounds
+        self.ub = make_type_var("UB", 5, [], self.b, variance)  # UB`5 (type variable)
+        self.uc = make_type_var("UC", 6, [], self.c, variance)  # UC`6 (type variable)
+
         def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType:
             return TypeVarTupleType(
                 name,
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 5c510a11b970..d1842a74d634 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -3007,3 +3007,31 @@ class C:
 
 c: C
 reveal_type(c.test())  # N: Revealed type is "__main__.C"
+
+[case testInferenceAgainstGenericBoundsAndValues]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List
+
+class B: ...
+class C(B): ...
+
+S = TypeVar('S')
+T = TypeVar('T')
+UB = TypeVar('UB', bound=B)
+UC = TypeVar('UC', bound=C)
+V = TypeVar('V', int, str)
+
+def dec1(f: Callable[[S], T]) -> Callable[[S], List[T]]:
+    ...
+def dec2(f: Callable[[UC], T]) -> Callable[[UC], List[T]]:
+    ...
+def id1(x: UB) -> UB:
+    ...
+def id2(x: V) -> V:
+    ...
+
+reveal_type(dec1(id1))  # N: Revealed type is "def [S <: __main__.B] (S`1) -> builtins.list[S`1]"
+reveal_type(dec1(id2))  # N: Revealed type is "def [S in (builtins.int, builtins.str)] (S`3) -> builtins.list[S`3]"
+reveal_type(dec2(id1))  # N: Revealed type is "def [UC <: __main__.C] (UC`5) -> builtins.list[UC`5]"
+reveal_type(dec2(id2))  # N: Revealed type is "def (<nothing>) -> builtins.list[<nothing>]" \
+                        # E: Argument 1 to "dec2" has incompatible type "Callable[[V], V]"; expected "Callable[[<nothing>], <nothing>]"

From 78339b97dc911c8c6841184eaddbbc30d0e406da Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Thu, 10 Aug 2023 01:50:20 -0700
Subject: [PATCH 036/288] Use error subcodes to differentiate import errors
 (#14740)

Resolves #9789

Users could use `--disable-error-code=import-untyped` to only ignore
errors about libraries not having stubs, but continue to get errors
about e.g. typos in an import name.

The error subcode mechanism is new from #14570. Note that users will now
get a different error code depending on whether or not a package is
installed, and may not know that they can use the parent error code to
ignore the issue regardless. I think this is okay; in general, type
checking results can change if you run them in two different
environments. Note also that with `--warn-unused-ignore` / `--strict`,
mypy will complain about not having the most specific error code.
---
 mypy/build.py                        | 11 ++++++++++-
 mypy/errorcodes.py                   |  6 ++++++
 mypy/errors.py                       |  8 ++++++--
 test-data/unit/check-errorcodes.test | 14 +++++++-------
 4 files changed, 29 insertions(+), 10 deletions(-)

diff --git a/mypy/build.py b/mypy/build.py
index 5a0a481ae1a2..eed5005d182e 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -2780,7 +2780,16 @@ def module_not_found(
     else:
         daemon = manager.options.fine_grained_incremental
         msg, notes = reason.error_message_templates(daemon)
-        errors.report(line, 0, msg.format(module=target), code=codes.IMPORT)
+        if reason == ModuleNotFoundReason.NOT_FOUND:
+            code = codes.IMPORT_NOT_FOUND
+        elif (
+            reason == ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS
+            or reason == ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED
+        ):
+            code = codes.IMPORT_UNTYPED
+        else:
+            code = codes.IMPORT
+        errors.report(line, 0, msg.format(module=target), code=code)
         top_level, second_level = get_top_two_prefixes(target)
         if second_level in legacy_bundled_packages or second_level in non_bundled_packages:
             top_level = second_level
diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py
index 717629ad1f11..e7d0c16f2d2d 100644
--- a/mypy/errorcodes.py
+++ b/mypy/errorcodes.py
@@ -107,6 +107,12 @@ def __hash__(self) -> int:
 IMPORT: Final = ErrorCode(
     "import", "Require that imported module can be found or has stubs", "General"
 )
+IMPORT_NOT_FOUND: Final = ErrorCode(
+    "import-not-found", "Require that imported module can be found", "General", sub_code_of=IMPORT
+)
+IMPORT_UNTYPED: Final = ErrorCode(
+    "import-untyped", "Require that imported module has stubs", "General", sub_code_of=IMPORT
+)
 NO_REDEF: Final = ErrorCode("no-redef", "Check that each name is defined once", "General")
 FUNC_RETURNS_VALUE: Final = ErrorCode(
     "func-returns-value", "Check that called function returns a value in value context", "General"
diff --git a/mypy/errors.py b/mypy/errors.py
index 2badac3e3d6d..680b7f1d31ea 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -8,7 +8,7 @@
 from typing_extensions import Literal, TypeAlias as _TypeAlias
 
 from mypy import errorcodes as codes
-from mypy.errorcodes import IMPORT, ErrorCode
+from mypy.errorcodes import IMPORT, IMPORT_NOT_FOUND, IMPORT_UNTYPED, ErrorCode
 from mypy.message_registry import ErrorMessage
 from mypy.options import Options
 from mypy.scope import Scope
@@ -510,7 +510,11 @@ def add_error_info(self, info: ErrorInfo) -> None:
             if info.message in self.only_once_messages:
                 return
             self.only_once_messages.add(info.message)
-        if self.seen_import_error and info.code is not IMPORT and self.has_many_errors():
+        if (
+            self.seen_import_error
+            and info.code not in (IMPORT, IMPORT_UNTYPED, IMPORT_NOT_FOUND)
+            and self.has_many_errors()
+        ):
             # Missing stubs can easily cause thousands of errors about
             # Any types, especially when upgrading to mypy 0.900,
             # which no longer bundles third-party library stubs. Avoid
diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test
index 1efbab7de322..796e1c1ea98e 100644
--- a/test-data/unit/check-errorcodes.test
+++ b/test-data/unit/check-errorcodes.test
@@ -183,7 +183,7 @@ from defusedxml import xyz  # type: ignore[import]
 
 [case testErrorCodeBadIgnore]
 import nostub # type: ignore xyz  # E: Invalid "type: ignore" comment  [syntax] \
-                                  # E: Cannot find implementation or library stub for module named "nostub"  [import] \
+                                  # E: Cannot find implementation or library stub for module named "nostub"  [import-not-found] \
                                   # N: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
 import nostub # type: ignore[  # E: Invalid "type: ignore" comment  [syntax]
 import nostub # type: ignore[foo  # E: Invalid "type: ignore" comment  [syntax]
@@ -211,7 +211,7 @@ def f(x,  # type: int  # type: ignore[
     pass
 [out]
 main:2: error: Invalid "type: ignore" comment  [syntax]
-main:2: error: Cannot find implementation or library stub for module named "nostub"  [import]
+main:2: error: Cannot find implementation or library stub for module named "nostub"  [import-not-found]
 main:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
 main:3: error: Invalid "type: ignore" comment  [syntax]
 main:4: error: Invalid "type: ignore" comment  [syntax]
@@ -522,12 +522,12 @@ if int() is str():  # E: Non-overlapping identity check (left operand type: "int
 [builtins fixtures/primitives.pyi]
 
 [case testErrorCodeMissingModule]
-from defusedxml import xyz  # E: Cannot find implementation or library stub for module named "defusedxml"  [import]
-from nonexistent import foobar  # E: Cannot find implementation or library stub for module named "nonexistent"  [import]
-import nonexistent2  # E: Cannot find implementation or library stub for module named "nonexistent2"  [import]
-from nonexistent3 import *  # E: Cannot find implementation or library stub for module named "nonexistent3"  [import]
+from defusedxml import xyz  # E: Cannot find implementation or library stub for module named "defusedxml"  [import-not-found]
+from nonexistent import foobar  # E: Cannot find implementation or library stub for module named "nonexistent"  [import-not-found]
+import nonexistent2  # E: Cannot find implementation or library stub for module named "nonexistent2"  [import-not-found]
+from nonexistent3 import *  # E: Cannot find implementation or library stub for module named "nonexistent3"  [import-not-found]
 from pkg import bad  # E: Module "pkg" has no attribute "bad"  [attr-defined]
-from pkg.bad2 import bad3  # E: Cannot find implementation or library stub for module named "pkg.bad2"  [import] \
+from pkg.bad2 import bad3  # E: Cannot find implementation or library stub for module named "pkg.bad2"  [import-not-found] \
                            # N: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
 [file pkg/__init__.py]
 

From eab5b5083adf1b54ab1691f5ecc5a846863420de Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Thu, 10 Aug 2023 02:32:08 -0700
Subject: [PATCH 037/288] Document new import error codes (#15840)

See https://github.com/python/mypy/pull/14740

My PR was pretty old and predates the nice check to ensure error codes
are documented.
---
 docs/source/error_code_list.rst | 38 +++++++++++++++++++++++++++++----
 1 file changed, 34 insertions(+), 4 deletions(-)

diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst
index f935e025e589..f7f702aa7fcb 100644
--- a/docs/source/error_code_list.rst
+++ b/docs/source/error_code_list.rst
@@ -648,8 +648,18 @@ the issue:
 
 .. _code-import:
 
-Check that import target can be found [import]
-----------------------------------------------
+Check for an issue with imports [import]
+----------------------------------------
+
+Mypy generates an error if it can't resolve an ``import`` statement.
+This is a parent error code of ``import-not-found`` and ``import-untyped``.
+
+See :ref:`ignore-missing-imports` for how to work around these errors.
+
+.. _code-import-not-found:
+
+Check that import target can be found [import-not-found]
+--------------------------------------------------------
 
 Mypy generates an error if it can't find the source code or a stub file
 for an imported module.
@@ -658,11 +668,31 @@ Example:
 
 .. code-block:: python
 
-    # Error: Cannot find implementation or library stub for module named 'acme'  [import]
-    import acme
+    # Error: Cannot find implementation or library stub for module named "m0dule_with_typo"  [import-not-found]
+    import m0dule_with_typo
 
 See :ref:`ignore-missing-imports` for how to work around these errors.
 
+.. _code-import-untyped:
+
+Check that import target has stubs [import-untyped]
+---------------------------------------------------
+
+Mypy generates an error if it can find the source code for an imported module,
+but that module does not provide type annotations (via :ref:`PEP 561 <installed-packages>`).
+
+Example:
+
+.. code-block:: python
+
+    # Error: Library stubs not installed for "bs4"  [import-untyped]
+    import bs4
+    # Error: Skipping analyzing "no_py_typed": module is installed, but missing library stubs or py.typed marker  [import-untyped]
+    import no_py_typed
+
+In some cases, these errors can be fixed by installing an appropriate
+stub package. See :ref:`ignore-missing-imports` for more details.
+
 .. _code-no-redef:
 
 Check that each name is defined once [no-redef]

From d0d63b4644a6bb99793b32548c5197cf7600544f Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Thu, 10 Aug 2023 13:03:39 +0300
Subject: [PATCH 038/288] The oldest CAPI version we support right now is 3.7
 (#15839)

Looks like `capi_version < 3.7` is not supported, so I changed the
lowest version to be `3.7`.

Based on the discord discussion.
---
 mypyc/codegen/emitclass.py  | 8 ++------
 mypyc/codegen/emitmodule.py | 5 ++---
 mypyc/common.py             | 5 -----
 mypyc/test/testutil.py      | 4 ++--
 4 files changed, 6 insertions(+), 16 deletions(-)

diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py
index 84d19d69d377..62e1b4b2dea1 100644
--- a/mypyc/codegen/emitclass.py
+++ b/mypyc/codegen/emitclass.py
@@ -18,7 +18,7 @@
     generate_richcompare_wrapper,
     generate_set_del_item_wrapper,
 )
-from mypyc.common import BITMAP_BITS, BITMAP_TYPE, NATIVE_PREFIX, PREFIX, REG_PREFIX, use_fastcall
+from mypyc.common import BITMAP_BITS, BITMAP_TYPE, NATIVE_PREFIX, PREFIX, REG_PREFIX
 from mypyc.ir.class_ir import ClassIR, VTableEntries
 from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD, FuncDecl, FuncIR
 from mypyc.ir.rtypes import RTuple, RType, object_rprimitive
@@ -794,11 +794,7 @@ def generate_methods_table(cl: ClassIR, name: str, emitter: Emitter) -> None:
             continue
         emitter.emit_line(f'{{"{fn.name}",')
         emitter.emit_line(f" (PyCFunction){PREFIX}{fn.cname(emitter.names)},")
-        if use_fastcall(emitter.capi_version):
-            flags = ["METH_FASTCALL"]
-        else:
-            flags = ["METH_VARARGS"]
-        flags.append("METH_KEYWORDS")
+        flags = ["METH_FASTCALL", "METH_KEYWORDS"]
         if fn.decl.kind == FUNC_STATICMETHOD:
             flags.append("METH_STATIC")
         elif fn.decl.kind == FUNC_CLASSMETHOD:
diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py
index f360fabbe8f6..caf2058ea7c4 100644
--- a/mypyc/codegen/emitmodule.py
+++ b/mypyc/codegen/emitmodule.py
@@ -43,7 +43,6 @@
     TOP_LEVEL_NAME,
     shared_lib_name,
     short_id_from_name,
-    use_fastcall,
     use_vectorcall,
 )
 from mypyc.errors import Errors
@@ -1107,8 +1106,8 @@ def is_fastcall_supported(fn: FuncIR, capi_version: tuple[int, int]) -> bool:
             # We can use vectorcalls (PEP 590) when supported
             return use_vectorcall(capi_version)
         # TODO: Support fastcall for __init__.
-        return use_fastcall(capi_version) and fn.name != "__init__"
-    return use_fastcall(capi_version)
+        return fn.name != "__init__"
+    return True
 
 
 def collect_literals(fn: FuncIR, literals: Literals) -> None:
diff --git a/mypyc/common.py b/mypyc/common.py
index 4615bf30d742..3d07f6c3d0d3 100644
--- a/mypyc/common.py
+++ b/mypyc/common.py
@@ -98,11 +98,6 @@ def short_name(name: str) -> str:
     return name
 
 
-def use_fastcall(capi_version: tuple[int, int]) -> bool:
-    # We can use METH_FASTCALL for faster wrapper functions on Python 3.7+.
-    return capi_version >= (3, 7)
-
-
 def use_vectorcall(capi_version: tuple[int, int]) -> bool:
     # We can use vectorcalls to make calls on Python 3.8+ (PEP 590).
     return capi_version >= (3, 8)
diff --git a/mypyc/test/testutil.py b/mypyc/test/testutil.py
index 796811a6363c..6446af3427af 100644
--- a/mypyc/test/testutil.py
+++ b/mypyc/test/testutil.py
@@ -102,7 +102,7 @@ def build_ir_for_single_file2(
 
     # By default generate IR compatible with the earliest supported Python C API.
     # If a test needs more recent API features, this should be overridden.
-    compiler_options = compiler_options or CompilerOptions(capi_version=(3, 5))
+    compiler_options = compiler_options or CompilerOptions(capi_version=(3, 7))
     options = Options()
     options.show_traceback = True
     options.hide_error_codes = True
@@ -272,7 +272,7 @@ def infer_ir_build_options_from_test_name(name: str) -> CompilerOptions | None:
         return None
     if "_32bit" in name and not IS_32_BIT_PLATFORM:
         return None
-    options = CompilerOptions(strip_asserts="StripAssert" in name, capi_version=(3, 5))
+    options = CompilerOptions(strip_asserts="StripAssert" in name, capi_version=(3, 7))
     # A suffix like _python3.8 is used to set the target C API version.
     m = re.search(r"_python([3-9]+)_([0-9]+)(_|\b)", name)
     if m:

From c7d2fa1525c9cbf0ab8859fd9ded526658677c28 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Thu, 10 Aug 2023 11:32:50 -0700
Subject: [PATCH 039/288] Fix over eager types-google-cloud-ndb suggestion
 (#15347)

Fixes #15343
---
 mypy/build.py                     | 30 ++++++++++++++++--------------
 mypy/modulefinder.py              |  9 ++-------
 mypy/stubinfo.py                  |  6 ++----
 mypy/util.py                      | 11 -----------
 test-data/unit/check-modules.test | 24 +++++++++++++-----------
 5 files changed, 33 insertions(+), 47 deletions(-)

diff --git a/mypy/build.py b/mypy/build.py
index eed5005d182e..525d5f436e7e 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -55,7 +55,6 @@
     DecodeError,
     decode_python_encoding,
     get_mypy_comments,
-    get_top_two_prefixes,
     hash_digest,
     is_stub_package_file,
     is_sub_path,
@@ -91,12 +90,7 @@
 from mypy.plugins.default import DefaultPlugin
 from mypy.renaming import LimitedVariableRenameVisitor, VariableRenameVisitor
 from mypy.stats import dump_type_stats
-from mypy.stubinfo import (
-    is_legacy_bundled_package,
-    legacy_bundled_packages,
-    non_bundled_packages,
-    stub_package_name,
-)
+from mypy.stubinfo import legacy_bundled_packages, non_bundled_packages, stub_distribution_name
 from mypy.types import Type
 from mypy.typestate import reset_global_state, type_state
 from mypy.version import __version__
@@ -2665,14 +2659,18 @@ def find_module_and_diagnose(
         # search path or the module has not been installed.
 
         ignore_missing_imports = options.ignore_missing_imports
-        top_level, second_level = get_top_two_prefixes(id)
+
+        id_components = id.split(".")
         # Don't honor a global (not per-module) ignore_missing_imports
         # setting for modules that used to have bundled stubs, as
         # otherwise updating mypy can silently result in new false
         # negatives. (Unless there are stubs but they are incomplete.)
         global_ignore_missing_imports = manager.options.ignore_missing_imports
         if (
-            (is_legacy_bundled_package(top_level) or is_legacy_bundled_package(second_level))
+            any(
+                ".".join(id_components[:i]) in legacy_bundled_packages
+                for i in range(len(id_components), 0, -1)
+            )
             and global_ignore_missing_imports
             and not options.ignore_missing_imports_per_module
             and result is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED
@@ -2790,15 +2788,19 @@ def module_not_found(
         else:
             code = codes.IMPORT
         errors.report(line, 0, msg.format(module=target), code=code)
-        top_level, second_level = get_top_two_prefixes(target)
-        if second_level in legacy_bundled_packages or second_level in non_bundled_packages:
-            top_level = second_level
+
+        components = target.split(".")
+        for i in range(len(components), 0, -1):
+            module = ".".join(components[:i])
+            if module in legacy_bundled_packages or module in non_bundled_packages:
+                break
+
         for note in notes:
             if "{stub_dist}" in note:
-                note = note.format(stub_dist=stub_package_name(top_level))
+                note = note.format(stub_dist=stub_distribution_name(module))
             errors.report(line, 0, note, severity="note", only_once=True, code=codes.IMPORT)
         if reason is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED:
-            manager.missing_stub_packages.add(stub_package_name(top_level))
+            manager.missing_stub_packages.add(stub_distribution_name(module))
     errors.set_import_context(save_import_context)
 
 
diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py
index c780015c639d..c36a382848bf 100644
--- a/mypy/modulefinder.py
+++ b/mypy/modulefinder.py
@@ -337,14 +337,9 @@ def _find_module_non_stub_helper(
             # If this is not a directory then we can't traverse further into it
             if not self.fscache.isdir(dir_path):
                 break
-        if approved_stub_package_exists(components[0]):
-            if len(components) == 1 or (
-                self.find_module(components[0])
-                is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED
-            ):
+        for i in range(len(components), 0, -1):
+            if approved_stub_package_exists(".".join(components[:i])):
                 return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED
-        if approved_stub_package_exists(".".join(components[:2])):
-            return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED
         if plausible_match:
             return ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS
         else:
diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py
index e6e549ad280f..0d76a6215238 100644
--- a/mypy/stubinfo.py
+++ b/mypy/stubinfo.py
@@ -9,15 +9,13 @@ def approved_stub_package_exists(prefix: str) -> bool:
     return is_legacy_bundled_package(prefix) or prefix in non_bundled_packages
 
 
-def stub_package_name(prefix: str) -> str:
+def stub_distribution_name(prefix: str) -> str:
     return legacy_bundled_packages.get(prefix) or non_bundled_packages[prefix]
 
 
 # Stubs for these third-party packages used to be shipped with mypy.
 #
 # Map package name to PyPI stub distribution name.
-#
-# Package name can have one or two components ('a' or 'a.b').
 legacy_bundled_packages = {
     "aiofiles": "types-aiofiles",
     "bleach": "types-bleach",
@@ -116,7 +114,7 @@ def stub_package_name(prefix: str) -> str:
     "flask_sqlalchemy": "types-Flask-SQLAlchemy",
     "fpdf": "types-fpdf2",
     "gdb": "types-gdb",
-    "google.cloud": "types-google-cloud-ndb",
+    "google.cloud.ndb": "types-google-cloud-ndb",
     "hdbcli": "types-hdbcli",
     "html5lib": "types-html5lib",
     "httplib2": "types-httplib2",
diff --git a/mypy/util.py b/mypy/util.py
index 268ba8f9de81..8a079c5256bc 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -308,17 +308,6 @@ def get_prefix(fullname: str) -> str:
     return fullname.rsplit(".", 1)[0]
 
 
-def get_top_two_prefixes(fullname: str) -> tuple[str, str]:
-    """Return one and two component prefixes of a fully qualified name.
-
-    Given 'a.b.c.d', return ('a', 'a.b').
-
-    If fullname has only one component, return (fullname, fullname).
-    """
-    components = fullname.split(".", 3)
-    return components[0], ".".join(components[:2])
-
-
 def correct_relative_import(
     cur_mod_id: str, relative: int, target: str, is_cur_package_init_file: bool
 ) -> tuple[str, bool]:
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
index bdf860cba89d..3da5996ed274 100644
--- a/test-data/unit/check-modules.test
+++ b/test-data/unit/check-modules.test
@@ -3121,26 +3121,28 @@ import google.cloud
 from google.cloud import x
 
 [case testErrorFromGoogleCloud]
-import google.cloud
+import google.cloud  # E: Cannot find implementation or library stub for module named "google.cloud" \
+                     # E: Cannot find implementation or library stub for module named "google"
 from google.cloud import x
-import google.non_existent
+import google.non_existent  # E: Cannot find implementation or library stub for module named "google.non_existent"
 from google.non_existent import x
-[out]
-main:1: error: Library stubs not installed for "google.cloud"
-main:1: note: Hint: "python3 -m pip install types-google-cloud-ndb"
-main:1: note: (or run "mypy --install-types" to install all missing stub packages)
-main:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
-main:1: error: Cannot find implementation or library stub for module named "google"
-main:3: error: Cannot find implementation or library stub for module named "google.non_existent"
+
+import google.cloud.ndb  # E: Library stubs not installed for "google.cloud.ndb" \
+                         # N: Hint: "python3 -m pip install types-google-cloud-ndb" \
+                         # N: (or run "mypy --install-types" to install all missing stub packages) \
+                         # N: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
+from google.cloud import ndb
 
 [case testMissingSubmoduleOfInstalledStubPackage]
 import bleach.xyz
 from bleach.abc import fgh
 [file bleach/__init__.pyi]
 [out]
-main:1: error: Cannot find implementation or library stub for module named "bleach.xyz"
+main:1: error: Library stubs not installed for "bleach.xyz"
+main:1: note: Hint: "python3 -m pip install types-bleach"
+main:1: note: (or run "mypy --install-types" to install all missing stub packages)
 main:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
-main:2: error: Cannot find implementation or library stub for module named "bleach.abc"
+main:2: error: Library stubs not installed for "bleach.abc"
 
 [case testMissingSubmoduleOfInstalledStubPackageIgnored]
 # flags: --ignore-missing-imports

From cfd01d9f7fdceb5eb8e367e8f1a6a1efb5ede38c Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Thu, 10 Aug 2023 13:49:27 -0700
Subject: [PATCH 040/288] Improve error code disabling documentation (#15841)

Provide a concrete example of what file level comments would look like.
Sort text into sections a little better.
---
 docs/source/error_codes.rst | 23 ++++++++++-------------
 1 file changed, 10 insertions(+), 13 deletions(-)

diff --git a/docs/source/error_codes.rst b/docs/source/error_codes.rst
index 65ae0e5816e8..a71168cadf30 100644
--- a/docs/source/error_codes.rst
+++ b/docs/source/error_codes.rst
@@ -43,11 +43,7 @@ Silencing errors based on error codes
 You can use a special comment ``# type: ignore[code, ...]`` to only
 ignore errors with a specific error code (or codes) on a particular
 line.  This can be used even if you have not configured mypy to show
-error codes. Currently it's only possible to disable arbitrary error
-codes on individual lines using this comment.
-
-You can also use :option:`--disable-error-code <mypy --disable-error-code>`
-to disable specific error codes globally.
+error codes.
 
 This example shows how to ignore an error about an imported name mypy
 thinks is undefined:
@@ -58,17 +54,17 @@ thinks is undefined:
    # definition.
    from foolib import foo  # type: ignore[attr-defined]
 
-
-Enabling specific error codes
------------------------------
+Enabling/disabling specific error codes globally
+------------------------------------------------
 
 There are command-line flags and config file settings for enabling
 certain optional error codes, such as :option:`--disallow-untyped-defs <mypy --disallow-untyped-defs>`,
 which enables the ``no-untyped-def`` error code.
 
-You can use :option:`--enable-error-code <mypy --enable-error-code>` to
-enable specific error codes that don't have a dedicated command-line
-flag or config file setting.
+You can use :option:`--enable-error-code <mypy --enable-error-code>`
+and :option:`--disable-error-code <mypy --disable-error-code>`
+to enable or disable specific error codes that don't have a dedicated
+command-line flag or config file setting.
 
 Per-module enabling/disabling error codes
 -----------------------------------------
@@ -107,8 +103,9 @@ still keep the other two error codes enabled. The overall logic is following:
 
 * Individual config sections *adjust* them per glob/module
 
-* Inline ``# mypy: ...`` comments can further *adjust* them for a specific
-  module
+* Inline ``# mypy: disable-error-code="..."`` comments can further
+  *adjust* them for a specific module.
+  For example: ``# mypy: disable-error-code="truthy-bool, ignore-without-code"``
 
 So one can e.g. enable some code globally, disable it for all tests in
 the corresponding config section, and then re-enable it with an inline

From 7f22aaa5783e25c2bbac81ad520d5b7702b39e4f Mon Sep 17 00:00:00 2001
From: Marcel Telka <marcel@telka.sk>
Date: Sat, 12 Aug 2023 09:09:32 +0200
Subject: [PATCH 041/288] Add tox.ini to sdist (#15853)

Fixes #14142
---
 MANIFEST.in | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/MANIFEST.in b/MANIFEST.in
index 1c26ae16fc78..b77b762b4852 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -39,9 +39,10 @@ graft test-data
 include conftest.py
 include runtests.py
 include pytest.ini
+include tox.ini
 
 include LICENSE mypyc/README.md
-exclude .gitmodules CONTRIBUTING.md CREDITS ROADMAP.md tox.ini action.yml .editorconfig
+exclude .gitmodules CONTRIBUTING.md CREDITS ROADMAP.md action.yml .editorconfig
 exclude .git-blame-ignore-revs .pre-commit-config.yaml
 
 global-exclude *.py[cod]

From 89c6596f0285b2f4b4b4c93f8f5696cae7a4398e Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
 <41898282+github-actions[bot]@users.noreply.github.com>
Date: Sat, 12 Aug 2023 00:10:26 -0700
Subject: [PATCH 042/288] Sync typeshed (#15792)

Source commit:

https://github.com/python/typeshed/commit/fe2ebd69af14d376825f21182d415223bd037485

Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Co-authored-by: hauntsaninja <hauntsaninja@gmail.com>
---
 mypy/typeshed/stdlib/_collections_abc.pyi     |   2 +
 mypy/typeshed/stdlib/_weakref.pyi             |   3 +
 mypy/typeshed/stdlib/abc.pyi                  |   2 +-
 mypy/typeshed/stdlib/argparse.pyi             |  10 +-
 mypy/typeshed/stdlib/array.pyi                |   6 +
 mypy/typeshed/stdlib/asyncio/futures.pyi      |   2 +-
 mypy/typeshed/stdlib/builtins.pyi             |  34 +++++-
 mypy/typeshed/stdlib/collections/__init__.pyi |   7 +-
 mypy/typeshed/stdlib/contextvars.pyi          |  14 ++-
 mypy/typeshed/stdlib/datetime.pyi             |   6 +
 mypy/typeshed/stdlib/email/charset.pyi        |   4 +-
 mypy/typeshed/stdlib/email/policy.pyi         |   8 +-
 mypy/typeshed/stdlib/enum.pyi                 |   1 +
 mypy/typeshed/stdlib/ftplib.pyi               |   2 +-
 mypy/typeshed/stdlib/http/client.pyi          |   4 +
 mypy/typeshed/stdlib/importlib/machinery.pyi  |   1 +
 .../stdlib/importlib/metadata/__init__.pyi    |   3 +
 mypy/typeshed/stdlib/inspect.pyi              |   2 +
 mypy/typeshed/stdlib/ipaddress.pyi            |  13 ++-
 .../stdlib/multiprocessing/managers.pyi       |  10 +-
 mypy/typeshed/stdlib/pdb.pyi                  |   3 +
 mypy/typeshed/stdlib/pydoc.pyi                |   2 +
 mypy/typeshed/stdlib/re.pyi                   |   2 +
 mypy/typeshed/stdlib/shelve.pyi               |   4 +-
 mypy/typeshed/stdlib/sqlite3/dbapi2.pyi       |  13 ++-
 mypy/typeshed/stdlib/ssl.pyi                  |   1 +
 mypy/typeshed/stdlib/tkinter/ttk.pyi          | 106 ++++++++++--------
 mypy/typeshed/stdlib/traceback.pyi            |   7 +-
 mypy/typeshed/stdlib/tracemalloc.pyi          |   5 +
 mypy/typeshed/stdlib/types.pyi                |  13 +++
 mypy/typeshed/stdlib/typing.pyi               |  11 +-
 mypy/typeshed/stdlib/typing_extensions.pyi    |  90 ++++++++++++++-
 mypy/typeshed/stdlib/unittest/case.pyi        |   3 +
 mypy/typeshed/stdlib/unittest/mock.pyi        |   6 +-
 mypy/typeshed/stdlib/urllib/request.pyi       |   8 +-
 mypy/typeshed/stdlib/uuid.pyi                 |   1 +
 mypy/typeshed/stdlib/weakref.pyi              |   9 +-
 mypy/typeshed/stdlib/winreg.pyi               |   1 +
 .../typeshed/stdlib/xml/etree/ElementTree.pyi |   1 +
 test-data/unit/pythoneval.test                |   6 +-
 40 files changed, 326 insertions(+), 100 deletions(-)

diff --git a/mypy/typeshed/stdlib/_collections_abc.pyi b/mypy/typeshed/stdlib/_collections_abc.pyi
index ba2f638d81c9..2b57f157a0e4 100644
--- a/mypy/typeshed/stdlib/_collections_abc.pyi
+++ b/mypy/typeshed/stdlib/_collections_abc.pyi
@@ -69,6 +69,7 @@ _VT_co = TypeVar("_VT_co", covariant=True)  # Value type covariant containers.
 
 @final
 class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]):  # undocumented
+    def __eq__(self, __value: object) -> bool: ...
     if sys.version_info >= (3, 10):
         @property
         def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
@@ -81,6 +82,7 @@ class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]):  # undocumented
 
 @final
 class dict_items(ItemsView[_KT_co, _VT_co], Generic[_KT_co, _VT_co]):  # undocumented
+    def __eq__(self, __value: object) -> bool: ...
     if sys.version_info >= (3, 10):
         @property
         def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
diff --git a/mypy/typeshed/stdlib/_weakref.pyi b/mypy/typeshed/stdlib/_weakref.pyi
index 2402d0bfe721..ce0f681248ab 100644
--- a/mypy/typeshed/stdlib/_weakref.pyi
+++ b/mypy/typeshed/stdlib/_weakref.pyi
@@ -11,17 +11,20 @@ _T = TypeVar("_T")
 
 @final
 class CallableProxyType(Generic[_C]):  # "weakcallableproxy"
+    def __eq__(self, __value: object) -> bool: ...
     def __getattr__(self, attr: str) -> Any: ...
     __call__: _C
 
 @final
 class ProxyType(Generic[_T]):  # "weakproxy"
+    def __eq__(self, __value: object) -> bool: ...
     def __getattr__(self, attr: str) -> Any: ...
 
 class ReferenceType(Generic[_T]):
     __callback__: Callable[[ReferenceType[_T]], Any]
     def __new__(cls, __o: _T, __callback: Callable[[ReferenceType[_T]], Any] | None = ...) -> Self: ...
     def __call__(self) -> _T | None: ...
+    def __eq__(self, __value: object) -> bool: ...
     def __hash__(self) -> int: ...
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, item: Any) -> GenericAlias: ...
diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi
index ec04d8f85d12..43893a298341 100644
--- a/mypy/typeshed/stdlib/abc.pyi
+++ b/mypy/typeshed/stdlib/abc.pyi
@@ -31,7 +31,7 @@ def abstractmethod(funcobj: _FuncT) -> _FuncT: ...
 
 class abstractclassmethod(classmethod[_T, _P, _R_co]):
     __isabstractmethod__: Literal[True]
-    def __init__(self, callable: Callable[Concatenate[_T, _P], _R_co]) -> None: ...
+    def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ...
 
 class abstractstaticmethod(staticmethod[_P, _R_co]):
     __isabstractmethod__: Literal[True]
diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi
index e41048516dd9..b59dd56ab921 100644
--- a/mypy/typeshed/stdlib/argparse.pyi
+++ b/mypy/typeshed/stdlib/argparse.pyi
@@ -85,7 +85,7 @@ class _ActionsContainer:
         self,
         *name_or_flags: str,
         action: _ActionStr | type[Action] = ...,
-        nargs: int | _NArgsStr | _SUPPRESS_T = ...,
+        nargs: int | _NArgsStr | _SUPPRESS_T | None = None,
         const: Any = ...,
         default: Any = ...,
         type: Callable[[str], _T] | FileType = ...,
@@ -171,7 +171,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
         ) -> None: ...
 
     @overload
-    def parse_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ...  # type: ignore[misc]
+    def parse_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ...  # type: ignore[misc]
     @overload
     def parse_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ...
     @overload
@@ -210,7 +210,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
     def format_usage(self) -> str: ...
     def format_help(self) -> str: ...
     @overload
-    def parse_known_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> tuple[Namespace, list[str]]: ...  # type: ignore[misc]
+    def parse_known_args(self, args: Sequence[str] | None = None, namespace: None = None) -> tuple[Namespace, list[str]]: ...  # type: ignore[misc]
     @overload
     def parse_known_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ...
     @overload
@@ -219,13 +219,13 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
     def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ...
     def error(self, message: str) -> NoReturn: ...
     @overload
-    def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ...  # type: ignore[misc]
+    def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ...  # type: ignore[misc]
     @overload
     def parse_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ...
     @overload
     def parse_intermixed_args(self, *, namespace: _N) -> _N: ...
     @overload
-    def parse_known_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> tuple[Namespace, list[str]]: ...  # type: ignore[misc]
+    def parse_known_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> tuple[Namespace, list[str]]: ...  # type: ignore[misc]
     @overload
     def parse_known_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ...
     @overload
diff --git a/mypy/typeshed/stdlib/array.pyi b/mypy/typeshed/stdlib/array.pyi
index 8b003503bc9b..b533f9240073 100644
--- a/mypy/typeshed/stdlib/array.pyi
+++ b/mypy/typeshed/stdlib/array.pyi
@@ -6,6 +6,9 @@ from collections.abc import Iterable
 from typing import Any, Generic, MutableSequence, TypeVar, overload  # noqa: Y022
 from typing_extensions import Literal, Self, SupportsIndex, TypeAlias
 
+if sys.version_info >= (3, 12):
+    from types import GenericAlias
+
 _IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"]
 _FloatTypeCode: TypeAlias = Literal["f", "d"]
 _UnicodeTypeCode: TypeAlias = Literal["u"]
@@ -70,6 +73,7 @@ class array(MutableSequence[_T], Generic[_T]):
     def __setitem__(self, __key: slice, __value: array[_T]) -> None: ...
     def __delitem__(self, __key: SupportsIndex | slice) -> None: ...
     def __add__(self, __value: array[_T]) -> array[_T]: ...
+    def __eq__(self, __value: object) -> bool: ...
     def __ge__(self, __value: array[_T]) -> bool: ...
     def __gt__(self, __value: array[_T]) -> bool: ...
     def __iadd__(self, __value: array[_T]) -> Self: ...  # type: ignore[override]
@@ -82,5 +86,7 @@ class array(MutableSequence[_T], Generic[_T]):
     def __deepcopy__(self, __unused: Any) -> array[_T]: ...
     def __buffer__(self, __flags: int) -> memoryview: ...
     def __release_buffer__(self, __buffer: memoryview) -> None: ...
+    if sys.version_info >= (3, 12):
+        def __class_getitem__(cls, item: Any) -> GenericAlias: ...
 
 ArrayType = array
diff --git a/mypy/typeshed/stdlib/asyncio/futures.pyi b/mypy/typeshed/stdlib/asyncio/futures.pyi
index 79209f5ed4fb..af05425d02a2 100644
--- a/mypy/typeshed/stdlib/asyncio/futures.pyi
+++ b/mypy/typeshed/stdlib/asyncio/futures.pyi
@@ -31,7 +31,7 @@ def isfuture(obj: object) -> TypeGuard[Future[Any]]: ...
 class Future(Awaitable[_T], Iterable[_T]):
     _state: str
     @property
-    def _exception(self) -> BaseException: ...
+    def _exception(self) -> BaseException | None: ...
     _blocking: bool
     @property
     def _log_traceback(self) -> bool: ...
diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index d6ca39049c77..66c644d09a4d 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -131,6 +131,9 @@ class staticmethod(Generic[_P, _R_co]):
     @property
     def __isabstractmethod__(self) -> bool: ...
     def __init__(self, __f: Callable[_P, _R_co]) -> None: ...
+    @overload
+    def __get__(self, __instance: None, __owner: type) -> Callable[_P, _R_co]: ...
+    @overload
     def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[_P, _R_co]: ...
     if sys.version_info >= (3, 10):
         __name__: str
@@ -141,16 +144,19 @@ class staticmethod(Generic[_P, _R_co]):
 
 class classmethod(Generic[_T, _P, _R_co]):
     @property
-    def __func__(self) -> Callable[Concatenate[_T, _P], _R_co]: ...
+    def __func__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ...
     @property
     def __isabstractmethod__(self) -> bool: ...
-    def __init__(self, __f: Callable[Concatenate[_T, _P], _R_co]) -> None: ...
+    def __init__(self, __f: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ...
+    @overload
     def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[_P, _R_co]: ...
+    @overload
+    def __get__(self, __instance: None, __owner: type[_T]) -> Callable[_P, _R_co]: ...
     if sys.version_info >= (3, 10):
         __name__: str
         __qualname__: str
         @property
-        def __wrapped__(self) -> Callable[Concatenate[_T, _P], _R_co]: ...
+        def __wrapped__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ...
 
 class type:
     @property
@@ -781,6 +787,8 @@ class memoryview(Sequence[int]):
     def __contains__(self, __x: object) -> bool: ...
     def __iter__(self) -> Iterator[int]: ...
     def __len__(self) -> int: ...
+    def __eq__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
     @overload
     def __setitem__(self, __key: slice, __value: ReadableBuffer) -> None: ...
     @overload
@@ -848,6 +856,7 @@ class slice:
     def __init__(self, __stop: Any) -> None: ...
     @overload
     def __init__(self, __start: Any, __stop: Any, __step: Any = ...) -> None: ...
+    def __eq__(self, __value: object) -> bool: ...
     __hash__: ClassVar[None]  # type: ignore[assignment]
     def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ...
 
@@ -864,6 +873,8 @@ class tuple(Sequence[_T_co], Generic[_T_co]):
     def __le__(self, __value: tuple[_T_co, ...]) -> bool: ...
     def __gt__(self, __value: tuple[_T_co, ...]) -> bool: ...
     def __ge__(self, __value: tuple[_T_co, ...]) -> bool: ...
+    def __eq__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
     @overload
     def __add__(self, __value: tuple[_T_co, ...]) -> tuple[_T_co, ...]: ...
     @overload
@@ -952,6 +963,7 @@ class list(MutableSequence[_T], Generic[_T]):
     def __ge__(self, __value: list[_T]) -> bool: ...
     def __lt__(self, __value: list[_T]) -> bool: ...
     def __le__(self, __value: list[_T]) -> bool: ...
+    def __eq__(self, __value: object) -> bool: ...
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
 
@@ -991,19 +1003,24 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     @overload
     def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]: ...
     # Positional-only in dict, but not in MutableMapping
-    @overload
+    @overload  # type: ignore[override]
     def get(self, __key: _KT) -> _VT | None: ...
     @overload
-    def get(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ...
+    def get(self, __key: _KT, __default: _VT) -> _VT: ...
+    @overload
+    def get(self, __key: _KT, __default: _T) -> _VT | _T: ...
     @overload
     def pop(self, __key: _KT) -> _VT: ...
     @overload
-    def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ...
+    def pop(self, __key: _KT, __default: _VT) -> _VT: ...
+    @overload
+    def pop(self, __key: _KT, __default: _T) -> _VT | _T: ...
     def __len__(self) -> int: ...
     def __getitem__(self, __key: _KT) -> _VT: ...
     def __setitem__(self, __key: _KT, __value: _VT) -> None: ...
     def __delitem__(self, __key: _KT) -> None: ...
     def __iter__(self) -> Iterator[_KT]: ...
+    def __eq__(self, __value: object) -> bool: ...
     if sys.version_info >= (3, 8):
         def __reversed__(self) -> Iterator[_KT]: ...
     __hash__: ClassVar[None]  # type: ignore[assignment]
@@ -1058,6 +1075,7 @@ class set(MutableSet[_T], Generic[_T]):
     def __lt__(self, __value: AbstractSet[object]) -> bool: ...
     def __ge__(self, __value: AbstractSet[object]) -> bool: ...
     def __gt__(self, __value: AbstractSet[object]) -> bool: ...
+    def __eq__(self, __value: object) -> bool: ...
     __hash__: ClassVar[None]  # type: ignore[assignment]
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
@@ -1086,6 +1104,8 @@ class frozenset(AbstractSet[_T_co], Generic[_T_co]):
     def __lt__(self, __value: AbstractSet[object]) -> bool: ...
     def __ge__(self, __value: AbstractSet[object]) -> bool: ...
     def __gt__(self, __value: AbstractSet[object]) -> bool: ...
+    def __eq__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
 
@@ -1111,6 +1131,8 @@ class range(Sequence[int]):
     def count(self, __value: int) -> int: ...
     def index(self, __value: int) -> int: ...  # type: ignore[override]
     def __len__(self) -> int: ...
+    def __eq__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
     def __contains__(self, __key: object) -> bool: ...
     def __iter__(self) -> Iterator[int]: ...
     @overload
diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi
index 36d79101908d..8ceecd1f354e 100644
--- a/mypy/typeshed/stdlib/collections/__init__.pyi
+++ b/mypy/typeshed/stdlib/collections/__init__.pyi
@@ -153,6 +153,7 @@ class UserString(Sequence[UserString]):
     def __gt__(self, string: str | UserString) -> bool: ...
     def __ge__(self, string: str | UserString) -> bool: ...
     def __eq__(self, string: object) -> bool: ...
+    def __hash__(self) -> int: ...
     def __contains__(self, char: object) -> bool: ...
     def __len__(self) -> int: ...
     def __getitem__(self, index: SupportsIndex | slice) -> Self: ...
@@ -257,6 +258,7 @@ class deque(MutableSequence[_T], Generic[_T]):
     def __le__(self, __value: deque[_T]) -> bool: ...
     def __gt__(self, __value: deque[_T]) -> bool: ...
     def __ge__(self, __value: deque[_T]) -> bool: ...
+    def __eq__(self, __value: object) -> bool: ...
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
 
@@ -365,6 +367,7 @@ class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
     def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ...
     @overload
     def setdefault(self, key: _KT, default: _VT) -> _VT: ...
+    def __eq__(self, __value: object) -> bool: ...
 
 class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]):
     default_factory: Callable[[], _VT] | None
@@ -429,7 +432,9 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     @overload
     def pop(self, key: _KT) -> _VT: ...
     @overload
-    def pop(self, key: _KT, default: _VT | _T) -> _VT | _T: ...
+    def pop(self, key: _KT, default: _VT) -> _VT: ...
+    @overload
+    def pop(self, key: _KT, default: _T) -> _VT | _T: ...
     def copy(self) -> Self: ...
     __copy__ = copy
     # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`.
diff --git a/mypy/typeshed/stdlib/contextvars.pyi b/mypy/typeshed/stdlib/contextvars.pyi
index ef6e2700e667..63b5f80aea6c 100644
--- a/mypy/typeshed/stdlib/contextvars.pyi
+++ b/mypy/typeshed/stdlib/contextvars.pyi
@@ -18,16 +18,21 @@ class ContextVar(Generic[_T]):
     def __init__(self, name: str) -> None: ...
     @overload
     def __init__(self, name: str, *, default: _T) -> None: ...
+    def __hash__(self) -> int: ...
     @property
     def name(self) -> str: ...
     @overload
     def get(self) -> _T: ...
     if sys.version_info >= (3, 8):
         @overload
-        def get(self, default: _D | _T) -> _D | _T: ...
+        def get(self, default: _T) -> _T: ...
+        @overload
+        def get(self, default: _D) -> _D | _T: ...
     else:
         @overload
-        def get(self, __default: _D | _T) -> _D | _T: ...
+        def get(self, __default: _T) -> _T: ...
+        @overload
+        def get(self, __default: _D) -> _D | _T: ...
 
     def set(self, __value: _T) -> Token[_T]: ...
     def reset(self, __token: Token[_T]) -> None: ...
@@ -52,7 +57,9 @@ def copy_context() -> Context: ...
 class Context(Mapping[ContextVar[Any], Any]):
     def __init__(self) -> None: ...
     @overload
-    def get(self, __key: ContextVar[_T]) -> _T | None: ...
+    def get(self, __key: ContextVar[_T], __default: None = None) -> _T | None: ...  # type: ignore[misc]  # overlapping overloads
+    @overload
+    def get(self, __key: ContextVar[_T], __default: _T) -> _T: ...
     @overload
     def get(self, __key: ContextVar[_T], __default: _D) -> _T | _D: ...
     def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ...
@@ -60,3 +67,4 @@ class Context(Mapping[ContextVar[Any], Any]):
     def __getitem__(self, __key: ContextVar[_T]) -> _T: ...
     def __iter__(self) -> Iterator[ContextVar[Any]]: ...
     def __len__(self) -> int: ...
+    def __eq__(self, __value: object) -> bool: ...
diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi
index 00d511915f20..36577c5b7e1b 100644
--- a/mypy/typeshed/stdlib/datetime.pyi
+++ b/mypy/typeshed/stdlib/datetime.pyi
@@ -36,6 +36,7 @@ class timezone(tzinfo):
     def utcoffset(self, __dt: datetime | None) -> timedelta: ...
     def dst(self, __dt: datetime | None) -> None: ...
     def __hash__(self) -> int: ...
+    def __eq__(self, __value: object) -> bool: ...
 
 if sys.version_info >= (3, 11):
     UTC: timezone
@@ -87,6 +88,7 @@ class date:
     def __lt__(self, __value: date) -> bool: ...
     def __ge__(self, __value: date) -> bool: ...
     def __gt__(self, __value: date) -> bool: ...
+    def __eq__(self, __value: object) -> bool: ...
     if sys.version_info >= (3, 8):
         def __add__(self, __value: timedelta) -> Self: ...
         def __radd__(self, __value: timedelta) -> Self: ...
@@ -145,6 +147,7 @@ class time:
     def __lt__(self, __value: time) -> bool: ...
     def __ge__(self, __value: time) -> bool: ...
     def __gt__(self, __value: time) -> bool: ...
+    def __eq__(self, __value: object) -> bool: ...
     def __hash__(self) -> int: ...
     def isoformat(self, timespec: str = ...) -> str: ...
     @classmethod
@@ -219,6 +222,7 @@ class timedelta:
     def __lt__(self, __value: timedelta) -> bool: ...
     def __ge__(self, __value: timedelta) -> bool: ...
     def __gt__(self, __value: timedelta) -> bool: ...
+    def __eq__(self, __value: object) -> bool: ...
     def __bool__(self) -> bool: ...
     def __hash__(self) -> int: ...
 
@@ -310,6 +314,8 @@ class datetime(date):
     def __lt__(self, __value: datetime) -> bool: ...  # type: ignore[override]
     def __ge__(self, __value: datetime) -> bool: ...  # type: ignore[override]
     def __gt__(self, __value: datetime) -> bool: ...  # type: ignore[override]
+    def __eq__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
     if sys.version_info >= (3, 8):
         @overload  # type: ignore[override]
         def __sub__(self, __value: timedelta) -> Self: ...
diff --git a/mypy/typeshed/stdlib/email/charset.pyi b/mypy/typeshed/stdlib/email/charset.pyi
index d61950a26424..f8de016ab8bf 100644
--- a/mypy/typeshed/stdlib/email/charset.pyi
+++ b/mypy/typeshed/stdlib/email/charset.pyi
@@ -19,11 +19,11 @@ class Charset:
     def get_body_encoding(self) -> str | Callable[[Message], None]: ...
     def get_output_charset(self) -> str | None: ...
     def header_encode(self, string: str) -> str: ...
-    def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str]: ...
+    def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str | None]: ...
     @overload
     def body_encode(self, string: None) -> None: ...
     @overload
-    def body_encode(self, string: str) -> str: ...
+    def body_encode(self, string: str | bytes) -> str: ...
     def __eq__(self, other: object) -> bool: ...
     def __ne__(self, __value: object) -> bool: ...
 
diff --git a/mypy/typeshed/stdlib/email/policy.pyi b/mypy/typeshed/stdlib/email/policy.pyi
index dc7f18489bfa..804044031fcd 100644
--- a/mypy/typeshed/stdlib/email/policy.pyi
+++ b/mypy/typeshed/stdlib/email/policy.pyi
@@ -53,7 +53,7 @@ compat32: Compat32
 class EmailPolicy(Policy):
     utf8: bool
     refold_source: str
-    header_factory: Callable[[str, str], str]
+    header_factory: Callable[[str, Any], Any]
     content_manager: ContentManager
     def __init__(
         self,
@@ -70,9 +70,9 @@ class EmailPolicy(Policy):
         content_manager: ContentManager = ...,
     ) -> None: ...
     def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ...
-    def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ...
-    def header_fetch_parse(self, name: str, value: str) -> str: ...
-    def fold(self, name: str, value: str) -> str: ...
+    def header_store_parse(self, name: str, value: Any) -> tuple[str, Any]: ...
+    def header_fetch_parse(self, name: str, value: str) -> Any: ...
+    def fold(self, name: str, value: str) -> Any: ...
     def fold_binary(self, name: str, value: str) -> bytes: ...
 
 default: EmailPolicy
diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi
index 96a96dbce10e..60cc27215fd0 100644
--- a/mypy/typeshed/stdlib/enum.pyi
+++ b/mypy/typeshed/stdlib/enum.pyi
@@ -190,6 +190,7 @@ class Enum(metaclass=EnumMeta):
     # and in practice using `object` here has the same effect as using `Any`.
     def __new__(cls, value: object) -> Self: ...
     def __dir__(self) -> list[str]: ...
+    def __hash__(self) -> int: ...
     def __format__(self, format_spec: str) -> str: ...
     def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ...
 
diff --git a/mypy/typeshed/stdlib/ftplib.pyi b/mypy/typeshed/stdlib/ftplib.pyi
index f24d14fbf2b6..2d2ffa9aff03 100644
--- a/mypy/typeshed/stdlib/ftplib.pyi
+++ b/mypy/typeshed/stdlib/ftplib.pyi
@@ -87,7 +87,7 @@ class FTP:
     def makepasv(self) -> tuple[str, int]: ...
     def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ...
     # In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers.
-    def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int]: ...
+    def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: ...
     def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ...
     def retrbinary(
         self, cmd: str, callback: Callable[[bytes], object], blocksize: int = 8192, rest: int | str | None = None
diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi
index 41ece1b050b8..4b5ed3d8bda0 100644
--- a/mypy/typeshed/stdlib/http/client.pyi
+++ b/mypy/typeshed/stdlib/http/client.pyi
@@ -115,6 +115,10 @@ class HTTPResponse(io.BufferedIOBase, BinaryIO):  # type: ignore[misc]  # incomp
     chunk_left: int | None
     length: int | None
     will_close: bool
+    # url is set on instances of the class in urllib.request.AbstractHTTPHandler.do_open
+    # to match urllib.response.addinfourl's interface.
+    # It's not set in HTTPResponse.__init__ or any other method on the class
+    url: str
     def __init__(self, sock: socket, debuglevel: int = 0, method: str | None = None, url: str | None = None) -> None: ...
     def peek(self, n: int = -1) -> bytes: ...
     def read(self, amt: int | None = None) -> bytes: ...
diff --git a/mypy/typeshed/stdlib/importlib/machinery.pyi b/mypy/typeshed/stdlib/importlib/machinery.pyi
index 5aaefce87e3a..f5037da00d5f 100644
--- a/mypy/typeshed/stdlib/importlib/machinery.pyi
+++ b/mypy/typeshed/stdlib/importlib/machinery.pyi
@@ -148,3 +148,4 @@ class ExtensionFileLoader(importlib.abc.ExecutionLoader):
     def exec_module(self, module: types.ModuleType) -> None: ...
     def get_code(self, fullname: str) -> None: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
index 083453cd3c9a..0af33bc876c4 100644
--- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
+++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
@@ -66,6 +66,9 @@ class EntryPoint(_EntryPointBase):
             extras: list[str] = ...,
         ) -> bool: ...  # undocumented
 
+    def __hash__(self) -> int: ...
+    def __eq__(self, other: object) -> bool: ...
+
 if sys.version_info >= (3, 10):
     class EntryPoints(list[EntryPoint]):  # use as list is deprecated since 3.10
         # int argument is deprecated since 3.10
diff --git a/mypy/typeshed/stdlib/inspect.pyi b/mypy/typeshed/stdlib/inspect.pyi
index 9af4c39bae9e..601d23e786ac 100644
--- a/mypy/typeshed/stdlib/inspect.pyi
+++ b/mypy/typeshed/stdlib/inspect.pyi
@@ -354,6 +354,7 @@ class Signature:
         def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ...
 
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 if sys.version_info >= (3, 10):
     def get_annotations(
@@ -413,6 +414,7 @@ class Parameter:
         annotation: Any = ...,
     ) -> Self: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 class BoundArguments:
     arguments: OrderedDict[str, Any]
diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi
index fc42cf03e2bb..945e8bcbbdee 100644
--- a/mypy/typeshed/stdlib/ipaddress.pyi
+++ b/mypy/typeshed/stdlib/ipaddress.pyi
@@ -78,6 +78,7 @@ class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]):
     def __getitem__(self, n: int) -> _A: ...
     def __iter__(self) -> Iterator[_A]: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
     def __lt__(self, other: Self) -> bool: ...
     if sys.version_info >= (3, 11):
         def __ge__(self, other: Self) -> bool: ...
@@ -148,7 +149,10 @@ class _BaseV4:
 
 class IPv4Address(_BaseV4, _BaseAddress): ...
 class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): ...
-class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): ...
+
+class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]):
+    def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 class _BaseV6:
     @property
@@ -169,11 +173,16 @@ class IPv6Address(_BaseV6, _BaseAddress):
         @property
         def scope_id(self) -> str | None: ...
 
+    def __hash__(self) -> int: ...
+    def __eq__(self, other: object) -> bool: ...
+
 class IPv6Network(_BaseV6, _BaseNetwork[IPv6Address]):
     @property
     def is_site_local(self) -> bool: ...
 
-class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): ...
+class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]):
+    def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 def v4_int_to_packed(address: int) -> bytes: ...
 def v6_int_to_packed(address: int) -> bytes: ...
diff --git a/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/mypy/typeshed/stdlib/multiprocessing/managers.pyi
index 27a903fb9987..9cfc1ebbdd5e 100644
--- a/mypy/typeshed/stdlib/multiprocessing/managers.pyi
+++ b/mypy/typeshed/stdlib/multiprocessing/managers.pyi
@@ -73,14 +73,18 @@ class DictProxy(BaseProxy, MutableMapping[_KT, _VT]):
     def __delitem__(self, __key: _KT) -> None: ...
     def __iter__(self) -> Iterator[_KT]: ...
     def copy(self) -> dict[_KT, _VT]: ...
-    @overload
+    @overload  # type: ignore[override]
     def get(self, __key: _KT) -> _VT | None: ...
     @overload
-    def get(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ...
+    def get(self, __key: _KT, __default: _VT) -> _VT: ...
+    @overload
+    def get(self, __key: _KT, __default: _T) -> _VT | _T: ...
     @overload
     def pop(self, __key: _KT) -> _VT: ...
     @overload
-    def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ...
+    def pop(self, __key: _KT, __default: _VT) -> _VT: ...
+    @overload
+    def pop(self, __key: _KT, __default: _T) -> _VT | _T: ...
     def keys(self) -> list[_KT]: ...  # type: ignore[override]
     def items(self) -> list[tuple[_KT, _VT]]: ...  # type: ignore[override]
     def values(self) -> list[_VT]: ...  # type: ignore[override]
diff --git a/mypy/typeshed/stdlib/pdb.pyi b/mypy/typeshed/stdlib/pdb.pyi
index e0d69e7d30fa..4cc708d9d5fe 100644
--- a/mypy/typeshed/stdlib/pdb.pyi
+++ b/mypy/typeshed/stdlib/pdb.pyi
@@ -125,6 +125,9 @@ class Pdb(Bdb, Cmd):
     def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ...
     def message(self, msg: str) -> None: ...
     def error(self, msg: str) -> None: ...
+    if sys.version_info >= (3, 12):
+        def set_convenience_variable(self, frame: FrameType, name: str, value: Any) -> None: ...
+
     def _select_frame(self, number: int) -> None: ...
     def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ...
     def _print_lines(
diff --git a/mypy/typeshed/stdlib/pydoc.pyi b/mypy/typeshed/stdlib/pydoc.pyi
index c993af390bbb..7791c977aa8b 100644
--- a/mypy/typeshed/stdlib/pydoc.pyi
+++ b/mypy/typeshed/stdlib/pydoc.pyi
@@ -61,6 +61,7 @@ class Doc:
     def getdocloc(self, object: object, basedir: str = ...) -> str | None: ...
 
 class HTMLRepr(Repr):
+    def __init__(self) -> None: ...
     def escape(self, text: str) -> str: ...
     def repr(self, object: object) -> str: ...
     def repr1(self, x: object, level: complex) -> str: ...
@@ -148,6 +149,7 @@ class HTMLDoc(Doc):
     def filelink(self, url: str, path: str) -> str: ...
 
 class TextRepr(Repr):
+    def __init__(self) -> None: ...
     def repr1(self, x: object, level: complex) -> str: ...
     def repr_string(self, x: str, level: complex) -> str: ...
     def repr_str(self, x: str, level: complex) -> str: ...
diff --git a/mypy/typeshed/stdlib/re.pyi b/mypy/typeshed/stdlib/re.pyi
index 4e53141ade84..29ee8b66815e 100644
--- a/mypy/typeshed/stdlib/re.pyi
+++ b/mypy/typeshed/stdlib/re.pyi
@@ -175,6 +175,8 @@ class Pattern(Generic[AnyStr]):
     def subn(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> tuple[AnyStr, int]: ...
     def __copy__(self) -> Pattern[AnyStr]: ...
     def __deepcopy__(self, __memo: Any) -> Pattern[AnyStr]: ...
+    def __eq__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, item: Any) -> GenericAlias: ...
 
diff --git a/mypy/typeshed/stdlib/shelve.pyi b/mypy/typeshed/stdlib/shelve.pyi
index 82d0b03f4049..b162b3a85766 100644
--- a/mypy/typeshed/stdlib/shelve.pyi
+++ b/mypy/typeshed/stdlib/shelve.pyi
@@ -15,8 +15,10 @@ class Shelf(MutableMapping[str, _VT]):
     ) -> None: ...
     def __iter__(self) -> Iterator[str]: ...
     def __len__(self) -> int: ...
+    @overload  # type: ignore[override]
+    def get(self, key: str, default: None = None) -> _VT | None: ...  # type: ignore[misc]  # overlapping overloads
     @overload
-    def get(self, key: str) -> _VT | None: ...
+    def get(self, key: str, default: _VT) -> _VT: ...
     @overload
     def get(self, key: str, default: _T) -> _VT | _T: ...
     def __getitem__(self, key: str) -> _VT: ...
diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
index cff0f5e5ff1d..41f731e21e26 100644
--- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
+++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
@@ -233,8 +233,9 @@ def connect(
 ) -> Connection: ...
 def enable_callback_tracebacks(__enable: bool) -> None: ...
 
-# takes a pos-or-keyword argument because there is a C wrapper
-def enable_shared_cache(enable: int) -> None: ...
+if sys.version_info < (3, 12):
+    # takes a pos-or-keyword argument because there is a C wrapper
+    def enable_shared_cache(enable: int) -> None: ...
 
 if sys.version_info >= (3, 10):
     def register_adapter(__type: type[_T], __adapter: _Adapter[_T]) -> None: ...
@@ -298,6 +299,11 @@ class Connection:
     isolation_level: str | None  # one of '', 'DEFERRED', 'IMMEDIATE' or 'EXCLUSIVE'
     @property
     def total_changes(self) -> int: ...
+    if sys.version_info >= (3, 12):
+        @property
+        def autocommit(self) -> int: ...
+        @autocommit.setter
+        def autocommit(self, val: int) -> None: ...
     row_factory: Any
     text_factory: Any
     def __init__(
@@ -375,6 +381,9 @@ class Connection:
         def getlimit(self, __category: int) -> int: ...
         def serialize(self, *, name: str = "main") -> bytes: ...
         def deserialize(self, __data: ReadableBuffer, *, name: str = "main") -> None: ...
+    if sys.version_info >= (3, 12):
+        def getconfig(self, __op: int) -> bool: ...
+        def setconfig(self, __op: int, __enable: bool = True) -> bool: ...
 
     def __call__(self, __sql: str) -> _Statement: ...
     def __enter__(self) -> Self: ...
diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi
index 20b8802bd7b9..446bbf8d1009 100644
--- a/mypy/typeshed/stdlib/ssl.pyi
+++ b/mypy/typeshed/stdlib/ssl.pyi
@@ -485,6 +485,7 @@ class SSLSession:
     def time(self) -> int: ...
     @property
     def timeout(self) -> int: ...
+    def __eq__(self, __value: object) -> bool: ...
 
 class SSLErrorNumber(enum.IntEnum):
     SSL_ERROR_EOF: int
diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi
index d73566fc0917..bb416717a378 100644
--- a/mypy/typeshed/stdlib/tkinter/ttk.pyi
+++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi
@@ -953,8 +953,6 @@ class _TreeviewColumnDict(TypedDict):
     anchor: tkinter._Anchor
     id: str
 
-_TreeviewColumnId: TypeAlias = int | str  # manual page: "COLUMN IDENTIFIERS"
-
 class Treeview(Widget, tkinter.XView, tkinter.YView):
     def __init__(
         self,
@@ -963,7 +961,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
         class_: str = ...,
         columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = ...,
         cursor: tkinter._Cursor = ...,
-        displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ...,
+        displaycolumns: str | int | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ...,
         height: int = ...,
         name: str = ...,
         padding: _Padding = ...,
@@ -985,7 +983,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
         *,
         columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = ...,
         cursor: tkinter._Cursor = ...,
-        displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ...,
+        displaycolumns: str | int | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ...,
         height: int = ...,
         padding: _Padding = ...,
         selectmode: Literal["extended", "browse", "none"] = ...,
@@ -998,23 +996,23 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
     @overload
     def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ...
     config = configure
-    def bbox(self, item, column: _TreeviewColumnId | None = None) -> tuple[int, int, int, int] | Literal[""]: ...  # type: ignore[override]
-    def get_children(self, item: str | None = None) -> tuple[str, ...]: ...
-    def set_children(self, item: str, *newchildren: str) -> None: ...
+    def bbox(self, item: str | int, column: str | int | None = None) -> tuple[int, int, int, int] | Literal[""]: ...  # type: ignore[override]
+    def get_children(self, item: str | int | None = None) -> tuple[str, ...]: ...
+    def set_children(self, item: str | int, *newchildren: str | int) -> None: ...
     @overload
-    def column(self, column: _TreeviewColumnId, option: Literal["width", "minwidth"]) -> int: ...
+    def column(self, column: str | int, option: Literal["width", "minwidth"]) -> int: ...
     @overload
-    def column(self, column: _TreeviewColumnId, option: Literal["stretch"]) -> bool: ...  # actually 0 or 1
+    def column(self, column: str | int, option: Literal["stretch"]) -> bool: ...  # actually 0 or 1
     @overload
-    def column(self, column: _TreeviewColumnId, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ...
+    def column(self, column: str | int, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ...
     @overload
-    def column(self, column: _TreeviewColumnId, option: Literal["id"]) -> str: ...
+    def column(self, column: str | int, option: Literal["id"]) -> str: ...
     @overload
-    def column(self, column: _TreeviewColumnId, option: str) -> Any: ...
+    def column(self, column: str | int, option: str) -> Any: ...
     @overload
     def column(
         self,
-        column: _TreeviewColumnId,
+        column: str | int,
         option: None = None,
         *,
         width: int = ...,
@@ -1023,29 +1021,29 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
         anchor: tkinter._Anchor = ...,
         # id is read-only
     ) -> _TreeviewColumnDict | None: ...
-    def delete(self, *items: str) -> None: ...
-    def detach(self, *items: str) -> None: ...
-    def exists(self, item: str) -> bool: ...
+    def delete(self, *items: str | int) -> None: ...
+    def detach(self, *items: str | int) -> None: ...
+    def exists(self, item: str | int) -> bool: ...
     @overload  # type: ignore[override]
     def focus(self, item: None = None) -> str: ...  # can return empty string
     @overload
-    def focus(self, item: str) -> Literal[""]: ...
+    def focus(self, item: str | int) -> Literal[""]: ...
     @overload
-    def heading(self, column: _TreeviewColumnId, option: Literal["text"]) -> str: ...
+    def heading(self, column: str | int, option: Literal["text"]) -> str: ...
     @overload
-    def heading(self, column: _TreeviewColumnId, option: Literal["image"]) -> tuple[str] | str: ...
+    def heading(self, column: str | int, option: Literal["image"]) -> tuple[str] | str: ...
     @overload
-    def heading(self, column: _TreeviewColumnId, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ...
+    def heading(self, column: str | int, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ...
     @overload
-    def heading(self, column: _TreeviewColumnId, option: Literal["command"]) -> str: ...
+    def heading(self, column: str | int, option: Literal["command"]) -> str: ...
     @overload
-    def heading(self, column: _TreeviewColumnId, option: str) -> Any: ...
+    def heading(self, column: str | int, option: str) -> Any: ...
     @overload
-    def heading(self, column: _TreeviewColumnId, option: None = None) -> _TreeviewHeaderDict: ...  # type: ignore[misc]
+    def heading(self, column: str | int, option: None = None) -> _TreeviewHeaderDict: ...  # type: ignore[misc]
     @overload
     def heading(
         self,
-        column: _TreeviewColumnId,
+        column: str | int,
         option: None = None,
         *,
         text: str = ...,
@@ -1058,14 +1056,14 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
     def identify_column(self, x: int) -> str: ...
     def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: ...
     def identify_element(self, x: int, y: int) -> str: ...  # don't know what possible return values are
-    def index(self, item: str) -> int: ...
+    def index(self, item: str | int) -> int: ...
     def insert(
         self,
         parent: str,
         index: int | Literal["end"],
-        iid: str | None = None,
+        iid: str | int | None = None,
         *,
-        id: str = ...,  # same as iid
+        id: str | int = ...,  # same as iid
         text: str = ...,
         image: tkinter._ImageSpec = ...,
         values: list[Any] | tuple[Any, ...] = ...,
@@ -1073,23 +1071,23 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
         tags: str | list[str] | tuple[str, ...] = ...,
     ) -> str: ...
     @overload
-    def item(self, item: str, option: Literal["text"]) -> str: ...
+    def item(self, item: str | int, option: Literal["text"]) -> str: ...
     @overload
-    def item(self, item: str, option: Literal["image"]) -> tuple[str] | Literal[""]: ...
+    def item(self, item: str | int, option: Literal["image"]) -> tuple[str] | Literal[""]: ...
     @overload
-    def item(self, item: str, option: Literal["values"]) -> tuple[Any, ...] | Literal[""]: ...
+    def item(self, item: str | int, option: Literal["values"]) -> tuple[Any, ...] | Literal[""]: ...
     @overload
-    def item(self, item: str, option: Literal["open"]) -> bool: ...  # actually 0 or 1
+    def item(self, item: str | int, option: Literal["open"]) -> bool: ...  # actually 0 or 1
     @overload
-    def item(self, item: str, option: Literal["tags"]) -> tuple[str, ...] | Literal[""]: ...
+    def item(self, item: str | int, option: Literal["tags"]) -> tuple[str, ...] | Literal[""]: ...
     @overload
-    def item(self, item: str, option: str) -> Any: ...
+    def item(self, item: str | int, option: str) -> Any: ...
     @overload
-    def item(self, item: str, option: None = None) -> _TreeviewItemDict: ...  # type: ignore[misc]
+    def item(self, item: str | int, option: None = None) -> _TreeviewItemDict: ...  # type: ignore[misc]
     @overload
     def item(
         self,
-        item: str,
+        item: str | int,
         option: None = None,
         *,
         text: str = ...,
@@ -1098,27 +1096,39 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
         open: bool = ...,
         tags: str | list[str] | tuple[str, ...] = ...,
     ) -> None: ...
-    def move(self, item: str, parent: str, index: int) -> None: ...
+    def move(self, item: str | int, parent: str, index: int) -> None: ...
     reattach = move
-    def next(self, item: str) -> str: ...  # returning empty string means last item
-    def parent(self, item: str) -> str: ...
-    def prev(self, item: str) -> str: ...  # returning empty string means first item
-    def see(self, item: str) -> None: ...
+    def next(self, item: str | int) -> str: ...  # returning empty string means last item
+    def parent(self, item: str | int) -> str: ...
+    def prev(self, item: str | int) -> str: ...  # returning empty string means first item
+    def see(self, item: str | int) -> None: ...
     if sys.version_info >= (3, 8):
         def selection(self) -> tuple[str, ...]: ...
     else:
         def selection(self, selop: Incomplete | None = ..., items: Incomplete | None = None) -> tuple[str, ...]: ...
 
-    def selection_set(self, items: str | list[str] | tuple[str, ...]) -> None: ...
-    def selection_add(self, items: str | list[str] | tuple[str, ...]) -> None: ...
-    def selection_remove(self, items: str | list[str] | tuple[str, ...]) -> None: ...
-    def selection_toggle(self, items: str | list[str] | tuple[str, ...]) -> None: ...
     @overload
-    def set(self, item: str, column: None = None, value: None = None) -> dict[str, Any]: ...
+    def selection_set(self, __items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...]) -> None: ...
+    @overload
+    def selection_set(self, *items: str | int) -> None: ...
+    @overload
+    def selection_add(self, __items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...]) -> None: ...
+    @overload
+    def selection_add(self, *items: str | int) -> None: ...
+    @overload
+    def selection_remove(self, __items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...]) -> None: ...
+    @overload
+    def selection_remove(self, *items: str | int) -> None: ...
+    @overload
+    def selection_toggle(self, __items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...]) -> None: ...
+    @overload
+    def selection_toggle(self, *items: str | int) -> None: ...
+    @overload
+    def set(self, item: str | int, column: None = None, value: None = None) -> dict[str, Any]: ...
     @overload
-    def set(self, item: str, column: _TreeviewColumnId, value: None = None) -> Any: ...
+    def set(self, item: str | int, column: str | int, value: None = None) -> Any: ...
     @overload
-    def set(self, item: str, column: _TreeviewColumnId, value: Any) -> Literal[""]: ...
+    def set(self, item: str | int, column: str | int, value: Any) -> Literal[""]: ...
     # There's no tag_unbind() or 'add' argument for whatever reason.
     # Also, it's 'callback' instead of 'func' here.
     @overload
@@ -1150,7 +1160,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
     @overload
     def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: ...
     @overload
-    def tag_has(self, tagname: str, item: str) -> bool: ...
+    def tag_has(self, tagname: str, item: str | int) -> bool: ...
 
 class LabeledScale(Frame):
     label: Incomplete
diff --git a/mypy/typeshed/stdlib/traceback.pyi b/mypy/typeshed/stdlib/traceback.pyi
index a6d6d3e168b3..47449dfe8143 100644
--- a/mypy/typeshed/stdlib/traceback.pyi
+++ b/mypy/typeshed/stdlib/traceback.pyi
@@ -1,5 +1,5 @@
 import sys
-from _typeshed import SupportsWrite
+from _typeshed import SupportsWrite, Unused
 from collections.abc import Generator, Iterable, Iterator, Mapping
 from types import FrameType, TracebackType
 from typing import Any, overload
@@ -84,7 +84,10 @@ def format_list(extracted_list: list[FrameSummary]) -> list[str]: ...
 def print_list(extracted_list: list[FrameSummary], file: SupportsWrite[str] | None = None) -> None: ...
 
 if sys.version_info >= (3, 10):
-    def format_exception_only(__exc: type[BaseException] | None, value: BaseException | None = ...) -> list[str]: ...
+    @overload
+    def format_exception_only(__exc: BaseException | None) -> list[str]: ...
+    @overload
+    def format_exception_only(__exc: Unused, value: BaseException | None) -> list[str]: ...
 
 else:
     def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ...
diff --git a/mypy/typeshed/stdlib/tracemalloc.pyi b/mypy/typeshed/stdlib/tracemalloc.pyi
index 3dc8b8603fe5..6448a16ce11a 100644
--- a/mypy/typeshed/stdlib/tracemalloc.pyi
+++ b/mypy/typeshed/stdlib/tracemalloc.pyi
@@ -37,6 +37,7 @@ class Statistic:
     traceback: Traceback
     def __init__(self, traceback: Traceback, size: int, count: int) -> None: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 class StatisticDiff:
     count: int
@@ -46,6 +47,7 @@ class StatisticDiff:
     traceback: Traceback
     def __init__(self, traceback: Traceback, size: int, size_diff: int, count: int, count_diff: int) -> None: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 _FrameTuple: TypeAlias = tuple[str, int]
 
@@ -56,6 +58,7 @@ class Frame:
     def lineno(self) -> int: ...
     def __init__(self, frame: _FrameTuple) -> None: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
     def __lt__(self, other: Frame) -> bool: ...
     if sys.version_info >= (3, 11):
         def __gt__(self, other: Frame) -> bool: ...
@@ -80,6 +83,7 @@ class Trace:
     def traceback(self) -> Traceback: ...
     def __init__(self, trace: _TraceTuple) -> None: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 class Traceback(Sequence[Frame]):
     if sys.version_info >= (3, 9):
@@ -97,6 +101,7 @@ class Traceback(Sequence[Frame]):
     def __contains__(self, frame: Frame) -> bool: ...  # type: ignore[override]
     def __len__(self) -> int: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
     def __lt__(self, other: Traceback) -> bool: ...
     if sys.version_info >= (3, 11):
         def __gt__(self, other: Traceback) -> bool: ...
diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi
index e5468ce4ed3c..2f4bd1a88047 100644
--- a/mypy/typeshed/stdlib/types.pyi
+++ b/mypy/typeshed/stdlib/types.pyi
@@ -71,6 +71,7 @@ class _Cell:
     if sys.version_info >= (3, 8):
         def __init__(self, __contents: object = ...) -> None: ...
 
+    def __eq__(self, __value: object) -> bool: ...
     __hash__: ClassVar[None]  # type: ignore[assignment]
     cell_contents: Any
 
@@ -113,6 +114,8 @@ LambdaType = FunctionType
 
 @final
 class CodeType:
+    def __eq__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
     @property
     def co_argcount(self) -> int: ...
     if sys.version_info >= (3, 8):
@@ -326,6 +329,7 @@ class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]):
 class SimpleNamespace:
     __hash__: ClassVar[None]  # type: ignore[assignment]
     def __init__(self, **kwargs: Any) -> None: ...
+    def __eq__(self, __value: object) -> bool: ...
     def __getattribute__(self, __name: str) -> Any: ...
     def __setattr__(self, __name: str, __value: Any) -> None: ...
     def __delattr__(self, __name: str) -> None: ...
@@ -442,6 +446,8 @@ class MethodType:
     def __qualname__(self) -> str: ...  # inherited from the added function
     def __init__(self, __func: Callable[..., Any], __obj: object) -> None: ...
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+    def __eq__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 @final
 class BuiltinFunctionType:
@@ -452,6 +458,8 @@ class BuiltinFunctionType:
     @property
     def __qualname__(self) -> str: ...
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+    def __eq__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 BuiltinMethodType = BuiltinFunctionType
 
@@ -479,6 +487,7 @@ class MethodWrapperType:
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
     def __eq__(self, __value: object) -> bool: ...
     def __ne__(self, __value: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 @final
 class MethodDescriptorType:
@@ -603,6 +612,8 @@ if sys.version_info >= (3, 9):
         def __parameters__(self) -> tuple[Any, ...]: ...
         def __init__(self, origin: type, args: Any) -> None: ...
         def __getitem__(self, __typeargs: Any) -> GenericAlias: ...
+        def __eq__(self, __value: object) -> bool: ...
+        def __hash__(self) -> int: ...
         if sys.version_info >= (3, 11):
             @property
             def __unpacked__(self) -> bool: ...
@@ -626,3 +637,5 @@ if sys.version_info >= (3, 10):
         def __args__(self) -> tuple[Any, ...]: ...
         def __or__(self, __value: Any) -> UnionType: ...
         def __ror__(self, __value: Any) -> UnionType: ...
+        def __eq__(self, __value: object) -> bool: ...
+        def __hash__(self) -> int: ...
diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi
index 7496a0920690..6a307368642f 100644
--- a/mypy/typeshed/stdlib/typing.pyi
+++ b/mypy/typeshed/stdlib/typing.pyi
@@ -226,12 +226,14 @@ if sys.version_info >= (3, 10):
         @property
         def __origin__(self) -> ParamSpec: ...
         def __init__(self, origin: ParamSpec) -> None: ...
+        def __eq__(self, other: object) -> bool: ...
 
     @_final
     class ParamSpecKwargs:
         @property
         def __origin__(self) -> ParamSpec: ...
         def __init__(self, origin: ParamSpec) -> None: ...
+        def __eq__(self, other: object) -> bool: ...
 
     @_final
     class ParamSpec:
@@ -563,6 +565,7 @@ class AbstractSet(Collection[_T_co], Generic[_T_co]):
     def __or__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ...
     def __sub__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ...
     def __xor__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ...
+    def __eq__(self, other: object) -> bool: ...
     def isdisjoint(self, other: Iterable[Any]) -> bool: ...
 
 class MutableSet(AbstractSet[_T], Generic[_T]):
@@ -647,7 +650,9 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
     @overload
     def pop(self, __key: _KT) -> _VT: ...
     @overload
-    def pop(self, __key: _KT, default: _VT | _T) -> _VT | _T: ...
+    def pop(self, __key: _KT, default: _VT) -> _VT: ...
+    @overload
+    def pop(self, __key: _KT, default: _T) -> _VT | _T: ...
     def popitem(self) -> tuple[_KT, _VT]: ...
     # This overload should be allowed only if the value type is compatible with None.
     #
@@ -953,3 +958,7 @@ if sys.version_info >= (3, 12):
         if sys.version_info >= (3, 10):
             def __or__(self, right: Any) -> _SpecialForm: ...
             def __ror__(self, left: Any) -> _SpecialForm: ...
+
+if sys.version_info >= (3, 13):
+    def is_protocol(__tp: type) -> bool: ...
+    def get_protocol_members(__tp: type) -> frozenset[str]: ...
diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi
index 93087a45a108..efcc13e42047 100644
--- a/mypy/typeshed/stdlib/typing_extensions.pyi
+++ b/mypy/typeshed/stdlib/typing_extensions.pyi
@@ -4,26 +4,52 @@ import sys
 import typing
 from _collections_abc import dict_items, dict_keys, dict_values
 from _typeshed import IdentityFunction, Incomplete
-from collections.abc import Iterable
-from typing import (  # noqa: Y022,Y039
+from typing import (  # noqa: Y022,Y037,Y038,Y039
+    IO as IO,
     TYPE_CHECKING as TYPE_CHECKING,
+    AbstractSet as AbstractSet,
     Any as Any,
+    AnyStr as AnyStr,
     AsyncContextManager as AsyncContextManager,
     AsyncGenerator as AsyncGenerator,
     AsyncIterable as AsyncIterable,
     AsyncIterator as AsyncIterator,
     Awaitable as Awaitable,
-    Callable,
+    BinaryIO as BinaryIO,
+    Callable as Callable,
     ChainMap as ChainMap,
     ClassVar as ClassVar,
+    Collection as Collection,
+    Container as Container,
     ContextManager as ContextManager,
     Coroutine as Coroutine,
     Counter as Counter,
     DefaultDict as DefaultDict,
     Deque as Deque,
-    Mapping,
+    Dict as Dict,
+    ForwardRef as ForwardRef,
+    FrozenSet as FrozenSet,
+    Generator as Generator,
+    Generic as Generic,
+    Hashable as Hashable,
+    ItemsView as ItemsView,
+    Iterable as Iterable,
+    Iterator as Iterator,
+    KeysView as KeysView,
+    List as List,
+    Mapping as Mapping,
+    MappingView as MappingView,
+    Match as Match,
+    MutableMapping as MutableMapping,
+    MutableSequence as MutableSequence,
+    MutableSet as MutableSet,
     NoReturn as NoReturn,
-    Sequence,
+    Optional as Optional,
+    Pattern as Pattern,
+    Reversible as Reversible,
+    Sequence as Sequence,
+    Set as Set,
+    Sized as Sized,
     SupportsAbs as SupportsAbs,
     SupportsBytes as SupportsBytes,
     SupportsComplex as SupportsComplex,
@@ -31,8 +57,15 @@ from typing import (  # noqa: Y022,Y039
     SupportsInt as SupportsInt,
     SupportsRound as SupportsRound,
     Text as Text,
+    TextIO as TextIO,
+    Tuple as Tuple,
     Type as Type,
+    Union as Union,
+    ValuesView as ValuesView,
     _Alias,
+    cast as cast,
+    no_type_check as no_type_check,
+    no_type_check_decorator as no_type_check_decorator,
     overload as overload,
     type_check_only,
 )
@@ -109,11 +142,50 @@ __all__ = [
     "get_original_bases",
     "get_overloads",
     "get_type_hints",
+    "AbstractSet",
+    "AnyStr",
+    "BinaryIO",
+    "Callable",
+    "Collection",
+    "Container",
+    "Dict",
+    "ForwardRef",
+    "FrozenSet",
+    "Generator",
+    "Generic",
+    "Hashable",
+    "IO",
+    "ItemsView",
+    "Iterable",
+    "Iterator",
+    "KeysView",
+    "List",
+    "Mapping",
+    "MappingView",
+    "Match",
+    "MutableMapping",
+    "MutableSequence",
+    "MutableSet",
+    "Optional",
+    "Pattern",
+    "Reversible",
+    "Sequence",
+    "Set",
+    "Sized",
+    "TextIO",
+    "Tuple",
+    "Union",
+    "ValuesView",
+    "cast",
+    "get_protocol_members",
+    "is_protocol",
+    "no_type_check",
+    "no_type_check_decorator",
 ]
 
 _T = typing.TypeVar("_T")
 _F = typing.TypeVar("_F", bound=Callable[..., Any])
-_TC = typing.TypeVar("_TC", bound=Type[object])
+_TC = typing.TypeVar("_TC", bound=type[object])
 
 # unfortunately we have to duplicate this class definition from typing.pyi or we break pytype
 class _SpecialForm:
@@ -403,3 +475,9 @@ else:
         # Not actually a Protocol at runtime; see
         # https://github.com/python/typeshed/issues/10224 for why we're defining it this way
         def __buffer__(self, __flags: int) -> memoryview: ...
+
+if sys.version_info >= (3, 13):
+    from typing import get_protocol_members as get_protocol_members, is_protocol as is_protocol
+else:
+    def is_protocol(__tp: type) -> bool: ...
+    def get_protocol_members(__tp: type) -> frozenset[str]: ...
diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi
index 45c39e3f3010..1f58f266ee89 100644
--- a/mypy/typeshed/stdlib/unittest/case.pyi
+++ b/mypy/typeshed/stdlib/unittest/case.pyi
@@ -86,6 +86,7 @@ class TestCase:
     _testMethodDoc: str
     def __init__(self, methodName: str = "runTest") -> None: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
     def setUp(self) -> None: ...
     def tearDown(self) -> None: ...
     @classmethod
@@ -304,6 +305,8 @@ class FunctionTestCase(TestCase):
         description: str | None = None,
     ) -> None: ...
     def runTest(self) -> None: ...
+    def __hash__(self) -> int: ...
+    def __eq__(self, other: object) -> bool: ...
 
 class _AssertRaisesContext(Generic[_E]):
     exception: _E
diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi
index db1cc7d9bfc9..66120197b269 100644
--- a/mypy/typeshed/stdlib/unittest/mock.pyi
+++ b/mypy/typeshed/stdlib/unittest/mock.pyi
@@ -389,7 +389,11 @@ if sys.version_info >= (3, 8):
     class AsyncMagicMixin(MagicMixin):
         def __init__(self, *args: Any, **kw: Any) -> None: ...
 
-    class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): ...
+    class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock):
+        # Improving the `reset_mock` signature.
+        # It is defined on `AsyncMockMixin` with `*args, **kwargs`, which is not ideal.
+        # But, `NonCallableMock` super-class has the better version.
+        def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ...
 
 class MagicProxy:
     name: str
diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi
index 8f99c5837871..079c9755528c 100644
--- a/mypy/typeshed/stdlib/urllib/request.pyi
+++ b/mypy/typeshed/stdlib/urllib/request.pyi
@@ -173,7 +173,7 @@ class HTTPPasswordMgr:
     def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ...
     def find_user_password(self, realm: str, authuri: str) -> tuple[str | None, str | None]: ...
     def is_suburi(self, base: str, test: str) -> bool: ...  # undocumented
-    def reduce_uri(self, uri: str, default_port: bool = True) -> str: ...  # undocumented
+    def reduce_uri(self, uri: str, default_port: bool = True) -> tuple[str, str]: ...  # undocumented
 
 class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
     def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ...
@@ -184,7 +184,7 @@ class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm):
         self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str, is_authenticated: bool = False
     ) -> None: ...
     def update_authenticated(self, uri: str | Sequence[str], is_authenticated: bool = False) -> None: ...
-    def is_authenticated(self, authuri: str) -> bool: ...
+    def is_authenticated(self, authuri: str) -> bool | None: ...
 
 class AbstractBasicAuthHandler:
     rx: ClassVar[Pattern[str]]  # undocumented
@@ -212,7 +212,7 @@ class AbstractDigestAuthHandler:
     def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: HTTPMessage) -> None: ...
     def retry_http_digest_auth(self, req: Request, auth: str) -> _UrlopenRet | None: ...
     def get_cnonce(self, nonce: str) -> str: ...
-    def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ...
+    def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str | None: ...
     def get_algorithm_impls(self, algorithm: str) -> tuple[Callable[[str], str], Callable[[str, str], str]]: ...
     def get_entity_digest(self, data: ReadableBuffer | None, chal: Mapping[str, str]) -> str | None: ...
 
@@ -269,7 +269,7 @@ class ftpwrapper:  # undocumented
     def file_close(self) -> None: ...
     def init(self) -> None: ...
     def real_close(self) -> None: ...
-    def retrfile(self, file: str, type: str) -> tuple[addclosehook, int]: ...
+    def retrfile(self, file: str, type: str) -> tuple[addclosehook, int | None]: ...
 
 class FTPHandler(BaseHandler):
     def ftp_open(self, req: Request) -> addinfourl: ...
diff --git a/mypy/typeshed/stdlib/uuid.pyi b/mypy/typeshed/stdlib/uuid.pyi
index fd87646531a6..e1ea424f9680 100644
--- a/mypy/typeshed/stdlib/uuid.pyi
+++ b/mypy/typeshed/stdlib/uuid.pyi
@@ -63,6 +63,7 @@ class UUID:
     def __le__(self, other: UUID) -> bool: ...
     def __gt__(self, other: UUID) -> bool: ...
     def __ge__(self, other: UUID) -> bool: ...
+    def __hash__(self) -> builtins.int: ...
 
 if sys.version_info >= (3, 9):
     def getnode() -> int: ...
diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi
index 13f48fe85a8d..ecb98d4269d5 100644
--- a/mypy/typeshed/stdlib/weakref.pyi
+++ b/mypy/typeshed/stdlib/weakref.pyi
@@ -45,6 +45,7 @@ class WeakMethod(ref[_CallableT], Generic[_CallableT]):
     def __call__(self) -> _CallableT | None: ...
     def __eq__(self, other: object) -> bool: ...
     def __ne__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 class WeakValueDictionary(MutableMapping[_KT, _VT]):
     @overload
@@ -74,7 +75,9 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]):
     @overload
     def pop(self, key: _KT) -> _VT: ...
     @overload
-    def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ...
+    def pop(self, key: _KT, default: _VT) -> _VT: ...
+    @overload
+    def pop(self, key: _KT, default: _T) -> _VT | _T: ...
     if sys.version_info >= (3, 9):
         def __or__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ...
         def __ror__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ...
@@ -117,7 +120,9 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]):
     @overload
     def pop(self, key: _KT) -> _VT: ...
     @overload
-    def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ...
+    def pop(self, key: _KT, default: _VT) -> _VT: ...
+    @overload
+    def pop(self, key: _KT, default: _T) -> _VT | _T: ...
     if sys.version_info >= (3, 9):
         def __or__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ...
         def __ror__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ...
diff --git a/mypy/typeshed/stdlib/winreg.pyi b/mypy/typeshed/stdlib/winreg.pyi
index 70ea6a1ced11..337bd9706050 100644
--- a/mypy/typeshed/stdlib/winreg.pyi
+++ b/mypy/typeshed/stdlib/winreg.pyi
@@ -98,3 +98,4 @@ if sys.platform == "win32":
         ) -> bool | None: ...
         def Close(self) -> None: ...
         def Detach(self) -> int: ...
+        def __hash__(self) -> int: ...
diff --git a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi
index 2cf8dbbe7025..d8ff2f5b6090 100644
--- a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi
+++ b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi
@@ -142,6 +142,7 @@ class QName:
     def __gt__(self, other: QName | str) -> bool: ...
     def __ge__(self, other: QName | str) -> bool: ...
     def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
 
 class ElementTree:
     def __init__(self, element: Element | None = None, file: _FileRead | None = None) -> None: ...
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index 754cb21c3ff8..58dfb172cf76 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -1482,14 +1482,12 @@ frozenset({1}) == [1]  # Error
 {1: 2}.keys() == frozenset({1})
 {1: 2}.items() == {(1, 2)}
 
-{1: 2}.keys() == {'no'}  # Error
+{1: 2}.keys() == {'no'}  # OK
 {1: 2}.values() == {2}  # Error
-{1: 2}.keys() == [1]  # Error
+{1: 2}.keys() == [1]  # OK
 [out]
 _testStrictEqualityAllowlist.py:5: error: Non-overlapping equality check (left operand type: "FrozenSet[int]", right operand type: "List[int]")
-_testStrictEqualityAllowlist.py:11: error: Non-overlapping equality check (left operand type: "dict_keys[int, int]", right operand type: "Set[str]")
 _testStrictEqualityAllowlist.py:12: error: Non-overlapping equality check (left operand type: "dict_values[int, int]", right operand type: "Set[int]")
-_testStrictEqualityAllowlist.py:13: error: Non-overlapping equality check (left operand type: "dict_keys[int, int]", right operand type: "List[int]")
 
 [case testUnreachableWithStdlibContextManagers]
 # mypy: warn-unreachable, strict-optional

From 0e4521aa2d0b643b67777b4136bc27d97e622c56 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Sat, 12 Aug 2023 04:36:03 -0400
Subject: [PATCH 043/288] Fix inheriting from generic @frozen attrs class
 (#15700)

Fixes #15658.
---
 mypy/plugins/attrs.py                  |  2 +-
 test-data/unit/check-plugin-attrs.test | 24 ++++++++++++++++++++++++
 2 files changed, 25 insertions(+), 1 deletion(-)

diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py
index 0f748cc140e8..d444c18852dd 100644
--- a/mypy/plugins/attrs.py
+++ b/mypy/plugins/attrs.py
@@ -803,7 +803,7 @@ def _make_frozen(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute])
         else:
             # This variable belongs to a super class so create new Var so we
             # can modify it.
-            var = Var(attribute.name, ctx.cls.info[attribute.name].type)
+            var = Var(attribute.name, attribute.init_type)
             var.info = ctx.cls.info
             var._fullname = f"{ctx.cls.info.fullname}.{var.name}"
             ctx.cls.info.names[var.name] = SymbolTableNode(MDEF, var)
diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test
index 7a7bcb65fe98..3534d206c060 100644
--- a/test-data/unit/check-plugin-attrs.test
+++ b/test-data/unit/check-plugin-attrs.test
@@ -2253,3 +2253,27 @@ c = attrs.assoc(c, name=42)  # E: Argument "name" to "assoc" of "C" has incompat
 
 [builtins fixtures/plugin_attrs.pyi]
 [typing fixtures/typing-medium.pyi]
+
+[case testFrozenInheritFromGeneric]
+from typing import Generic, TypeVar
+from attrs import field, frozen
+
+T = TypeVar('T')
+
+def f(s: str) -> int:
+    ...
+
+@frozen
+class A(Generic[T]):
+    x: T
+    y: int = field(converter=f)
+
+@frozen
+class B(A[int]):
+    pass
+
+b = B(42, 'spam')
+reveal_type(b.x)  # N: Revealed type is "builtins.int"
+reveal_type(b.y)  # N: Revealed type is "builtins.int"
+
+[builtins fixtures/plugin_attrs.pyi]

From 742b5c68cae5b33c7b53768e874d9bab4344567e Mon Sep 17 00:00:00 2001
From: Jannic Warken <jannic.warken@gmail.com>
Date: Sat, 12 Aug 2023 10:56:09 +0200
Subject: [PATCH 044/288] Support __bool__ with Literal in --warn-unreachable
 (#15645)

This adds support for `Literal` as return type of `__bool__` in the
reachability analysis.

Fixes https://github.com/python/mypy/issues/7008
---
 mypy/typeops.py                            | 12 +++-----
 test-data/unit/check-unreachable-code.test | 32 ++++++++++++++++++++++
 2 files changed, 36 insertions(+), 8 deletions(-)

diff --git a/mypy/typeops.py b/mypy/typeops.py
index 65ab4340403c..4233cc1b2b33 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -602,10 +602,8 @@ def true_only(t: Type) -> ProperType:
     else:
         ret_type = _get_type_special_method_bool_ret_type(t)
 
-        if ret_type and ret_type.can_be_false and not ret_type.can_be_true:
-            new_t = copy_type(t)
-            new_t.can_be_true = False
-            return new_t
+        if ret_type and not ret_type.can_be_true:
+            return UninhabitedType(line=t.line, column=t.column)
 
         new_t = copy_type(t)
         new_t.can_be_false = False
@@ -637,10 +635,8 @@ def false_only(t: Type) -> ProperType:
     else:
         ret_type = _get_type_special_method_bool_ret_type(t)
 
-        if ret_type and ret_type.can_be_true and not ret_type.can_be_false:
-            new_t = copy_type(t)
-            new_t.can_be_false = False
-            return new_t
+        if ret_type and not ret_type.can_be_false:
+            return UninhabitedType(line=t.line)
 
         new_t = copy_type(t)
         new_t.can_be_true = False
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
index 7a6c2cbfd1c7..20b5dea9fc87 100644
--- a/test-data/unit/check-unreachable-code.test
+++ b/test-data/unit/check-unreachable-code.test
@@ -1379,6 +1379,38 @@ def f() -> None:
         x = 1  # E: Statement is unreachable
 [builtins fixtures/dict.pyi]
 
+[case testUnreachableLiteralFrom__bool__]
+# flags: --warn-unreachable
+from typing_extensions import Literal
+
+class Truth:
+    def __bool__(self) -> Literal[True]: ...
+
+class Lie:
+    def __bool__(self) -> Literal[False]: ...
+
+class Maybe:
+    def __bool__(self) -> Literal[True | False]: ...
+
+t = Truth()
+if t:
+    x = 1
+else:
+    x = 2  # E: Statement is unreachable
+
+if Lie():
+    x = 3  # E: Statement is unreachable
+
+if Maybe():
+    x = 4
+
+
+def foo() -> bool: ...
+
+y = Truth() or foo()  # E: Right operand of "or" is never evaluated
+z = Lie() and foo()  # E: Right operand of "and" is never evaluated
+[builtins fixtures/dict.pyi]
+
 [case testUnreachableModuleBody1]
 # flags: --warn-unreachable
 from typing import NoReturn

From 9dbb1232c0f2158f72d099d35d85252696d308ff Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Sat, 12 Aug 2023 11:05:08 +0200
Subject: [PATCH 045/288] Apply TypeVar defaults to functions (PEP 696)
 (#15387)

Use TypeVar defaults to resolve fallback return type of functions.

**Note**: Defaults for TypeVarTuples don't yet work, probably a result
of the limited support for `Unpack` / `TypeVarTuple`.

Ref: #14851
---
 mypy/applytype.py                          |  5 +++-
 test-data/unit/check-typevar-defaults.test | 35 ++++++++++++++++++++++
 2 files changed, 39 insertions(+), 1 deletion(-)

diff --git a/mypy/applytype.py b/mypy/applytype.py
index 55a51d4adbb6..f8be63362a6b 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -18,6 +18,7 @@
     TypeVarLikeType,
     TypeVarTupleType,
     TypeVarType,
+    UninhabitedType,
     UnpackType,
     get_proper_type,
 )
@@ -32,13 +33,15 @@ def get_target_type(
     context: Context,
     skip_unsatisfied: bool,
 ) -> Type | None:
+    p_type = get_proper_type(type)
+    if isinstance(p_type, UninhabitedType) and tvar.has_default():
+        return tvar.default
     if isinstance(tvar, ParamSpecType):
         return type
     if isinstance(tvar, TypeVarTupleType):
         return type
     assert isinstance(tvar, TypeVarType)
     values = tvar.values
-    p_type = get_proper_type(type)
     if values:
         if isinstance(p_type, AnyType):
             return type
diff --git a/test-data/unit/check-typevar-defaults.test b/test-data/unit/check-typevar-defaults.test
index 514186aa7518..36ec125eb1a4 100644
--- a/test-data/unit/check-typevar-defaults.test
+++ b/test-data/unit/check-typevar-defaults.test
@@ -81,3 +81,38 @@ T2 = TypeVar("T2", bound=List[str], default=List[int])  # E: TypeVar default mus
 T3 = TypeVar("T3", int, str, default=bytes)  # E: TypeVar default must be one of the constraint types
 T4 = TypeVar("T4", int, str, default=Union[int, str])  # E: TypeVar default must be one of the constraint types
 T5 = TypeVar("T5", float, str, default=int)  # E: TypeVar default must be one of the constraint types
+
+[case testTypeVarDefaultsFunctions]
+from typing import TypeVar, ParamSpec, List, Union, Callable, Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+T1 = TypeVar("T1", default=str)
+T2 = TypeVar("T2", bound=str, default=str)
+T3 = TypeVar("T3", bytes, str, default=str)
+P1 = ParamSpec("P1", default=[int, str])
+Ts1 = TypeVarTuple("Ts1", default=Unpack[Tuple[int, str]])
+
+def callback1(x: str) -> None: ...
+
+def func_a1(x: Union[int, T1]) -> T1: ...
+reveal_type(func_a1(2))  # N: Revealed type is "builtins.str"
+reveal_type(func_a1(2.1))  # N: Revealed type is "builtins.float"
+
+def func_a2(x: Union[int, T1]) -> List[T1]: ...
+reveal_type(func_a2(2))  # N: Revealed type is "builtins.list[builtins.str]"
+reveal_type(func_a2(2.1))  # N: Revealed type is "builtins.list[builtins.float]"
+
+def func_a3(x: Union[int, T2]) -> T2: ...
+reveal_type(func_a3(2))  # N: Revealed type is "builtins.str"
+
+def func_a4(x: Union[int, T3]) -> T3: ...
+reveal_type(func_a4(2))  # N: Revealed type is "builtins.str"
+
+def func_b1(x: Union[int, Callable[P1, None]]) -> Callable[P1, None]: ...
+reveal_type(func_b1(callback1))  # N: Revealed type is "def (x: builtins.str)"
+reveal_type(func_b1(2))  # N: Revealed type is "def (builtins.int, builtins.str)"
+
+def func_c1(x: Union[int, Callable[[Unpack[Ts1]], None]]) -> Tuple[Unpack[Ts1]]: ...
+# reveal_type(func_c1(callback1))  # Revealed type is "builtins.tuple[str]"  # TODO
+# reveal_type(func_c1(2))  # Revealed type is "builtins.tuple[builtins.int, builtins.str]"  # TODO
+[builtins fixtures/tuple.pyi]

From 3631528796cbf2a5a825d6a5fb2010853a0a46bd Mon Sep 17 00:00:00 2001
From: Gregory Santosa <94944372+gregorysantosa@users.noreply.github.com>
Date: Sat, 12 Aug 2023 02:10:27 -0700
Subject: [PATCH 046/288] 'await' in non-async function is a blocking error
 (#15384)

Fixes, https://github.com/python/mypy/issues/15339
---
 mypy/errorcodes.py                    | 4 +++-
 mypy/semanal.py                       | 7 ++++++-
 test-data/unit/check-async-await.test | 2 +-
 3 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py
index e7d0c16f2d2d..3594458fa362 100644
--- a/mypy/errorcodes.py
+++ b/mypy/errorcodes.py
@@ -152,7 +152,9 @@ def __hash__(self) -> int:
 TOP_LEVEL_AWAIT: Final = ErrorCode(
     "top-level-await", "Warn about top level await expressions", "General"
 )
-
+AWAIT_NOT_ASYNC: Final = ErrorCode(
+    "await-not-async", 'Warn about "await" outside coroutine ("async def")', "General"
+)
 # These error codes aren't enabled by default.
 NO_UNTYPED_DEF: Final[ErrorCode] = ErrorCode(
     "no-untyped-def", "Check that every function has an annotation", "General"
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 5b1aea4239f5..e21fc9f1c23f 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -5455,7 +5455,12 @@ def visit_await_expr(self, expr: AwaitExpr) -> None:
             # support top level awaits.
             self.fail('"await" outside function', expr, serious=True, code=codes.TOP_LEVEL_AWAIT)
         elif not self.function_stack[-1].is_coroutine:
-            self.fail('"await" outside coroutine ("async def")', expr, serious=True, blocker=True)
+            self.fail(
+                '"await" outside coroutine ("async def")',
+                expr,
+                serious=True,
+                code=codes.AWAIT_NOT_ASYNC,
+            )
         expr.expr.accept(self)
 
     #
diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test
index af6c31624b96..653025a0bb24 100644
--- a/test-data/unit/check-async-await.test
+++ b/test-data/unit/check-async-await.test
@@ -974,7 +974,7 @@ crasher = [await foo(x) for x in [1, 2, 3]]  # E: "await" outside function  [top
 
 def bad() -> None:
     # These are always critical / syntax issues:
-    y = [await foo(x) for x in [1, 2, 3]]  # E: "await" outside coroutine ("async def")
+    y = [await foo(x) for x in [1, 2, 3]]  # E: "await" outside coroutine ("async def")  [await-not-async]
 async def good() -> None:
     y = [await foo(x) for x in [1, 2, 3]]  # OK
 [builtins fixtures/async_await.pyi]

From 041a8af015881655a88f5aff5351576a1ef9010b Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
 <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Sat, 12 Aug 2023 21:15:48 +0100
Subject: [PATCH 047/288] [pre-commit.ci] pre-commit autoupdate (#15796)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

<!--pre-commit.ci start-->
updates:
- [github.com/astral-sh/ruff-pre-commit: v0.0.280 →
v0.0.281](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.280...v0.0.281)
<!--pre-commit.ci end-->

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f2367f63bb3d..8650a2868cd6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -10,7 +10,7 @@ repos:
     hooks:
       - id: black
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.280  # must match test-requirements.txt
+    rev: v0.0.281  # must match test-requirements.txt
     hooks:
       - id: ruff
         args: [--exit-non-zero-on-fix]

From 9787a26f97fd6f216260aac89aa2253ed655195b Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 12 Aug 2023 14:00:59 -0700
Subject: [PATCH 048/288] Document await-not-async error code (#15858)

---
 docs/source/error_code_list.rst | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst
index f7f702aa7fcb..157f90249af8 100644
--- a/docs/source/error_code_list.rst
+++ b/docs/source/error_code_list.rst
@@ -1027,9 +1027,20 @@ example:
 
    top = await f()  # Error: "await" outside function  [top-level-await]
 
+.. _code-await-not-async:
+
+Warn about await expressions used outside of coroutines [await-not-async]
+-------------------------------------------------------------------------
+
+``await`` must be used inside a coroutine.
+
+.. code-block:: python
+
+   async def f() -> None:
+       ...
+
    def g() -> None:
-       # This is a blocker error and cannot be silenced.
-       await f()  # Error: "await" outside coroutine ("async def")
+       await f()  # Error: "await" outside coroutine ("async def")  [await-not-async]
 
 .. _code-assert-type:
 

From 117b9147d975c51e27dbea9ab415bc0b3bf4ac69 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sun, 13 Aug 2023 21:19:55 +0100
Subject: [PATCH 049/288] Add option to selectively disable
 --disallow-untyped-calls (#15845)

Fixes #10757

It is surprisingly one of the most upvoted issues. Also it looks quite
easy to implement, so why not. Note I also try to improve docs for
per-module logic for `disallow_untyped_calls`, as there is currently
some confusion.

---------

Co-authored-by: Ivan Levkivskyi <ilevkivskyi@hopper.com>
---
 docs/source/command_line.rst    | 28 +++++++++++++++++
 docs/source/config_file.rst     | 33 +++++++++++++++++++-
 mypy/checkexpr.py               | 23 +++++++++-----
 mypy/config_parser.py           | 18 +++++++++++
 mypy/main.py                    | 17 +++++++++-
 mypy/options.py                 |  4 +++
 test-data/unit/check-flags.test | 55 +++++++++++++++++++++++++++++++++
 7 files changed, 169 insertions(+), 9 deletions(-)

diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
index d9de5cd8f9bd..727d500e2d4d 100644
--- a/docs/source/command_line.rst
+++ b/docs/source/command_line.rst
@@ -350,6 +350,34 @@ definitions or calls.
     This flag reports an error whenever a function with type annotations
     calls a function defined without annotations.
 
+.. option:: --untyped-calls-exclude
+
+    This flag allows to selectively disable :option:`--disallow-untyped-calls`
+    for functions and methods defined in specific packages, modules, or classes.
+    Note that each exclude entry acts as a prefix. For example (assuming there
+    are no type annotations for ``third_party_lib`` available):
+
+    .. code-block:: python
+
+        # mypy --disallow-untyped-calls
+        #      --untyped-calls-exclude=third_party_lib.module_a
+        #      --untyped-calls-exclude=foo.A
+        from third_party_lib.module_a import some_func
+        from third_party_lib.module_b import other_func
+        import foo
+
+        some_func()  # OK, function comes from module `third_party_lib.module_a`
+        other_func()  # E: Call to untyped function "other_func" in typed context
+
+        foo.A().meth()  # OK, method was defined in class `foo.A`
+        foo.B().meth()  # E: Call to untyped function "meth" in typed context
+
+        # file foo.py
+        class A:
+            def meth(self): pass
+        class B:
+            def meth(self): pass
+
 .. option:: --disallow-untyped-defs
 
     This flag reports an error whenever it encounters a function definition
diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst
index 9e79ff99937b..c0798bbf03f1 100644
--- a/docs/source/config_file.rst
+++ b/docs/source/config_file.rst
@@ -490,7 +490,38 @@ section of the command line docs.
     :default: False
 
     Disallows calling functions without type annotations from functions with type
-    annotations.
+    annotations. Note that when used in per-module options, it enables/disables
+    this check **inside** the module(s) specified, not for functions that come
+    from that module(s), for example config like this:
+
+    .. code-block:: ini
+
+        [mypy]
+        disallow_untyped_calls = True
+
+        [mypy-some.library.*]
+        disallow_untyped_calls = False
+
+    will disable this check inside ``some.library``, not for your code that
+    imports ``some.library``. If you want to selectively disable this check for
+    all your code that imports ``some.library`` you should instead use
+    :confval:`untyped_calls_exclude`, for example:
+
+    .. code-block:: ini
+
+        [mypy]
+        disallow_untyped_calls = True
+        untyped_calls_exclude = some.library
+
+.. confval:: untyped_calls_exclude
+
+    :type: comma-separated list of strings
+
+    Selectively excludes functions and methods defined in specific packages,
+    modules, and classes from action of :confval:`disallow_untyped_calls`.
+    This also applies to all submodules of packages (i.e. everything inside
+    a given prefix). Note, this option does not support per-file configuration,
+    the exclusions list is defined globally for all your code.
 
 .. confval:: disallow_untyped_defs
 
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 9e46d9ee39cb..6df64b32493c 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -529,13 +529,6 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) ->
         callee_type = get_proper_type(
             self.accept(e.callee, type_context, always_allow_any=True, is_callee=True)
         )
-        if (
-            self.chk.options.disallow_untyped_calls
-            and self.chk.in_checked_function()
-            and isinstance(callee_type, CallableType)
-            and callee_type.implicit
-        ):
-            self.msg.untyped_function_call(callee_type, e)
 
         # Figure out the full name of the callee for plugin lookup.
         object_type = None
@@ -561,6 +554,22 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) ->
             ):
                 member = e.callee.name
                 object_type = self.chk.lookup_type(e.callee.expr)
+
+        if (
+            self.chk.options.disallow_untyped_calls
+            and self.chk.in_checked_function()
+            and isinstance(callee_type, CallableType)
+            and callee_type.implicit
+        ):
+            if fullname is None and member is not None:
+                assert object_type is not None
+                fullname = self.method_fullname(object_type, member)
+            if not fullname or not any(
+                fullname == p or fullname.startswith(f"{p}.")
+                for p in self.chk.options.untyped_calls_exclude
+            ):
+                self.msg.untyped_function_call(callee_type, e)
+
         ret_type = self.check_call_expr_with_callee_type(
             callee_type, e, fullname, object_type, member
         )
diff --git a/mypy/config_parser.py b/mypy/config_parser.py
index 47b0bc3acabc..a84f3594a0d2 100644
--- a/mypy/config_parser.py
+++ b/mypy/config_parser.py
@@ -81,6 +81,20 @@ def validate_codes(codes: list[str]) -> list[str]:
     return codes
 
 
+def validate_package_allow_list(allow_list: list[str]) -> list[str]:
+    for p in allow_list:
+        msg = f"Invalid allow list entry: {p}"
+        if "*" in p:
+            raise argparse.ArgumentTypeError(
+                f"{msg} (entries are already prefixes so must not contain *)"
+            )
+        if "\\" in p or "/" in p:
+            raise argparse.ArgumentTypeError(
+                f"{msg} (entries must be packages like foo.bar not directories or files)"
+            )
+    return allow_list
+
+
 def expand_path(path: str) -> str:
     """Expand the user home directory and any environment variables contained within
     the provided path.
@@ -164,6 +178,9 @@ def split_commas(value: str) -> list[str]:
     "plugins": lambda s: [p.strip() for p in split_commas(s)],
     "always_true": lambda s: [p.strip() for p in split_commas(s)],
     "always_false": lambda s: [p.strip() for p in split_commas(s)],
+    "untyped_calls_exclude": lambda s: validate_package_allow_list(
+        [p.strip() for p in split_commas(s)]
+    ),
     "enable_incomplete_feature": lambda s: [p.strip() for p in split_commas(s)],
     "disable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]),
     "enable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]),
@@ -187,6 +204,7 @@ def split_commas(value: str) -> list[str]:
         "plugins": try_split,
         "always_true": try_split,
         "always_false": try_split,
+        "untyped_calls_exclude": lambda s: validate_package_allow_list(try_split(s)),
         "enable_incomplete_feature": try_split,
         "disable_error_code": lambda s: validate_codes(try_split(s)),
         "enable_error_code": lambda s: validate_codes(try_split(s)),
diff --git a/mypy/main.py b/mypy/main.py
index 6173fd6fc1a8..30f6cfe97455 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -11,7 +11,12 @@
 from typing import IO, Any, Final, NoReturn, Sequence, TextIO
 
 from mypy import build, defaults, state, util
-from mypy.config_parser import get_config_module_names, parse_config_file, parse_version
+from mypy.config_parser import (
+    get_config_module_names,
+    parse_config_file,
+    parse_version,
+    validate_package_allow_list,
+)
 from mypy.errorcodes import error_codes
 from mypy.errors import CompileError
 from mypy.find_sources import InvalidSourceList, create_source_list
@@ -675,6 +680,14 @@ def add_invertible_flag(
         " from functions with type annotations",
         group=untyped_group,
     )
+    untyped_group.add_argument(
+        "--untyped-calls-exclude",
+        metavar="MODULE",
+        action="append",
+        default=[],
+        help="Disable --disallow-untyped-calls for functions/methods coming"
+        " from specific package, module, or class",
+    )
     add_invertible_flag(
         "--disallow-untyped-defs",
         default=False,
@@ -1307,6 +1320,8 @@ def set_strict_flags() -> None:
             % ", ".join(sorted(overlap))
         )
 
+    validate_package_allow_list(options.untyped_calls_exclude)
+
     # Process `--enable-error-code` and `--disable-error-code` flags
     disabled_codes = set(options.disable_error_code)
     enabled_codes = set(options.enable_error_code)
diff --git a/mypy/options.py b/mypy/options.py
index 75343acd38bb..9b2e88335b24 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -136,6 +136,10 @@ def __init__(self) -> None:
         # Disallow calling untyped functions from typed ones
         self.disallow_untyped_calls = False
 
+        # Always allow untyped calls for function coming from modules/packages
+        # in this list (each item effectively acts as a prefix match)
+        self.untyped_calls_exclude: list[str] = []
+
         # Disallow defining untyped (or incompletely typed) functions
         self.disallow_untyped_defs = False
 
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
index e21157eae991..96f78d81dd16 100644
--- a/test-data/unit/check-flags.test
+++ b/test-data/unit/check-flags.test
@@ -2077,6 +2077,61 @@ y = 1
 f(reveal_type(y)) # E: Call to untyped function "f" in typed context \
                   # N: Revealed type is "builtins.int"
 
+[case testDisallowUntypedCallsAllowListFlags]
+# flags: --disallow-untyped-calls  --untyped-calls-exclude=foo --untyped-calls-exclude=bar.A
+from foo import test_foo
+from bar import A, B
+from baz import test_baz
+from foobar import bad
+
+test_foo(42)  # OK
+test_baz(42)  # E: Call to untyped function "test_baz" in typed context
+bad(42)  # E: Call to untyped function "bad" in typed context
+
+a: A
+b: B
+a.meth()  # OK
+b.meth()  # E: Call to untyped function "meth" in typed context
+[file foo.py]
+def test_foo(x): pass
+[file foobar.py]
+def bad(x): pass
+[file bar.py]
+class A:
+    def meth(self): pass
+class B:
+    def meth(self): pass
+[file baz.py]
+def test_baz(x): pass
+
+[case testDisallowUntypedCallsAllowListConfig]
+# flags: --config-file tmp/mypy.ini
+from foo import test_foo
+from bar import A, B
+from baz import test_baz
+
+test_foo(42)  # OK
+test_baz(42)  # E: Call to untyped function "test_baz" in typed context
+
+a: A
+b: B
+a.meth()  # OK
+b.meth()  # E: Call to untyped function "meth" in typed context
+[file foo.py]
+def test_foo(x): pass
+[file bar.py]
+class A:
+    def meth(self): pass
+class B:
+    def meth(self): pass
+[file baz.py]
+def test_baz(x): pass
+
+[file mypy.ini]
+\[mypy]
+disallow_untyped_calls = True
+untyped_calls_exclude = foo, bar.A
+
 [case testPerModuleErrorCodes]
 # flags: --config-file tmp/mypy.ini
 import tests.foo

From 98881d2cbf6b5a410b5eec2971edc80146422bac Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sun, 13 Aug 2023 13:20:23 -0700
Subject: [PATCH 050/288] Add regression test for expand type -> simplified
 union crash (#15864)

See #13431

Authored by ilevkivskyi
---
 test-data/unit/check-callable.test | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/test-data/unit/check-callable.test b/test-data/unit/check-callable.test
index 7d25eb271f53..07c42de74bb3 100644
--- a/test-data/unit/check-callable.test
+++ b/test-data/unit/check-callable.test
@@ -587,3 +587,14 @@ class C(B):
     def f(self, x: int) -> C: ...
 class B: ...
 [builtins fixtures/classmethod.pyi]
+
+[case testCallableUnionCallback]
+from typing import Union, Callable, TypeVar
+
+TA = TypeVar("TA", bound="A")
+class A:
+    def __call__(self: TA, other: Union[Callable, TA]) -> TA: ...
+a: A
+a()  # E: Missing positional argument "other" in call to "__call__" of "A"
+a(a)
+a(lambda: None)

From edbfdaa802fd6d951026545b0eddcba5494fbb0b Mon Sep 17 00:00:00 2001
From: chylek <1331917+chylek@users.noreply.github.com>
Date: Sun, 13 Aug 2023 22:46:58 +0200
Subject: [PATCH 051/288] Add option to include docstrings with stubgen 
 (#13284)

### Description

Closes #11965.

Add a --include-docstrings flag to stubgen. This was suggested in #11965
along with a use case.
When using this flag, the .pyi files will include docstrings for Python
classes and functions and for C extension functions.
The flag is optional and does not change the default stubgen behaviour.
When using the flag, the resulting function stubs that contain docstring
will no longer be one-liners, but functions without a docstring still
retain the default one-liner style.

Example input:
```python
class A:
    """class docstring"""
    def func():
        """func docstring"""
        ...
    def nodoc():
        ...
```
output:
```python
class A:
    """class docstring"""
    def func() -> None:
        """func docstring"""
        ...
    def nodoc() -> None: ...
```
## Test Plan

Tests `testIncludeDocstrings` and `testIgnoreDocstrings` were added to
`test-data/unit/stubgen.test` to ensure the code works as intended. All
other tests passed as well.

C extension docstrings are tested using an updated bash script
`misc/test_stubgenc.sh` with test data in
`test-data/pybind11_mypy_demo/stubgen-include-docs` in same fashion as
in an already existing test.

---------

Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
---
 docs/source/stubgen.rst                       |   5 +
 misc/test-stubgenc.sh                         |  30 +++--
 mypy/fastparse.py                             |   4 +
 mypy/nodes.py                                 |   4 +
 mypy/options.py                               |   6 +
 mypy/stubgen.py                               |  42 ++++++-
 mypy/stubgenc.py                              |  29 +++--
 mypy/util.py                                  |  17 +++
 test-data/pybind11_mypy_demo/src/main.cpp     |   4 +-
 .../pybind11_mypy_demo/__init__.pyi           |   0
 .../pybind11_mypy_demo/basics.pyi             | 112 ++++++++++++++++++
 test-data/unit/stubgen.test                   |  79 ++++++++++++
 12 files changed, 311 insertions(+), 21 deletions(-)
 create mode 100644 test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi
 create mode 100644 test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi

diff --git a/docs/source/stubgen.rst b/docs/source/stubgen.rst
index f06c9c066bb7..2de0743572e7 100644
--- a/docs/source/stubgen.rst
+++ b/docs/source/stubgen.rst
@@ -163,6 +163,11 @@ Additional flags
     Instead, only export imported names that are not referenced in the module
     that contains the import.
 
+.. option:: --include-docstrings
+
+    Include docstrings in stubs. This will add docstrings to Python function and
+    classes stubs and to C extension function stubs.
+
 .. option:: --search-path PATH
 
     Specify module search directories, separated by colons (only used if
diff --git a/misc/test-stubgenc.sh b/misc/test-stubgenc.sh
index 7da135f0bf16..7713e1b04e43 100755
--- a/misc/test-stubgenc.sh
+++ b/misc/test-stubgenc.sh
@@ -3,17 +3,33 @@
 set -e
 set -x
 
-cd "$(dirname $0)/.."
+cd "$(dirname "$0")/.."
 
 # Install dependencies, demo project and mypy
 python -m pip install -r test-requirements.txt
 python -m pip install ./test-data/pybind11_mypy_demo
 python -m pip install .
 
-# Remove expected stubs and generate new inplace
-STUBGEN_OUTPUT_FOLDER=./test-data/pybind11_mypy_demo/stubgen
-rm -rf $STUBGEN_OUTPUT_FOLDER/*
-stubgen -p pybind11_mypy_demo -o $STUBGEN_OUTPUT_FOLDER
+EXIT=0
 
-# Compare generated stubs to expected ones
-git diff --exit-code $STUBGEN_OUTPUT_FOLDER
+# performs the stubgenc test
+# first argument is the test result folder
+# everything else is passed to stubgen as its arguments
+function stubgenc_test() {
+    # Remove expected stubs and generate new inplace
+    STUBGEN_OUTPUT_FOLDER=./test-data/pybind11_mypy_demo/$1
+    rm -rf "${STUBGEN_OUTPUT_FOLDER:?}/*"
+    stubgen -o "$STUBGEN_OUTPUT_FOLDER" "${@:2}"
+
+    # Compare generated stubs to expected ones
+    if ! git diff --exit-code "$STUBGEN_OUTPUT_FOLDER";
+    then
+        EXIT=$?
+    fi
+}
+
+# create stubs without docstrings
+stubgenc_test stubgen -p pybind11_mypy_demo
+# create stubs with docstrings
+stubgenc_test stubgen-include-docs -p pybind11_mypy_demo --include-docstrings
+exit $EXIT
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index f7a98e9b2b8f..3a26cfe7d6ff 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -1008,6 +1008,8 @@ def do_func_def(
             # FuncDef overrides set_line -- can't use self.set_line
             func_def.set_line(lineno, n.col_offset, end_line, end_column)
             retval = func_def
+        if self.options.include_docstrings:
+            func_def.docstring = ast3.get_docstring(n, clean=False)
         self.class_and_function_stack.pop()
         return retval
 
@@ -1121,6 +1123,8 @@ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef:
         cdef.line = n.lineno
         cdef.deco_line = n.decorator_list[0].lineno if n.decorator_list else None
 
+        if self.options.include_docstrings:
+            cdef.docstring = ast3.get_docstring(n, clean=False)
         cdef.column = n.col_offset
         cdef.end_line = getattr(n, "end_lineno", None)
         cdef.end_column = getattr(n, "end_col_offset", None)
diff --git a/mypy/nodes.py b/mypy/nodes.py
index ebd222f4f253..452a4f643255 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -751,6 +751,7 @@ class FuncDef(FuncItem, SymbolNode, Statement):
         "is_mypy_only",
         # Present only when a function is decorated with @typing.datasclass_transform or similar
         "dataclass_transform_spec",
+        "docstring",
     )
 
     __match_args__ = ("name", "arguments", "type", "body")
@@ -779,6 +780,7 @@ def __init__(
         # Definitions that appear in if TYPE_CHECKING are marked with this flag.
         self.is_mypy_only = False
         self.dataclass_transform_spec: DataclassTransformSpec | None = None
+        self.docstring: str | None = None
 
     @property
     def name(self) -> str:
@@ -1081,6 +1083,7 @@ class ClassDef(Statement):
         "analyzed",
         "has_incompatible_baseclass",
         "deco_line",
+        "docstring",
         "removed_statements",
     )
 
@@ -1127,6 +1130,7 @@ def __init__(
         self.has_incompatible_baseclass = False
         # Used for error reporting (to keep backwad compatibility with pre-3.8)
         self.deco_line: int | None = None
+        self.docstring: str | None = None
         self.removed_statements = []
 
     @property
diff --git a/mypy/options.py b/mypy/options.py
index 9b2e88335b24..5e451c0aa0a3 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -283,6 +283,12 @@ def __init__(self) -> None:
         # mypy. (Like mypyc.)
         self.preserve_asts = False
 
+        # If True, function and class docstrings will be extracted and retained.
+        # This isn't exposed as a command line option
+        # because it is intended for software integrating with
+        # mypy. (Like stubgen.)
+        self.include_docstrings = False
+
         # Paths of user plugins
         self.plugins: list[str] = []
 
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index a77ee738d56f..b6fc3e8b7377 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -243,6 +243,7 @@ def __init__(
         verbose: bool,
         quiet: bool,
         export_less: bool,
+        include_docstrings: bool,
     ) -> None:
         # See parse_options for descriptions of the flags.
         self.pyversion = pyversion
@@ -261,6 +262,7 @@ def __init__(
         self.verbose = verbose
         self.quiet = quiet
         self.export_less = export_less
+        self.include_docstrings = include_docstrings
 
 
 class StubSource:
@@ -624,6 +626,7 @@ def __init__(
         include_private: bool = False,
         analyzed: bool = False,
         export_less: bool = False,
+        include_docstrings: bool = False,
     ) -> None:
         # Best known value of __all__.
         self._all_ = _all_
@@ -638,6 +641,7 @@ def __init__(
         self._state = EMPTY
         self._toplevel_names: list[str] = []
         self._include_private = include_private
+        self._include_docstrings = include_docstrings
         self._current_class: ClassDef | None = None
         self.import_tracker = ImportTracker()
         # Was the tree semantically analysed before?
@@ -809,7 +813,13 @@ def visit_func_def(self, o: FuncDef) -> None:
             retfield = " -> " + retname
 
         self.add(", ".join(args))
-        self.add(f"){retfield}: ...\n")
+        self.add(f"){retfield}:")
+        if self._include_docstrings and o.docstring:
+            docstring = mypy.util.quote_docstring(o.docstring)
+            self.add(f"\n{self._indent}    {docstring}\n")
+        else:
+            self.add(" ...\n")
+
         self._state = FUNC
 
     def is_none_expr(self, expr: Expression) -> bool:
@@ -910,8 +920,11 @@ def visit_class_def(self, o: ClassDef) -> None:
         if base_types:
             self.add(f"({', '.join(base_types)})")
         self.add(":\n")
-        n = len(self._output)
         self._indent += "    "
+        if self._include_docstrings and o.docstring:
+            docstring = mypy.util.quote_docstring(o.docstring)
+            self.add(f"{self._indent}{docstring}\n")
+        n = len(self._output)
         self._vars.append([])
         super().visit_class_def(o)
         self._indent = self._indent[:-4]
@@ -920,7 +933,8 @@ def visit_class_def(self, o: ClassDef) -> None:
         if len(self._output) == n:
             if self._state == EMPTY_CLASS and sep is not None:
                 self._output[sep] = ""
-            self._output[-1] = self._output[-1][:-1] + " ...\n"
+            if not (self._include_docstrings and o.docstring):
+                self._output[-1] = self._output[-1][:-1] + " ...\n"
             self._state = EMPTY_CLASS
         else:
             self._state = CLASS
@@ -1710,6 +1724,7 @@ def mypy_options(stubgen_options: Options) -> MypyOptions:
     options.show_traceback = True
     options.transform_source = remove_misplaced_type_comments
     options.preserve_asts = True
+    options.include_docstrings = stubgen_options.include_docstrings
 
     # Override cache_dir if provided in the environment
     environ_cache_dir = os.getenv("MYPY_CACHE_DIR", "")
@@ -1773,6 +1788,7 @@ def generate_stub_from_ast(
     parse_only: bool = False,
     include_private: bool = False,
     export_less: bool = False,
+    include_docstrings: bool = False,
 ) -> None:
     """Use analysed (or just parsed) AST to generate type stub for single file.
 
@@ -1784,6 +1800,7 @@ def generate_stub_from_ast(
         include_private=include_private,
         analyzed=not parse_only,
         export_less=export_less,
+        include_docstrings=include_docstrings,
     )
     assert mod.ast is not None, "This function must be used only with analyzed modules"
     mod.ast.accept(gen)
@@ -1845,7 +1862,12 @@ def generate_stubs(options: Options) -> None:
         files.append(target)
         with generate_guarded(mod.module, target, options.ignore_errors, options.verbose):
             generate_stub_from_ast(
-                mod, target, options.parse_only, options.include_private, options.export_less
+                mod,
+                target,
+                options.parse_only,
+                options.include_private,
+                options.export_less,
+                include_docstrings=options.include_docstrings,
             )
 
     # Separately analyse C modules using different logic.
@@ -1859,7 +1881,11 @@ def generate_stubs(options: Options) -> None:
         files.append(target)
         with generate_guarded(mod.module, target, options.ignore_errors, options.verbose):
             generate_stub_for_c_module(
-                mod.module, target, known_modules=all_modules, sig_generators=sig_generators
+                mod.module,
+                target,
+                known_modules=all_modules,
+                sig_generators=sig_generators,
+                include_docstrings=options.include_docstrings,
             )
     num_modules = len(py_modules) + len(c_modules)
     if not options.quiet and num_modules > 0:
@@ -1913,6 +1939,11 @@ def parse_options(args: list[str]) -> Options:
         action="store_true",
         help="don't implicitly export all names imported from other modules in the same package",
     )
+    parser.add_argument(
+        "--include-docstrings",
+        action="store_true",
+        help="include existing docstrings with the stubs",
+    )
     parser.add_argument("-v", "--verbose", action="store_true", help="show more verbose messages")
     parser.add_argument("-q", "--quiet", action="store_true", help="show fewer messages")
     parser.add_argument(
@@ -1993,6 +2024,7 @@ def parse_options(args: list[str]) -> Options:
         verbose=ns.verbose,
         quiet=ns.quiet,
         export_less=ns.export_less,
+        include_docstrings=ns.include_docstrings,
     )
 
 
diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py
index 8aa1fb3d2c0a..31487f9d0dcf 100755
--- a/mypy/stubgenc.py
+++ b/mypy/stubgenc.py
@@ -14,6 +14,7 @@
 from types import ModuleType
 from typing import Any, Final, Iterable, Mapping
 
+import mypy.util
 from mypy.moduleinspect import is_c_module
 from mypy.stubdoc import (
     ArgSig,
@@ -169,6 +170,7 @@ def generate_stub_for_c_module(
     target: str,
     known_modules: list[str],
     sig_generators: Iterable[SignatureGenerator],
+    include_docstrings: bool = False,
 ) -> None:
     """Generate stub for C module.
 
@@ -201,6 +203,7 @@ def generate_stub_for_c_module(
                 known_modules=known_modules,
                 imports=imports,
                 sig_generators=sig_generators,
+                include_docstrings=include_docstrings,
             )
             done.add(name)
     types: list[str] = []
@@ -216,6 +219,7 @@ def generate_stub_for_c_module(
                 known_modules=known_modules,
                 imports=imports,
                 sig_generators=sig_generators,
+                include_docstrings=include_docstrings,
             )
             done.add(name)
     variables = []
@@ -319,15 +323,17 @@ def generate_c_function_stub(
     self_var: str | None = None,
     cls: type | None = None,
     class_name: str | None = None,
+    include_docstrings: bool = False,
 ) -> None:
     """Generate stub for a single function or method.
 
-    The result (always a single line) will be appended to 'output'.
+    The result will be appended to 'output'.
     If necessary, any required names will be added to 'imports'.
     The 'class_name' is used to find signature of __init__ or __new__ in
     'class_sigs'.
     """
     inferred: list[FunctionSig] | None = None
+    docstr: str | None = None
     if class_name:
         # method:
         assert cls is not None, "cls should be provided for methods"
@@ -379,13 +385,19 @@ def generate_c_function_stub(
             # a sig generator indicates @classmethod by specifying the cls arg
             if class_name and signature.args and signature.args[0].name == "cls":
                 output.append("@classmethod")
-            output.append(
-                "def {function}({args}) -> {ret}: ...".format(
-                    function=name,
-                    args=", ".join(args),
-                    ret=strip_or_import(signature.ret_type, module, known_modules, imports),
-                )
+            output_signature = "def {function}({args}) -> {ret}:".format(
+                function=name,
+                args=", ".join(args),
+                ret=strip_or_import(signature.ret_type, module, known_modules, imports),
             )
+            if include_docstrings and docstr:
+                docstr_quoted = mypy.util.quote_docstring(docstr.strip())
+                docstr_indented = "\n    ".join(docstr_quoted.split("\n"))
+                output.append(output_signature)
+                output.extend(f"    {docstr_indented}".split("\n"))
+            else:
+                output_signature += " ..."
+                output.append(output_signature)
 
 
 def strip_or_import(
@@ -493,6 +505,7 @@ def generate_c_type_stub(
     known_modules: list[str],
     imports: list[str],
     sig_generators: Iterable[SignatureGenerator],
+    include_docstrings: bool = False,
 ) -> None:
     """Generate stub for a single class using runtime introspection.
 
@@ -535,6 +548,7 @@ def generate_c_type_stub(
                 cls=obj,
                 class_name=class_name,
                 sig_generators=sig_generators,
+                include_docstrings=include_docstrings,
             )
         elif is_c_property(raw_value):
             generate_c_property_stub(
@@ -557,6 +571,7 @@ def generate_c_type_stub(
                 imports=imports,
                 known_modules=known_modules,
                 sig_generators=sig_generators,
+                include_docstrings=include_docstrings,
             )
         else:
             attrs.append((attr, value))
diff --git a/mypy/util.py b/mypy/util.py
index 8a079c5256bc..d0f2f8c6cc36 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -809,3 +809,20 @@ def plural_s(s: int | Sized) -> str:
         return "s"
     else:
         return ""
+
+
+def quote_docstring(docstr: str) -> str:
+    """Returns docstring correctly encapsulated in a single or double quoted form."""
+    # Uses repr to get hint on the correct quotes and escape everything properly.
+    # Creating multiline string for prettier output.
+    docstr_repr = "\n".join(re.split(r"(?<=[^\\])\\n", repr(docstr)))
+
+    if docstr_repr.startswith("'"):
+        # Enforce double quotes when it's safe to do so.
+        # That is when double quotes are not in the string
+        # or when it doesn't end with a single quote.
+        if '"' not in docstr_repr[1:-1] and docstr_repr[-2] != "'":
+            return f'"""{docstr_repr[1:-1]}"""'
+        return f"''{docstr_repr}''"
+    else:
+        return f'""{docstr_repr}""'
diff --git a/test-data/pybind11_mypy_demo/src/main.cpp b/test-data/pybind11_mypy_demo/src/main.cpp
index ff0f93bf7017..00e5b2f4e871 100644
--- a/test-data/pybind11_mypy_demo/src/main.cpp
+++ b/test-data/pybind11_mypy_demo/src/main.cpp
@@ -119,8 +119,8 @@ void bind_basics(py::module& basics) {
   using namespace basics;
 
   // Functions
-  basics.def("answer", &answer);
-  basics.def("sum", &sum);
+  basics.def("answer", &answer, "answer docstring, with end quote\""); // tests explicit docstrings
+  basics.def("sum", &sum, "multiline docstring test, edge case quotes \"\"\"'''");
   basics.def("midpoint", &midpoint, py::arg("left"), py::arg("right"));
   basics.def("weighted_midpoint", weighted_midpoint, py::arg("left"), py::arg("right"), py::arg("alpha")=0.5);
 
diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi
new file mode 100644
index 000000000000..676d7f6d3f15
--- /dev/null
+++ b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi
@@ -0,0 +1,112 @@
+from typing import ClassVar
+
+from typing import overload
+PI: float
+
+class Point:
+    class AngleUnit:
+        __members__: ClassVar[dict] = ...  # read-only
+        __entries: ClassVar[dict] = ...
+        degree: ClassVar[Point.AngleUnit] = ...
+        radian: ClassVar[Point.AngleUnit] = ...
+        def __init__(self, value: int) -> None:
+            """__init__(self: pybind11_mypy_demo.basics.Point.AngleUnit, value: int) -> None"""
+        def __eq__(self, other: object) -> bool:
+            """__eq__(self: object, other: object) -> bool"""
+        def __getstate__(self) -> int:
+            """__getstate__(self: object) -> int"""
+        def __hash__(self) -> int:
+            """__hash__(self: object) -> int"""
+        def __index__(self) -> int:
+            """__index__(self: pybind11_mypy_demo.basics.Point.AngleUnit) -> int"""
+        def __int__(self) -> int:
+            """__int__(self: pybind11_mypy_demo.basics.Point.AngleUnit) -> int"""
+        def __ne__(self, other: object) -> bool:
+            """__ne__(self: object, other: object) -> bool"""
+        def __setstate__(self, state: int) -> None:
+            """__setstate__(self: pybind11_mypy_demo.basics.Point.AngleUnit, state: int) -> None"""
+        @property
+        def name(self) -> str: ...
+        @property
+        def value(self) -> int: ...
+
+    class LengthUnit:
+        __members__: ClassVar[dict] = ...  # read-only
+        __entries: ClassVar[dict] = ...
+        inch: ClassVar[Point.LengthUnit] = ...
+        mm: ClassVar[Point.LengthUnit] = ...
+        pixel: ClassVar[Point.LengthUnit] = ...
+        def __init__(self, value: int) -> None:
+            """__init__(self: pybind11_mypy_demo.basics.Point.LengthUnit, value: int) -> None"""
+        def __eq__(self, other: object) -> bool:
+            """__eq__(self: object, other: object) -> bool"""
+        def __getstate__(self) -> int:
+            """__getstate__(self: object) -> int"""
+        def __hash__(self) -> int:
+            """__hash__(self: object) -> int"""
+        def __index__(self) -> int:
+            """__index__(self: pybind11_mypy_demo.basics.Point.LengthUnit) -> int"""
+        def __int__(self) -> int:
+            """__int__(self: pybind11_mypy_demo.basics.Point.LengthUnit) -> int"""
+        def __ne__(self, other: object) -> bool:
+            """__ne__(self: object, other: object) -> bool"""
+        def __setstate__(self, state: int) -> None:
+            """__setstate__(self: pybind11_mypy_demo.basics.Point.LengthUnit, state: int) -> None"""
+        @property
+        def name(self) -> str: ...
+        @property
+        def value(self) -> int: ...
+    angle_unit: ClassVar[Point.AngleUnit] = ...
+    length_unit: ClassVar[Point.LengthUnit] = ...
+    x_axis: ClassVar[Point] = ...  # read-only
+    y_axis: ClassVar[Point] = ...  # read-only
+    origin: ClassVar[Point] = ...
+    x: float
+    y: float
+    @overload
+    def __init__(self) -> None:
+        """__init__(*args, **kwargs)
+        Overloaded function.
+
+        1. __init__(self: pybind11_mypy_demo.basics.Point) -> None
+
+        2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None"""
+    @overload
+    def __init__(self, x: float, y: float) -> None:
+        """__init__(*args, **kwargs)
+        Overloaded function.
+
+        1. __init__(self: pybind11_mypy_demo.basics.Point) -> None
+
+        2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None"""
+    @overload
+    def distance_to(self, x: float, y: float) -> float:
+        """distance_to(*args, **kwargs)
+        Overloaded function.
+
+        1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float
+
+        2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float"""
+    @overload
+    def distance_to(self, other: Point) -> float:
+        """distance_to(*args, **kwargs)
+        Overloaded function.
+
+        1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float
+
+        2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float"""
+    @property
+    def length(self) -> float: ...
+
+def answer() -> int:
+    '''answer() -> int
+
+    answer docstring, with end quote"'''
+def midpoint(left: float, right: float) -> float:
+    """midpoint(left: float, right: float) -> float"""
+def sum(arg0: int, arg1: int) -> int:
+    '''sum(arg0: int, arg1: int) -> int
+
+    multiline docstring test, edge case quotes """\'\'\''''
+def weighted_midpoint(left: float, right: float, alpha: float = ...) -> float:
+    """weighted_midpoint(left: float, right: float, alpha: float = 0.5) -> float"""
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index f6b71a994153..774a17b76161 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -3183,6 +3183,85 @@ def f2():
 def f1(): ...
 def f2(): ...
 
+[case testIncludeDocstrings]
+# flags:  --include-docstrings
+class A:
+    """class docstring
+
+    a multiline docstring"""
+    def func():
+        """func docstring
+        don't forget to indent"""
+        ...
+    def nodoc():
+        ...
+class B:
+    def quoteA():
+        '''func docstring with quotes"""\\n
+        and an end quote\''''
+        ...
+    def quoteB():
+        '''func docstring with quotes"""
+        \'\'\'
+        and an end quote\\"'''
+        ...
+    def quoteC():
+        """func docstring with end quote\\\""""
+        ...
+    def quoteD():
+        r'''raw with quotes\"'''
+        ...
+[out]
+class A:
+    """class docstring
+
+    a multiline docstring"""
+    def func() -> None:
+        """func docstring
+        don't forget to indent"""
+    def nodoc() -> None: ...
+
+class B:
+    def quoteA() -> None:
+        '''func docstring with quotes"""\\n
+        and an end quote\''''
+    def quoteB() -> None:
+        '''func docstring with quotes"""
+        \'\'\'
+        and an end quote\\"'''
+    def quoteC() -> None:
+        '''func docstring with end quote\\"'''
+    def quoteD() -> None:
+        '''raw with quotes\\"'''
+
+[case testIgnoreDocstrings]
+class A:
+    """class docstring
+
+    a multiline docstring"""
+    def func():
+        """func docstring
+
+        don't forget to indent"""
+    def nodoc():
+        ...
+
+class B:
+    def func():
+        """func docstring"""
+        ...
+    def nodoc():
+        ...
+
+[out]
+class A:
+    def func() -> None: ...
+    def nodoc() -> None: ...
+
+class B:
+    def func() -> None: ...
+    def nodoc() -> None: ...
+
 [case testKnownMagicMethodsReturnTypes]
 class Some:
     def __len__(self): ...

From 11a94be6f408d5bce391a1ec3931ce7197ca207b Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Mon, 14 Aug 2023 13:58:52 +0100
Subject: [PATCH 052/288] Add regression test for fixed bug involving bytes
 formatting (#15867)

Adds a regression test for #12665, which is a strange bug that was fixed
somewhat by accident
---
 test-data/unit/check-formatting.test | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/test-data/unit/check-formatting.test b/test-data/unit/check-formatting.test
index 7d23c2e199f1..75651124b76f 100644
--- a/test-data/unit/check-formatting.test
+++ b/test-data/unit/check-formatting.test
@@ -484,6 +484,23 @@ class D(bytes):
 '{}'.format(D())
 [builtins fixtures/primitives.pyi]
 
+[case testNoSpuriousFormattingErrorsDuringFailedOverloadMatch]
+from typing import overload, Callable
+
+@overload
+def sub(pattern: str, repl: Callable[[str], str]) -> str: ...
+@overload
+def sub(pattern: bytes, repl: Callable[[bytes], bytes]) -> bytes: ...
+def sub(pattern: object, repl: object) -> object:
+    pass
+
+def better_snakecase(text: str) -> str:
+    # Mypy used to emit a spurious error here
+    # warning about interpolating bytes into an f-string:
+    text = sub(r"([A-Z])([A-Z]+)([A-Z](?:[^A-Z]|$))", lambda match: f"{match}")
+    return text
+[builtins fixtures/primitives.pyi]
+
 [case testFormatCallFinal]
 from typing_extensions import Final
 

From a1fcad5bd6a6f71fac6f1a2f235302b2172ddd7d Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Mon, 14 Aug 2023 21:54:09 +0100
Subject: [PATCH 053/288] Add missing type annotations to the `primitives.pyi`
 fixture (#15871)

This fixes some weird test failures I was seeing locally when trying to
run just the tests in `check-enum.test` (invoked via `pytest
mypy/test/testcheck.py::TypeCheckSuite::check-enum.test`)
---
 test-data/unit/fixtures/primitives.pyi | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi
index c9b1e3f4e983..63128a8ae03d 100644
--- a/test-data/unit/fixtures/primitives.pyi
+++ b/test-data/unit/fixtures/primitives.pyi
@@ -12,7 +12,7 @@ class object:
     def __ne__(self, other: object) -> bool: pass
 
 class type:
-    def __init__(self, x) -> None: pass
+    def __init__(self, x: object) -> None: pass
 
 class int:
     # Note: this is a simplification of the actual signature
@@ -30,7 +30,7 @@ class str(Sequence[str]):
     def __iter__(self) -> Iterator[str]: pass
     def __contains__(self, other: object) -> bool: pass
     def __getitem__(self, item: int) -> str: pass
-    def format(self, *args, **kwargs) -> str: pass
+    def format(self, *args: object, **kwargs: object) -> str: pass
 class bytes(Sequence[int]):
     def __iter__(self) -> Iterator[int]: pass
     def __contains__(self, other: object) -> bool: pass

From 854a9f8f82a6dae085d3514897961871fe7005b1 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 14 Aug 2023 22:05:56 +0100
Subject: [PATCH 054/288] Allow None vs TypeVar overlap for overloads (#15846)

Fixes #8881

This is technically unsafe, and I remember we explicitly discussed this
a while ago, but related use cases turn out to be more common than I
expected (judging by how popular the issue is). Also the fix is really
simple.

---------

Co-authored-by: Ivan Levkivskyi <ilevkivskyi@hopper.com>
Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 mypy/checker.py                       | 24 ++++++--
 mypy/checkexpr.py                     | 86 +++++++++++++++++++++------
 mypy/subtypes.py                      | 15 ++++-
 test-data/unit/check-overloading.test | 39 ++++++++++--
 4 files changed, 135 insertions(+), 29 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index b786155079e5..3bd9c494a890 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -7216,22 +7216,32 @@ def is_unsafe_overlapping_overload_signatures(
     #
     # This discrepancy is unfortunately difficult to get rid of, so we repeat the
     # checks twice in both directions for now.
+    #
+    # Note that we ignore possible overlap between type variables and None. This
+    # is technically unsafe, but unsafety is tiny and this prevents some common
+    # use cases like:
+    #     @overload
+    #     def foo(x: None) -> None: ...
+    #     @overload
+    #     def foo(x: T) -> Foo[T]: ...
     return is_callable_compatible(
         signature,
         other,
-        is_compat=is_overlapping_types_no_promote_no_uninhabited,
+        is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none,
         is_compat_return=lambda l, r: not is_subtype_no_promote(l, r),
         ignore_return=False,
         check_args_covariantly=True,
         allow_partial_overlap=True,
+        no_unify_none=True,
     ) or is_callable_compatible(
         other,
         signature,
-        is_compat=is_overlapping_types_no_promote_no_uninhabited,
+        is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none,
         is_compat_return=lambda l, r: not is_subtype_no_promote(r, l),
         ignore_return=False,
         check_args_covariantly=False,
         allow_partial_overlap=True,
+        no_unify_none=True,
     )
 
 
@@ -7717,12 +7727,18 @@ def is_subtype_no_promote(left: Type, right: Type) -> bool:
     return is_subtype(left, right, ignore_promotions=True)
 
 
-def is_overlapping_types_no_promote_no_uninhabited(left: Type, right: Type) -> bool:
+def is_overlapping_types_no_promote_no_uninhabited_no_none(left: Type, right: Type) -> bool:
     # For the purpose of unsafe overload checks we consider list[<nothing>] and list[int]
     # non-overlapping. This is consistent with how we treat list[int] and list[str] as
     # non-overlapping, despite [] belongs to both. Also this will prevent false positives
     # for failed type inference during unification.
-    return is_overlapping_types(left, right, ignore_promotions=True, ignore_uninhabited=True)
+    return is_overlapping_types(
+        left,
+        right,
+        ignore_promotions=True,
+        ignore_uninhabited=True,
+        prohibit_none_typevar_overlap=True,
+    )
 
 
 def is_private(node_name: str) -> bool:
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 6df64b32493c..d00bbb288f3e 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -2409,6 +2409,11 @@ def check_overload_call(
         #         typevar. See https://github.com/python/mypy/issues/4063 for related discussion.
         erased_targets: list[CallableType] | None = None
         unioned_result: tuple[Type, Type] | None = None
+
+        # Determine whether we need to encourage union math. This should be generally safe,
+        # as union math infers better results in the vast majority of cases, but it is very
+        # computationally intensive.
+        none_type_var_overlap = self.possible_none_type_var_overlap(arg_types, plausible_targets)
         union_interrupted = False  # did we try all union combinations?
         if any(self.real_union(arg) for arg in arg_types):
             try:
@@ -2421,6 +2426,7 @@ def check_overload_call(
                         arg_names,
                         callable_name,
                         object_type,
+                        none_type_var_overlap,
                         context,
                     )
             except TooManyUnions:
@@ -2453,8 +2459,10 @@ def check_overload_call(
         # If any of checks succeed, stop early.
         if inferred_result is not None and unioned_result is not None:
             # Both unioned and direct checks succeeded, choose the more precise type.
-            if is_subtype(inferred_result[0], unioned_result[0]) and not isinstance(
-                get_proper_type(inferred_result[0]), AnyType
+            if (
+                is_subtype(inferred_result[0], unioned_result[0])
+                and not isinstance(get_proper_type(inferred_result[0]), AnyType)
+                and not none_type_var_overlap
             ):
                 return inferred_result
             return unioned_result
@@ -2504,7 +2512,8 @@ def check_overload_call(
             callable_name=callable_name,
             object_type=object_type,
         )
-        if union_interrupted:
+        # Do not show the extra error if the union math was forced.
+        if union_interrupted and not none_type_var_overlap:
             self.chk.fail(message_registry.TOO_MANY_UNION_COMBINATIONS, context)
         return result
 
@@ -2659,6 +2668,44 @@ def overload_erased_call_targets(
                 matches.append(typ)
         return matches
 
+    def possible_none_type_var_overlap(
+        self, arg_types: list[Type], plausible_targets: list[CallableType]
+    ) -> bool:
+        """Heuristic to determine whether we need to try forcing union math.
+
+        This is needed to avoid greedy type variable match in situations like this:
+            @overload
+            def foo(x: None) -> None: ...
+            @overload
+            def foo(x: T) -> list[T]: ...
+
+            x: int | None
+            foo(x)
+        we want this call to infer list[int] | None, not list[int | None].
+        """
+        if not plausible_targets or not arg_types:
+            return False
+        has_optional_arg = False
+        for arg_type in get_proper_types(arg_types):
+            if not isinstance(arg_type, UnionType):
+                continue
+            for item in get_proper_types(arg_type.items):
+                if isinstance(item, NoneType):
+                    has_optional_arg = True
+                    break
+        if not has_optional_arg:
+            return False
+
+        min_prefix = min(len(c.arg_types) for c in plausible_targets)
+        for i in range(min_prefix):
+            if any(
+                isinstance(get_proper_type(c.arg_types[i]), NoneType) for c in plausible_targets
+            ) and any(
+                isinstance(get_proper_type(c.arg_types[i]), TypeVarType) for c in plausible_targets
+            ):
+                return True
+        return False
+
     def union_overload_result(
         self,
         plausible_targets: list[CallableType],
@@ -2668,6 +2715,7 @@ def union_overload_result(
         arg_names: Sequence[str | None] | None,
         callable_name: str | None,
         object_type: Type | None,
+        none_type_var_overlap: bool,
         context: Context,
         level: int = 0,
     ) -> list[tuple[Type, Type]] | None:
@@ -2707,20 +2755,23 @@ def union_overload_result(
 
         # Step 3: Try a direct match before splitting to avoid unnecessary union splits
         # and save performance.
-        with self.type_overrides_set(args, arg_types):
-            direct = self.infer_overload_return_type(
-                plausible_targets,
-                args,
-                arg_types,
-                arg_kinds,
-                arg_names,
-                callable_name,
-                object_type,
-                context,
-            )
-        if direct is not None and not isinstance(get_proper_type(direct[0]), (UnionType, AnyType)):
-            # We only return non-unions soon, to avoid greedy match.
-            return [direct]
+        if not none_type_var_overlap:
+            with self.type_overrides_set(args, arg_types):
+                direct = self.infer_overload_return_type(
+                    plausible_targets,
+                    args,
+                    arg_types,
+                    arg_kinds,
+                    arg_names,
+                    callable_name,
+                    object_type,
+                    context,
+                )
+            if direct is not None and not isinstance(
+                get_proper_type(direct[0]), (UnionType, AnyType)
+            ):
+                # We only return non-unions soon, to avoid greedy match.
+                return [direct]
 
         # Step 4: Split the first remaining union type in arguments into items and
         # try to match each item individually (recursive).
@@ -2738,6 +2789,7 @@ def union_overload_result(
                 arg_names,
                 callable_name,
                 object_type,
+                none_type_var_overlap,
                 context,
                 level + 1,
             )
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 5712d7375e50..da92f7398d4e 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -1299,6 +1299,7 @@ def is_callable_compatible(
     check_args_covariantly: bool = False,
     allow_partial_overlap: bool = False,
     strict_concatenate: bool = False,
+    no_unify_none: bool = False,
 ) -> bool:
     """Is the left compatible with the right, using the provided compatibility check?
 
@@ -1415,7 +1416,9 @@ def g(x: int) -> int: ...
     # (below) treats type variables on the two sides as independent.
     if left.variables:
         # Apply generic type variables away in left via type inference.
-        unified = unify_generic_callable(left, right, ignore_return=ignore_return)
+        unified = unify_generic_callable(
+            left, right, ignore_return=ignore_return, no_unify_none=no_unify_none
+        )
         if unified is None:
             return False
         left = unified
@@ -1427,7 +1430,9 @@ def g(x: int) -> int: ...
     # So, we repeat the above checks in the opposite direction. This also
     # lets us preserve the 'symmetry' property of allow_partial_overlap.
     if allow_partial_overlap and right.variables:
-        unified = unify_generic_callable(right, left, ignore_return=ignore_return)
+        unified = unify_generic_callable(
+            right, left, ignore_return=ignore_return, no_unify_none=no_unify_none
+        )
         if unified is not None:
             right = unified
 
@@ -1687,6 +1692,8 @@ def unify_generic_callable(
     target: NormalizedCallableType,
     ignore_return: bool,
     return_constraint_direction: int | None = None,
+    *,
+    no_unify_none: bool = False,
 ) -> NormalizedCallableType | None:
     """Try to unify a generic callable type with another callable type.
 
@@ -1708,6 +1715,10 @@ def unify_generic_callable(
             type.ret_type, target.ret_type, return_constraint_direction
         )
         constraints.extend(c)
+    if no_unify_none:
+        constraints = [
+            c for c in constraints if not isinstance(get_proper_type(c.target), NoneType)
+        ]
     inferred_vars, _ = mypy.solve.solve_constraints(type.variables, constraints)
     if None in inferred_vars:
         return None
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index 50acd7d77c8c..4910dfe05d31 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -2185,36 +2185,63 @@ def bar2(*x: int) -> int: ...
 [builtins fixtures/tuple.pyi]
 
 [case testOverloadDetectsPossibleMatchesWithGenerics]
-from typing import overload, TypeVar, Generic
+# flags: --strict-optional
+from typing import overload, TypeVar, Generic, Optional, List
 
 T = TypeVar('T')
+# The examples below are unsafe, but it is a quite common pattern
+# so we ignore the possibility of type variables taking value `None`
+# for the purpose of overload overlap checks.
 
 @overload
-def foo(x: None, y: None) -> str: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+def foo(x: None, y: None) -> str: ...
 @overload
 def foo(x: T, y: T) -> int: ...
 def foo(x): ...
 
+oi: Optional[int]
+reveal_type(foo(None, None))  # N: Revealed type is "builtins.str"
+reveal_type(foo(None, 42))  # N: Revealed type is "builtins.int"
+reveal_type(foo(42, 42))  # N: Revealed type is "builtins.int"
+reveal_type(foo(oi, None))  # N: Revealed type is "Union[builtins.int, builtins.str]"
+reveal_type(foo(oi, 42))  # N: Revealed type is "builtins.int"
+reveal_type(foo(oi, oi))  # N: Revealed type is "Union[builtins.int, builtins.str]"
+
+@overload
+def foo_list(x: None) -> None: ...
+@overload
+def foo_list(x: T) -> List[T]: ...
+def foo_list(x): ...
+
+reveal_type(foo_list(oi))  # N: Revealed type is "Union[builtins.list[builtins.int], None]"
+
 # What if 'T' is 'object'?
 @overload
-def bar(x: None, y: int) -> str: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+def bar(x: None, y: int) -> str: ...
 @overload
 def bar(x: T, y: T) -> int: ...
 def bar(x, y): ...
 
 class Wrapper(Generic[T]):
     @overload
-    def foo(self, x: None, y: None) -> str: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+    def foo(self, x: None, y: None) -> str: ...
     @overload
     def foo(self, x: T, y: None) -> int: ...
     def foo(self, x): ...
 
     @overload
-    def bar(self, x: None, y: int) -> str: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+    def bar(self, x: None, y: int) -> str: ...
     @overload
     def bar(self, x: T, y: T) -> int: ...
     def bar(self, x, y): ...
 
+@overload
+def baz(x: str, y: str) -> str: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+@overload
+def baz(x: T, y: T) -> int: ...
+def baz(x): ...
+[builtins fixtures/tuple.pyi]
+
 [case testOverloadFlagsPossibleMatches]
 from wrapper import *
 [file wrapper.pyi]
@@ -3996,7 +4023,7 @@ T = TypeVar('T')
 
 class FakeAttribute(Generic[T]):
     @overload
-    def dummy(self, instance: None, owner: Type[T]) -> 'FakeAttribute[T]': ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+    def dummy(self, instance: None, owner: Type[T]) -> 'FakeAttribute[T]': ...
     @overload
     def dummy(self, instance: T, owner: Type[T]) -> int: ...
     def dummy(self, instance: Optional[T], owner: Type[T]) -> Union['FakeAttribute[T]', int]: ...

From b49be105d2940e3a0607f5ec76f519931b0d0a08 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
 <41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 14 Aug 2023 18:09:10 -0700
Subject: [PATCH 055/288] Sync typeshed (#15873)

Source commit:

https://github.com/python/typeshed/commit/74aac1aa891cbb568b124c955010b19d40f9fda7
---
 mypy/typeshed/stdlib/asyncio/base_events.pyi  | 47 ++++++++++++++++++-
 mypy/typeshed/stdlib/asyncio/constants.pyi    |  2 +
 mypy/typeshed/stdlib/asyncio/events.pyi       |  2 +
 mypy/typeshed/stdlib/asyncio/streams.pyi      | 11 ++++-
 mypy/typeshed/stdlib/asyncio/tasks.pyi        | 23 ++++++++-
 mypy/typeshed/stdlib/enum.pyi                 | 10 ++--
 .../stdlib/importlib/metadata/__init__.pyi    |  1 +
 mypy/typeshed/stdlib/logging/__init__.pyi     | 18 ++++++-
 mypy/typeshed/stdlib/socket.pyi               |  1 +
 mypy/typeshed/stdlib/sre_parse.pyi            | 24 ++++++++--
 mypy/typeshed/stdlib/ssl.pyi                  | 41 +++++++++++-----
 mypy/typeshed/stdlib/turtle.pyi               |  7 +++
 mypy/typeshed/stdlib/typing.pyi               | 10 +++-
 mypy/typeshed/stdlib/typing_extensions.pyi    | 10 +++-
 14 files changed, 175 insertions(+), 32 deletions(-)

diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi
index 9924f728f6ea..e2b55da8c718 100644
--- a/mypy/typeshed/stdlib/asyncio/base_events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi
@@ -107,7 +107,48 @@ class BaseEventLoop(AbstractEventLoop):
         flags: int = 0,
     ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ...
     async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ...
-    if sys.version_info >= (3, 11):
+    if sys.version_info >= (3, 12):
+        @overload
+        async def create_connection(
+            self,
+            protocol_factory: Callable[[], _ProtocolT],
+            host: str = ...,
+            port: int = ...,
+            *,
+            ssl: _SSLContext = None,
+            family: int = 0,
+            proto: int = 0,
+            flags: int = 0,
+            sock: None = None,
+            local_addr: tuple[str, int] | None = None,
+            server_hostname: str | None = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+            happy_eyeballs_delay: float | None = None,
+            interleave: int | None = None,
+            all_errors: bool = False,
+        ) -> tuple[Transport, _ProtocolT]: ...
+        @overload
+        async def create_connection(
+            self,
+            protocol_factory: Callable[[], _ProtocolT],
+            host: None = None,
+            port: None = None,
+            *,
+            ssl: _SSLContext = None,
+            family: int = 0,
+            proto: int = 0,
+            flags: int = 0,
+            sock: socket,
+            local_addr: None = None,
+            server_hostname: str | None = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+            happy_eyeballs_delay: float | None = None,
+            interleave: int | None = None,
+            all_errors: bool = False,
+        ) -> tuple[Transport, _ProtocolT]: ...
+    elif sys.version_info >= (3, 11):
         @overload
         async def create_connection(
             self,
@@ -426,5 +467,7 @@ class BaseEventLoop(AbstractEventLoop):
     # Debug flag management.
     def get_debug(self) -> bool: ...
     def set_debug(self, enabled: bool) -> None: ...
-    if sys.version_info >= (3, 9):
+    if sys.version_info >= (3, 12):
+        async def shutdown_default_executor(self, timeout: float | None = None) -> None: ...
+    elif sys.version_info >= (3, 9):
         async def shutdown_default_executor(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/asyncio/constants.pyi b/mypy/typeshed/stdlib/asyncio/constants.pyi
index af209fa9ee62..60d8529209c2 100644
--- a/mypy/typeshed/stdlib/asyncio/constants.pyi
+++ b/mypy/typeshed/stdlib/asyncio/constants.pyi
@@ -11,6 +11,8 @@ if sys.version_info >= (3, 11):
     SSL_SHUTDOWN_TIMEOUT: float
     FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256]
     FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512]
+if sys.version_info >= (3, 12):
+    THREAD_JOIN_TIMEOUT: Literal[300]
 
 class _SendfileMode(enum.Enum):
     UNSUPPORTED: int
diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi
index b1b0fcfa5fd7..cde63b279b0d 100644
--- a/mypy/typeshed/stdlib/asyncio/events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/events.pyi
@@ -76,6 +76,8 @@ class Handle:
     def cancel(self) -> None: ...
     def _run(self) -> None: ...
     def cancelled(self) -> bool: ...
+    if sys.version_info >= (3, 12):
+        def get_context(self) -> Context: ...
 
 class TimerHandle(Handle):
     def __init__(
diff --git a/mypy/typeshed/stdlib/asyncio/streams.pyi b/mypy/typeshed/stdlib/asyncio/streams.pyi
index f30c57305d93..804be1ca5065 100644
--- a/mypy/typeshed/stdlib/asyncio/streams.pyi
+++ b/mypy/typeshed/stdlib/asyncio/streams.pyi
@@ -148,7 +148,16 @@ class StreamWriter:
     async def wait_closed(self) -> None: ...
     def get_extra_info(self, name: str, default: Any = None) -> Any: ...
     async def drain(self) -> None: ...
-    if sys.version_info >= (3, 11):
+    if sys.version_info >= (3, 12):
+        async def start_tls(
+            self,
+            sslcontext: ssl.SSLContext,
+            *,
+            server_hostname: str | None = None,
+            ssl_handshake_timeout: float | None = None,
+            ssl_shutdown_timeout: float | None = None,
+        ) -> None: ...
+    elif sys.version_info >= (3, 11):
         async def start_tls(
             self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None
         ) -> None: ...
diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi
index d8c101f281fc..5ea30d3791de 100644
--- a/mypy/typeshed/stdlib/asyncio/tasks.pyi
+++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi
@@ -285,7 +285,26 @@ else:
 # since the only reason why `asyncio.Future` is invariant is the `set_result()` method,
 # and `asyncio.Task.set_result()` always raises.
 class Task(Future[_T_co], Generic[_T_co]):  # type: ignore[type-var]  # pyright: ignore[reportGeneralTypeIssues]
-    if sys.version_info >= (3, 8):
+    if sys.version_info >= (3, 12):
+        def __init__(
+            self,
+            coro: _TaskCompatibleCoro[_T_co],
+            *,
+            loop: AbstractEventLoop = ...,
+            name: str | None,
+            context: Context | None = None,
+            eager_start: bool = False,
+        ) -> None: ...
+    elif sys.version_info >= (3, 11):
+        def __init__(
+            self,
+            coro: _TaskCompatibleCoro[_T_co],
+            *,
+            loop: AbstractEventLoop = ...,
+            name: str | None,
+            context: Context | None = None,
+        ) -> None: ...
+    elif sys.version_info >= (3, 8):
         def __init__(
             self, coro: _TaskCompatibleCoro[_T_co], *, loop: AbstractEventLoop = ..., name: str | None = ...
         ) -> None: ...
@@ -295,6 +314,8 @@ class Task(Future[_T_co], Generic[_T_co]):  # type: ignore[type-var]  # pyright:
         def get_coro(self) -> _TaskCompatibleCoro[_T_co]: ...
         def get_name(self) -> str: ...
         def set_name(self, __value: object) -> None: ...
+    if sys.version_info >= (3, 12):
+        def get_context(self) -> Context: ...
 
     def get_stack(self, *, limit: int | None = None) -> list[FrameType]: ...
     def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: ...
diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi
index 60cc27215fd0..a8ba7bf157c2 100644
--- a/mypy/typeshed/stdlib/enum.pyi
+++ b/mypy/typeshed/stdlib/enum.pyi
@@ -2,7 +2,6 @@ import _typeshed
 import sys
 import types
 from _typeshed import SupportsKeysAndGetItem, Unused
-from abc import ABCMeta
 from builtins import property as _builtins_property
 from collections.abc import Callable, Iterable, Iterator, Mapping
 from typing import Any, Generic, TypeVar, overload
@@ -76,12 +75,8 @@ class _EnumDict(dict[str, Any]):
         @overload
         def update(self, members: Iterable[tuple[str, Any]], **more_members: Any) -> None: ...
 
-# Note: EnumMeta actually subclasses type directly, not ABCMeta.
-# This is a temporary workaround to allow multiple creation of enums with builtins
-# such as str as mixins, which due to the handling of ABCs of builtin types, cause
-# spurious inconsistent metaclass structure. See #1595.
 # Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself
-class EnumMeta(ABCMeta):
+class EnumMeta(type):
     if sys.version_info >= (3, 11):
         def __new__(
             metacls: type[_typeshed.Self],
@@ -193,6 +188,9 @@ class Enum(metaclass=EnumMeta):
     def __hash__(self) -> int: ...
     def __format__(self, format_spec: str) -> str: ...
     def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ...
+    if sys.version_info >= (3, 12):
+        def __copy__(self) -> Self: ...
+        def __deepcopy__(self, memo: Any) -> Self: ...
 
 if sys.version_info >= (3, 11):
     class ReprEnum(Enum): ...
diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
index 0af33bc876c4..0f8a6f56cf88 100644
--- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
+++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
@@ -180,6 +180,7 @@ class MetadataPathFinder(DistributionFinder):
         def invalidate_caches(cls) -> None: ...
 
 class PathDistribution(Distribution):
+    _path: Path
     def __init__(self, path: Path) -> None: ...
     def read_text(self, filename: StrPath) -> str: ...
     def locate_file(self, path: StrPath) -> PathLike[str]: ...
diff --git a/mypy/typeshed/stdlib/logging/__init__.pyi b/mypy/typeshed/stdlib/logging/__init__.pyi
index 6ebd305aacb8..db797d4180ea 100644
--- a/mypy/typeshed/stdlib/logging/__init__.pyi
+++ b/mypy/typeshed/stdlib/logging/__init__.pyi
@@ -60,6 +60,8 @@ __all__ = [
 
 if sys.version_info >= (3, 11):
     __all__ += ["getLevelNamesMapping"]
+if sys.version_info >= (3, 12):
+    __all__ += ["getHandlerByName", "getHandlerNames"]
 
 _SysExcInfoType: TypeAlias = tuple[type[BaseException], BaseException, TracebackType | None] | tuple[None, None, None]
 _ExcInfoType: TypeAlias = None | bool | _SysExcInfoType | BaseException
@@ -83,7 +85,10 @@ class Filterer:
     filters: list[_FilterType]
     def addFilter(self, filter: _FilterType) -> None: ...
     def removeFilter(self, filter: _FilterType) -> None: ...
-    def filter(self, record: LogRecord) -> bool: ...
+    if sys.version_info >= (3, 12):
+        def filter(self, record: LogRecord) -> bool | LogRecord: ...
+    else:
+        def filter(self, record: LogRecord) -> bool: ...
 
 class Manager:  # undocumented
     root: RootLogger
@@ -111,6 +116,8 @@ class Logger(Filterer):
     def isEnabledFor(self, level: int) -> bool: ...
     def getEffectiveLevel(self) -> int: ...
     def getChild(self, suffix: str) -> Self: ...  # see python/typing#980
+    if sys.version_info >= (3, 12):
+        def getChildren(self) -> set[Logger]: ...
     if sys.version_info >= (3, 8):
         def debug(
             self,
@@ -324,6 +331,10 @@ class Handler(Filterer):
     def format(self, record: LogRecord) -> str: ...
     def emit(self, record: LogRecord) -> None: ...
 
+if sys.version_info >= (3, 12):
+    def getHandlerByName(name: str) -> Handler | None: ...
+    def getHandlerNames() -> frozenset[str]: ...
+
 class Formatter:
     converter: Callable[[float | None], struct_time]
     _fmt: str | None  # undocumented
@@ -370,7 +381,10 @@ class Filter:
     name: str  # undocumented
     nlen: int  # undocumented
     def __init__(self, name: str = "") -> None: ...
-    def filter(self, record: LogRecord) -> bool: ...
+    if sys.version_info >= (3, 12):
+        def filter(self, record: LogRecord) -> bool | LogRecord: ...
+    else:
+        def filter(self, record: LogRecord) -> bool: ...
 
 class LogRecord:
     # args can be set to None by logging.handlers.QueueHandler
diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi
index 5dd92ec8e116..da06ce2c2b06 100644
--- a/mypy/typeshed/stdlib/socket.pyi
+++ b/mypy/typeshed/stdlib/socket.pyi
@@ -201,6 +201,7 @@ if sys.platform != "win32" and sys.platform != "darwin":
         TCP_LINGER2 as TCP_LINGER2,
         TCP_QUICKACK as TCP_QUICKACK,
         TCP_SYNCNT as TCP_SYNCNT,
+        TCP_USER_TIMEOUT as TCP_USER_TIMEOUT,
         TCP_WINDOW_CLAMP as TCP_WINDOW_CLAMP,
     )
 if sys.platform != "win32":
diff --git a/mypy/typeshed/stdlib/sre_parse.pyi b/mypy/typeshed/stdlib/sre_parse.pyi
index 56f10bb41d57..8ef65223dc34 100644
--- a/mypy/typeshed/stdlib/sre_parse.pyi
+++ b/mypy/typeshed/stdlib/sre_parse.pyi
@@ -87,25 +87,39 @@ class Tokenizer:
     def seek(self, index: int) -> None: ...
     def error(self, msg: str, offset: int = 0) -> _Error: ...
 
-    if sys.version_info >= (3, 11):
+    if sys.version_info >= (3, 12):
+        def checkgroupname(self, name: str, offset: int) -> None: ...
+    elif sys.version_info >= (3, 11):
         def checkgroupname(self, name: str, offset: int, nested: int) -> None: ...
 
 def fix_flags(src: str | bytes, flags: int) -> int: ...
 
 _TemplateType: TypeAlias = tuple[list[tuple[int, int]], list[str | None]]
 _TemplateByteType: TypeAlias = tuple[list[tuple[int, int]], list[bytes | None]]
-if sys.version_info >= (3, 8):
-    def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ...
+
+if sys.version_info >= (3, 12):
+    @overload
+    def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ...
+    @overload
+    def parse_template(source: bytes, pattern: _Pattern[Any]) -> _TemplateByteType: ...
+
+elif sys.version_info >= (3, 8):
     @overload
     def parse_template(source: str, state: _Pattern[Any]) -> _TemplateType: ...
     @overload
     def parse_template(source: bytes, state: _Pattern[Any]) -> _TemplateByteType: ...
 
 else:
-    def parse(str: str, flags: int = 0, pattern: Pattern | None = None) -> SubPattern: ...
     @overload
     def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ...
     @overload
     def parse_template(source: bytes, pattern: _Pattern[Any]) -> _TemplateByteType: ...
 
-def expand_template(template: _TemplateType, match: Match[Any]) -> str: ...
+if sys.version_info >= (3, 8):
+    def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ...
+
+else:
+    def parse(str: str, flags: int = 0, pattern: Pattern | None = None) -> SubPattern: ...
+
+if sys.version_info < (3, 12):
+    def expand_template(template: _TemplateType, match: Match[Any]) -> str: ...
diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi
index 446bbf8d1009..dd7285196ed9 100644
--- a/mypy/typeshed/stdlib/ssl.pyi
+++ b/mypy/typeshed/stdlib/ssl.pyi
@@ -44,18 +44,20 @@ class SSLCertVerificationError(SSLError, ValueError):
 
 CertificateError = SSLCertVerificationError
 
-def wrap_socket(
-    sock: socket.socket,
-    keyfile: StrOrBytesPath | None = None,
-    certfile: StrOrBytesPath | None = None,
-    server_side: bool = False,
-    cert_reqs: int = ...,
-    ssl_version: int = ...,
-    ca_certs: str | None = None,
-    do_handshake_on_connect: bool = True,
-    suppress_ragged_eofs: bool = True,
-    ciphers: str | None = None,
-) -> SSLSocket: ...
+if sys.version_info < (3, 12):
+    def wrap_socket(
+        sock: socket.socket,
+        keyfile: StrOrBytesPath | None = None,
+        certfile: StrOrBytesPath | None = None,
+        server_side: bool = False,
+        cert_reqs: int = ...,
+        ssl_version: int = ...,
+        ca_certs: str | None = None,
+        do_handshake_on_connect: bool = True,
+        suppress_ragged_eofs: bool = True,
+        ciphers: str | None = None,
+    ) -> SSLSocket: ...
+
 def create_default_context(
     purpose: Purpose = ...,
     *,
@@ -95,7 +97,10 @@ else:
 _create_default_https_context: Callable[..., SSLContext]
 
 def RAND_bytes(__n: int) -> bytes: ...
-def RAND_pseudo_bytes(__n: int) -> tuple[bytes, bool]: ...
+
+if sys.version_info < (3, 12):
+    def RAND_pseudo_bytes(__n: int) -> tuple[bytes, bool]: ...
+
 def RAND_status() -> bool: ...
 def RAND_egd(path: str) -> None: ...
 def RAND_add(__string: str | ReadableBuffer, __entropy: float) -> None: ...
@@ -198,6 +203,11 @@ class Options(enum.IntFlag):
         OP_ENABLE_MIDDLEBOX_COMPAT: int
         if sys.platform == "linux":
             OP_IGNORE_UNEXPECTED_EOF: int
+    if sys.version_info >= (3, 12):
+        OP_LEGACY_SERVER_CONNECT: int
+    if sys.version_info >= (3, 12) and sys.platform != "linux":
+        OP_ENABLE_KTLS: int
+        OP_IGNORE_UNEXPECTED_EOF: int
 
 OP_ALL: Options
 OP_NO_SSLv2: Options
@@ -216,6 +226,11 @@ if sys.version_info >= (3, 8):
     OP_ENABLE_MIDDLEBOX_COMPAT: Options
     if sys.platform == "linux":
         OP_IGNORE_UNEXPECTED_EOF: Options
+if sys.version_info >= (3, 12):
+    OP_LEGACY_SERVER_CONNECT: Options
+if sys.version_info >= (3, 12) and sys.platform != "linux":
+    OP_ENABLE_KTLS: Options
+    OP_IGNORE_UNEXPECTED_EOF: Options
 
 HAS_NEVER_CHECK_COMMON_NAME: bool
 HAS_SSLv2: bool
diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi
index 5df3e4b90cb5..80ea40879dee 100644
--- a/mypy/typeshed/stdlib/turtle.pyi
+++ b/mypy/typeshed/stdlib/turtle.pyi
@@ -1,3 +1,4 @@
+import sys
 from collections.abc import Callable, Sequence
 from tkinter import Canvas, Frame, Misc, PhotoImage, Scrollbar
 from typing import Any, ClassVar, overload
@@ -249,6 +250,9 @@ class TNavigator:
     def reset(self) -> None: ...
     def degrees(self, fullcircle: float = 360.0) -> None: ...
     def radians(self) -> None: ...
+    if sys.version_info >= (3, 12):
+        def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ...
+
     def forward(self, distance: float) -> None: ...
     def back(self, distance: float) -> None: ...
     def right(self, angle: float) -> None: ...
@@ -321,6 +325,9 @@ class TPen:
     def color(self, r: float, g: float, b: float) -> None: ...
     @overload
     def color(self, color1: _Color, color2: _Color) -> None: ...
+    if sys.version_info >= (3, 12):
+        def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ...
+
     def showturtle(self) -> None: ...
     def hideturtle(self) -> None: ...
     def isvisible(self) -> bool: ...
diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi
index 6a307368642f..a9bffdf5214f 100644
--- a/mypy/typeshed/stdlib/typing.pyi
+++ b/mypy/typeshed/stdlib/typing.pyi
@@ -900,8 +900,16 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta):
     def keys(self) -> dict_keys[str, object]: ...
     def values(self) -> dict_values[str, object]: ...
     if sys.version_info >= (3, 9):
+        @overload
         def __or__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ...
-        def __ior__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ...
+        @overload
+        def __or__(self, __value: dict[str, Any]) -> dict[str, object]: ...
+        @overload
+        def __ror__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ...
+        @overload
+        def __ror__(self, __value: dict[str, Any]) -> dict[str, object]: ...
+        # supposedly incompatible definitions of __or__ and __ior__
+        def __ior__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ...  # type: ignore[misc]
 
 @_final
 class ForwardRef:
diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi
index efcc13e42047..9320dc50b6bb 100644
--- a/mypy/typeshed/stdlib/typing_extensions.pyi
+++ b/mypy/typeshed/stdlib/typing_extensions.pyi
@@ -233,8 +233,16 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta):
     def values(self) -> dict_values[str, object]: ...
     def __delitem__(self, k: Never) -> None: ...
     if sys.version_info >= (3, 9):
+        @overload
         def __or__(self, __value: Self) -> Self: ...
-        def __ior__(self, __value: Self) -> Self: ...
+        @overload
+        def __or__(self, __value: dict[str, Any]) -> dict[str, object]: ...
+        @overload
+        def __ror__(self, __value: Self) -> Self: ...
+        @overload
+        def __ror__(self, __value: dict[str, Any]) -> dict[str, object]: ...
+        # supposedly incompatible definitions of `__ior__` and `__or__`:
+        def __ior__(self, __value: Self) -> Self: ...  # type: ignore[misc]
 
 # TypedDict is a (non-subscriptable) special form.
 TypedDict: object

From fda7a460485cb856c595d4d0593a0ec6c0fe03e9 Mon Sep 17 00:00:00 2001
From: Albert Tugushev <albert@tugushev.ru>
Date: Tue, 15 Aug 2023 17:40:37 +0200
Subject: [PATCH 056/288] Fix all the missing references found within the docs
 (#15875)

Fixes #13196.

Enable the nit-picky mode on sphinx-build in tox, as this will
facilitate the detection of potential issues related to missing
references.
---
 docs/source/error_code_list.rst  |  2 +-
 docs/source/more_types.rst       | 18 +++++++++---------
 docs/source/runtime_troubles.rst |  4 ++--
 tox.ini                          |  2 +-
 4 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst
index 157f90249af8..1f75ac54d525 100644
--- a/docs/source/error_code_list.rst
+++ b/docs/source/error_code_list.rst
@@ -835,7 +835,7 @@ ellipsis ``...``, a docstring, and a ``raise NotImplementedError`` statement.
 Check the target of NewType [valid-newtype]
 -------------------------------------------
 
-The target of a :py:func:`NewType <typing.NewType>` definition must be a class type. It can't
+The target of a :py:class:`~typing.NewType` definition must be a class type. It can't
 be a union type, ``Any``, or various other special types.
 
 You can also get this error if the target has been imported from a
diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst
index 542ff1c57c71..4e6e9204fdca 100644
--- a/docs/source/more_types.rst
+++ b/docs/source/more_types.rst
@@ -2,7 +2,7 @@ More types
 ==========
 
 This section introduces a few additional kinds of types, including :py:data:`~typing.NoReturn`,
-:py:func:`NewType <typing.NewType>`, and types for async code. It also discusses
+:py:class:`~typing.NewType`, and types for async code. It also discusses
 how to give functions more precise types using overloads. All of these are only
 situationally useful, so feel free to skip this section and come back when you
 have a need for some of them.
@@ -11,7 +11,7 @@ Here's a quick summary of what's covered here:
 
 * :py:data:`~typing.NoReturn` lets you tell mypy that a function never returns normally.
 
-* :py:func:`NewType <typing.NewType>` lets you define a variant of a type that is treated as a
+* :py:class:`~typing.NewType` lets you define a variant of a type that is treated as a
   separate type by mypy but is identical to the original type at runtime.
   For example, you can have ``UserId`` as a variant of ``int`` that is
   just an ``int`` at runtime.
@@ -75,7 +75,7 @@ certain values from base class instances. Example:
         ...
 
 However, this approach introduces some runtime overhead. To avoid this, the typing
-module provides a helper object :py:func:`NewType <typing.NewType>` that creates simple unique types with
+module provides a helper object :py:class:`~typing.NewType` that creates simple unique types with
 almost zero runtime overhead. Mypy will treat the statement
 ``Derived = NewType('Derived', Base)`` as being roughly equivalent to the following
 definition:
@@ -113,12 +113,12 @@ implicitly casting from ``UserId`` where ``int`` is expected. Examples:
 
     num: int = UserId(5) + 1
 
-:py:func:`NewType <typing.NewType>` accepts exactly two arguments. The first argument must be a string literal
+:py:class:`~typing.NewType` accepts exactly two arguments. The first argument must be a string literal
 containing the name of the new type and must equal the name of the variable to which the new
 type is assigned. The second argument must be a properly subclassable class, i.e.,
 not a type construct like :py:data:`~typing.Union`, etc.
 
-The callable returned by :py:func:`NewType <typing.NewType>` accepts only one argument; this is equivalent to
+The callable returned by :py:class:`~typing.NewType` accepts only one argument; this is equivalent to
 supporting only one constructor accepting an instance of the base class (see above).
 Example:
 
@@ -139,12 +139,12 @@ Example:
     tcp_packet = TcpPacketId(127, 0)  # Fails in type checker and at runtime
 
 You cannot use :py:func:`isinstance` or :py:func:`issubclass` on the object returned by
-:py:func:`~typing.NewType`, nor can you subclass an object returned by :py:func:`~typing.NewType`.
+:py:class:`~typing.NewType`, nor can you subclass an object returned by :py:class:`~typing.NewType`.
 
 .. note::
 
-    Unlike type aliases, :py:func:`NewType <typing.NewType>` will create an entirely new and
-    unique type when used. The intended purpose of :py:func:`NewType <typing.NewType>` is to help you
+    Unlike type aliases, :py:class:`~typing.NewType` will create an entirely new and
+    unique type when used. The intended purpose of :py:class:`~typing.NewType` is to help you
     detect cases where you accidentally mixed together the old base type and the
     new derived type.
 
@@ -160,7 +160,7 @@ You cannot use :py:func:`isinstance` or :py:func:`issubclass` on the object retu
 
         name_by_id(3)  # ints and UserId are synonymous
 
-    But a similar example using :py:func:`NewType <typing.NewType>` will not typecheck:
+    But a similar example using :py:class:`~typing.NewType` will not typecheck:
 
     .. code-block:: python
 
diff --git a/docs/source/runtime_troubles.rst b/docs/source/runtime_troubles.rst
index 909215a774a9..66ab7b3a84c7 100644
--- a/docs/source/runtime_troubles.rst
+++ b/docs/source/runtime_troubles.rst
@@ -86,7 +86,7 @@ required to be valid Python syntax. For more details, see :pep:`563`.
 
     * :ref:`type aliases <type-aliases>`;
     * :ref:`type narrowing <type-narrowing>`;
-    * type definitions (see :py:class:`~typing.TypeVar`, :py:func:`~typing.NewType`, :py:class:`~typing.NamedTuple`);
+    * type definitions (see :py:class:`~typing.TypeVar`, :py:class:`~typing.NewType`, :py:class:`~typing.NamedTuple`);
     * base classes.
 
     .. code-block:: python
@@ -263,7 +263,7 @@ If your subclass is also generic, you can use the following:
    reveal_type(task_queue.get())  # Reveals str
 
 In Python 3.9, we can just inherit directly from ``Queue[str]`` or ``Queue[T]``
-since its :py:class:`queue.Queue` implements :py:meth:`__class_getitem__`, so
+since its :py:class:`queue.Queue` implements :py:meth:`~object.__class_getitem__`, so
 the class object can be subscripted at runtime without issue.
 
 Using types defined in stubs but not at runtime
diff --git a/tox.ini b/tox.ini
index 5a728e27fec4..a809c4d2c570 100644
--- a/tox.ini
+++ b/tox.ini
@@ -37,7 +37,7 @@ passenv =
     VERIFY_MYPY_ERROR_CODES
 deps = -rdocs/requirements-docs.txt
 commands =
-    sphinx-build -d "{toxworkdir}/docs_doctree" docs/source "{toxworkdir}/docs_out" --color -W -bhtml {posargs}
+    sphinx-build -n -d "{toxworkdir}/docs_doctree" docs/source "{toxworkdir}/docs_out" --color -W -bhtml {posargs}
     python -c 'import pathlib; print("documentation available under file://\{0\}".format(pathlib.Path(r"{toxworkdir}") / "docs_out" / "index.html"))'
 
 [testenv:lint]

From 14418bc3d2c38b9ea776da6029e9d9dc6650b7ea Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Tue, 15 Aug 2023 20:31:26 +0100
Subject: [PATCH 057/288] Polymorphic inference: support for parameter
 specifications and lambdas (#15837)

This is a third follow-up for https://github.com/python/mypy/pull/15287
(likely there will be just one more PR, for `TypeVarTuple`s, and few
less important items I mentioned in the original PR I will leave for
more distant future).

After all, this PR turned out to be larger than I wanted. The problem is
that `Concatenate` support for `ParamSpec` was quite broken, and this
caused many of my tests to fail. So I decided to include some major cleanup
in this PR (I tried splitting it into a separate PR, but it turned out to
be tricky). After all, if one ignores the added tests, it is almost a net
zero line count.

The main problems that I encountered are:
* First, valid substitutions for a `ParamSpecType` were: another
`ParamSpecType`, `Parameters`, and `CallableType` (and also `AnyType`
and `UninhabitedType` but those seem to be handled trivially). Having
`CallableType` in this list caused various missed cases, bogus
`get_proper_type()`s, and was generally counter-intuitive.
* Second (and probably bigger) issue is that it is possible to represent
`Concatenate` in two different forms: as a prefix for `ParamSpecType`
(used mostly for instances), and as separate argument types (used mostly
for callables). The problem is that some parts of the code were
implicitly relying on it being in one or the other form, while some
other code uncontrollably switched between the two.

I propose to fix this by introducing some simplifications and rules
(some of which I enforce by asserts):
* Only valid non-trivial substitutions (and consequently upper/lower
bound in constraints) for `ParamSpecType` are `ParamSpecType` and
`Parameters`.
* When `ParamSpecType` appears in a callable it must have an empty
`prefix`.
* `Parameters` cannot contain other `Parameters` (and ideally also
`ParamSpecType`s) among argument types.
* For inference we bring `Concatenate` to a common representation (because
both callables and instances may appear in the same expression). Using
the `ParamSpecType` representation with `prefix` looks significantly
simpler (especially in the solver).

Apart from this, the actual implementation of polymorphic inference is
simple/straightforward: I just handle the additional `ParamSpecType`
cases (in addition to `TypeVarType`) for inference, for the solver, and
for application. I also enabled polymorphic inference for lambda
expressions, since they are handled by similar code paths.

Some minor comments:
* I fixed a couple of minor bugs uncovered by this PR (see e.g. the test
case for an accidental `TypeVar` id clash).
* I switched a few tests to `--new-type-inference` because their error
messages are slightly different, and so it is easier for me to test a
global flip to `True` locally.
* I may tweak some of the "ground rules" if the `mypy_primer` output is
particularly bad.

---------

Co-authored-by: Ivan Levkivskyi <ilevkivskyi@hopper.com>
---
 mypy/applytype.py                             |  11 +-
 mypy/checker.py                               |  13 +-
 mypy/checkexpr.py                             | 123 ++++++++--
 mypy/constraints.py                           | 148 ++++++-----
 mypy/expandtype.py                            | 102 +++-----
 mypy/join.py                                  |  10 +-
 mypy/meet.py                                  |   5 +-
 mypy/solve.py                                 |  38 +--
 mypy/subtypes.py                              |  14 +-
 mypy/test/testtypes.py                        |   2 +-
 mypy/type_visitor.py                          |   2 +-
 mypy/typeanal.py                              |  22 +-
 mypy/typeops.py                               |  19 +-
 mypy/types.py                                 |  45 ++--
 test-data/unit/check-functions.test           |   8 +-
 test-data/unit/check-generics.test            | 230 +++++++++++++++++-
 test-data/unit/check-inference-context.test   |   3 +-
 test-data/unit/check-inference.test           |  10 +-
 test-data/unit/check-overloading.test         |  21 +-
 .../unit/check-parameter-specification.test   |  47 +++-
 20 files changed, 639 insertions(+), 234 deletions(-)

diff --git a/mypy/applytype.py b/mypy/applytype.py
index f8be63362a6b..6abe7f0022f8 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -9,7 +9,6 @@
     AnyType,
     CallableType,
     Instance,
-    Parameters,
     ParamSpecType,
     PartialType,
     TupleType,
@@ -112,9 +111,13 @@ def apply_generic_arguments(
     if param_spec is not None:
         nt = id_to_type.get(param_spec.id)
         if nt is not None:
-            nt = get_proper_type(nt)
-            if isinstance(nt, (CallableType, Parameters)):
-                callable = callable.expand_param_spec(nt)
+            # ParamSpec expansion is special-cased, so we need to always expand callable
+            # as a whole, not expanding arguments individually.
+            callable = expand_type(callable, id_to_type)
+            assert isinstance(callable, CallableType)
+            return callable.copy_modified(
+                variables=[tv for tv in tvars if tv.id not in id_to_type]
+            )
 
     # Apply arguments to argument types.
     var_arg = callable.var_arg()
diff --git a/mypy/checker.py b/mypy/checker.py
index 3bd9c494a890..5d97a0dec713 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -4280,12 +4280,14 @@ def check_return_stmt(self, s: ReturnStmt) -> None:
                 return_type = self.return_types[-1]
             return_type = get_proper_type(return_type)
 
+            is_lambda = isinstance(self.scope.top_function(), LambdaExpr)
             if isinstance(return_type, UninhabitedType):
-                self.fail(message_registry.NO_RETURN_EXPECTED, s)
-                return
+                # Avoid extra error messages for failed inference in lambdas
+                if not is_lambda or not return_type.ambiguous:
+                    self.fail(message_registry.NO_RETURN_EXPECTED, s)
+                    return
 
             if s.expr:
-                is_lambda = isinstance(self.scope.top_function(), LambdaExpr)
                 declared_none_return = isinstance(return_type, NoneType)
                 declared_any_return = isinstance(return_type, AnyType)
 
@@ -7376,6 +7378,11 @@ def visit_erased_type(self, t: ErasedType) -> bool:
         # This can happen inside a lambda.
         return True
 
+    def visit_type_var(self, t: TypeVarType) -> bool:
+        # This is needed to prevent leaking into partial types during
+        # multi-step type inference.
+        return t.id.is_meta_var()
+
 
 class SetNothingToAny(TypeTranslator):
     """Replace all ambiguous <nothing> types with Any (to avoid spurious extra errors)."""
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index d00bbb288f3e..68ea7c30ed6f 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -17,7 +17,12 @@
 from mypy.checkstrformat import StringFormatterChecker
 from mypy.erasetype import erase_type, remove_instance_last_known_values, replace_meta_vars
 from mypy.errors import ErrorWatcher, report_internal_error
-from mypy.expandtype import expand_type, expand_type_by_instance, freshen_function_type_vars
+from mypy.expandtype import (
+    expand_type,
+    expand_type_by_instance,
+    freshen_all_functions_type_vars,
+    freshen_function_type_vars,
+)
 from mypy.infer import ArgumentInferContext, infer_function_type_arguments, infer_type_arguments
 from mypy.literals import literal
 from mypy.maptype import map_instance_to_supertype
@@ -122,6 +127,7 @@
     false_only,
     fixup_partial_type,
     function_type,
+    get_all_type_vars,
     get_type_vars,
     is_literal_type_like,
     make_simplified_union,
@@ -145,6 +151,7 @@
     LiteralValue,
     NoneType,
     Overloaded,
+    Parameters,
     ParamSpecFlavor,
     ParamSpecType,
     PartialType,
@@ -167,6 +174,7 @@
     get_proper_types,
     has_recursive_types,
     is_named_instance,
+    remove_dups,
     split_with_prefix_and_suffix,
 )
 from mypy.types_utils import (
@@ -1579,6 +1587,16 @@ def check_callable_call(
             lambda i: self.accept(args[i]),
         )
 
+        # This is tricky: return type may contain its own type variables, like in
+        # def [S] (S) -> def [T] (T) -> tuple[S, T], so we need to update their ids
+        # to avoid possible id clashes if this call itself appears in a generic
+        # function body.
+        ret_type = get_proper_type(callee.ret_type)
+        if isinstance(ret_type, CallableType) and ret_type.variables:
+            fresh_ret_type = freshen_all_functions_type_vars(callee.ret_type)
+            freeze_all_type_vars(fresh_ret_type)
+            callee = callee.copy_modified(ret_type=fresh_ret_type)
+
         if callee.is_generic():
             need_refresh = any(
                 isinstance(v, (ParamSpecType, TypeVarTupleType)) for v in callee.variables
@@ -1597,7 +1615,7 @@ def check_callable_call(
                     lambda i: self.accept(args[i]),
                 )
             callee = self.infer_function_type_arguments(
-                callee, args, arg_kinds, formal_to_actual, context
+                callee, args, arg_kinds, arg_names, formal_to_actual, need_refresh, context
             )
             if need_refresh:
                 formal_to_actual = map_actuals_to_formals(
@@ -1864,6 +1882,8 @@ def infer_function_type_arguments_using_context(
             #        def identity(x: T) -> T: return x
             #
             #        expects_literal(identity(3))  # Should type-check
+            # TODO: we may want to add similar exception if all arguments are lambdas, since
+            # in this case external context is almost everything we have.
             if not is_generic_instance(ctx) and not is_literal_type_like(ctx):
                 return callable.copy_modified()
         args = infer_type_arguments(callable.variables, ret_type, erased_ctx)
@@ -1885,7 +1905,9 @@ def infer_function_type_arguments(
         callee_type: CallableType,
         args: list[Expression],
         arg_kinds: list[ArgKind],
+        arg_names: Sequence[str | None] | None,
         formal_to_actual: list[list[int]],
+        need_refresh: bool,
         context: Context,
     ) -> CallableType:
         """Infer the type arguments for a generic callee type.
@@ -1927,7 +1949,14 @@ def infer_function_type_arguments(
             if 2 in arg_pass_nums:
                 # Second pass of type inference.
                 (callee_type, inferred_args) = self.infer_function_type_arguments_pass2(
-                    callee_type, args, arg_kinds, formal_to_actual, inferred_args, context
+                    callee_type,
+                    args,
+                    arg_kinds,
+                    arg_names,
+                    formal_to_actual,
+                    inferred_args,
+                    need_refresh,
+                    context,
                 )
 
             if (
@@ -1953,6 +1982,17 @@ def infer_function_type_arguments(
                 or set(get_type_vars(a)) & set(callee_type.variables)
                 for a in inferred_args
             ):
+                if need_refresh:
+                    # Technically we need to refresh formal_to_actual after *each* inference pass,
+                    # since each pass can expand ParamSpec or TypeVarTuple. Although such situations
+                    # are very rare, not doing this can cause crashes.
+                    formal_to_actual = map_actuals_to_formals(
+                        arg_kinds,
+                        arg_names,
+                        callee_type.arg_kinds,
+                        callee_type.arg_names,
+                        lambda a: self.accept(args[a]),
+                    )
                 # If the regular two-phase inference didn't work, try inferring type
                 # variables while allowing for polymorphic solutions, i.e. for solutions
                 # potentially involving free variables.
@@ -2000,8 +2040,10 @@ def infer_function_type_arguments_pass2(
         callee_type: CallableType,
         args: list[Expression],
         arg_kinds: list[ArgKind],
+        arg_names: Sequence[str | None] | None,
         formal_to_actual: list[list[int]],
         old_inferred_args: Sequence[Type | None],
+        need_refresh: bool,
         context: Context,
     ) -> tuple[CallableType, list[Type | None]]:
         """Perform second pass of generic function type argument inference.
@@ -2023,6 +2065,14 @@ def infer_function_type_arguments_pass2(
             if isinstance(arg, (NoneType, UninhabitedType)) or has_erased_component(arg):
                 inferred_args[i] = None
         callee_type = self.apply_generic_arguments(callee_type, inferred_args, context)
+        if need_refresh:
+            formal_to_actual = map_actuals_to_formals(
+                arg_kinds,
+                arg_names,
+                callee_type.arg_kinds,
+                callee_type.arg_names,
+                lambda a: self.accept(args[a]),
+            )
 
         arg_types = self.infer_arg_types_in_context(callee_type, args, arg_kinds, formal_to_actual)
 
@@ -4735,8 +4785,22 @@ def infer_lambda_type_using_context(
         # they must be considered as indeterminate. We use ErasedType since it
         # does not affect type inference results (it is for purposes like this
         # only).
-        callable_ctx = get_proper_type(replace_meta_vars(ctx, ErasedType()))
-        assert isinstance(callable_ctx, CallableType)
+        if self.chk.options.new_type_inference:
+            # With new type inference we can preserve argument types even if they
+            # are generic, since new inference algorithm can handle constraints
+            # like S <: T (we still erase return type since it's ultimately unknown).
+            extra_vars = []
+            for arg in ctx.arg_types:
+                meta_vars = [tv for tv in get_all_type_vars(arg) if tv.id.is_meta_var()]
+                extra_vars.extend([tv for tv in meta_vars if tv not in extra_vars])
+            callable_ctx = ctx.copy_modified(
+                ret_type=replace_meta_vars(ctx.ret_type, ErasedType()),
+                variables=list(ctx.variables) + extra_vars,
+            )
+        else:
+            erased_ctx = replace_meta_vars(ctx, ErasedType())
+            assert isinstance(erased_ctx, ProperType) and isinstance(erased_ctx, CallableType)
+            callable_ctx = erased_ctx
 
         # The callable_ctx may have a fallback of builtins.type if the context
         # is a constructor -- but this fallback doesn't make sense for lambdas.
@@ -5693,18 +5757,28 @@ def __init__(self, poly_tvars: Sequence[TypeVarLikeType]) -> None:
         self.bound_tvars: set[TypeVarLikeType] = set()
         self.seen_aliases: set[TypeInfo] = set()
 
-    def visit_callable_type(self, t: CallableType) -> Type:
-        found_vars = set()
+    def collect_vars(self, t: CallableType | Parameters) -> list[TypeVarLikeType]:
+        found_vars = []
         for arg in t.arg_types:
-            found_vars |= set(get_type_vars(arg)) & self.poly_tvars
+            for tv in get_all_type_vars(arg):
+                if isinstance(tv, ParamSpecType):
+                    normalized: TypeVarLikeType = tv.copy_modified(
+                        flavor=ParamSpecFlavor.BARE, prefix=Parameters([], [], [])
+                    )
+                else:
+                    normalized = tv
+                if normalized in self.poly_tvars and normalized not in self.bound_tvars:
+                    found_vars.append(normalized)
+        return remove_dups(found_vars)
 
-        found_vars -= self.bound_tvars
-        self.bound_tvars |= found_vars
+    def visit_callable_type(self, t: CallableType) -> Type:
+        found_vars = self.collect_vars(t)
+        self.bound_tvars |= set(found_vars)
         result = super().visit_callable_type(t)
-        self.bound_tvars -= found_vars
+        self.bound_tvars -= set(found_vars)
 
         assert isinstance(result, ProperType) and isinstance(result, CallableType)
-        result.variables = list(result.variables) + list(found_vars)
+        result.variables = list(result.variables) + found_vars
         return result
 
     def visit_type_var(self, t: TypeVarType) -> Type:
@@ -5713,8 +5787,9 @@ def visit_type_var(self, t: TypeVarType) -> Type:
         return super().visit_type_var(t)
 
     def visit_param_spec(self, t: ParamSpecType) -> Type:
-        # TODO: Support polymorphic apply for ParamSpec.
-        raise PolyTranslationError()
+        if t in self.poly_tvars and t not in self.bound_tvars:
+            raise PolyTranslationError()
+        return super().visit_param_spec(t)
 
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type:
         # TODO: Support polymorphic apply for TypeVarTuple.
@@ -5730,6 +5805,26 @@ def visit_type_alias_type(self, t: TypeAliasType) -> Type:
         raise PolyTranslationError()
 
     def visit_instance(self, t: Instance) -> Type:
+        if t.type.has_param_spec_type:
+            # We need this special-casing to preserve the possibility to store a
+            # generic function in an instance type. Things like
+            #     forall T . Foo[[x: T], T]
+            # are not really expressible in current type system, but this looks like
+            # a useful feature, so let's keep it.
+            param_spec_index = next(
+                i for (i, tv) in enumerate(t.type.defn.type_vars) if isinstance(tv, ParamSpecType)
+            )
+            p = get_proper_type(t.args[param_spec_index])
+            if isinstance(p, Parameters):
+                found_vars = self.collect_vars(p)
+                self.bound_tvars |= set(found_vars)
+                new_args = [a.accept(self) for a in t.args]
+                self.bound_tvars -= set(found_vars)
+
+                repl = new_args[param_spec_index]
+                assert isinstance(repl, ProperType) and isinstance(repl, Parameters)
+                repl.variables = list(repl.variables) + list(found_vars)
+                return t.copy_modified(args=new_args)
         # There is the same problem with callback protocols as with aliases
         # (callback protocols are essentially more flexible aliases to callables).
         # Note: consider supporting bindings in instances, e.g. LRUCache[[x: T], T].
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 299c6292a259..04c3378ce16b 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -9,7 +9,7 @@
 from mypy.argmap import ArgTypeExpander
 from mypy.erasetype import erase_typevars
 from mypy.maptype import map_instance_to_supertype
-from mypy.nodes import ARG_OPT, ARG_POS, CONTRAVARIANT, COVARIANT, ArgKind
+from mypy.nodes import ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, CONTRAVARIANT, COVARIANT, ArgKind
 from mypy.types import (
     TUPLE_LIKE_INSTANCE_NAMES,
     AnyType,
@@ -40,7 +40,6 @@
     UninhabitedType,
     UnionType,
     UnpackType,
-    callable_with_ellipsis,
     get_proper_type,
     has_recursive_types,
     has_type_vars,
@@ -166,6 +165,8 @@ def infer_constraints_for_callable(
                 actual_type = mapper.expand_actual_type(
                     actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i]
                 )
+                # TODO: if callee has ParamSpec, we need to collect all actuals that map to star
+                # args and create single constraint between P and resulting Parameters instead.
                 c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF)
                 constraints.extend(c)
 
@@ -577,10 +578,21 @@ def visit_unpack_type(self, template: UnpackType) -> list[Constraint]:
         raise RuntimeError("Mypy bug: unpack should be handled at a higher level.")
 
     def visit_parameters(self, template: Parameters) -> list[Constraint]:
-        # constraining Any against C[P] turns into infer_against_any([P], Any)
-        # ... which seems like the only case this can happen. Better to fail loudly.
+        # Constraining Any against C[P] turns into infer_against_any([P], Any)
+        # ... which seems like the only case this can happen. Better to fail loudly otherwise.
         if isinstance(self.actual, AnyType):
             return self.infer_against_any(template.arg_types, self.actual)
+        if type_state.infer_polymorphic and isinstance(self.actual, Parameters):
+            # For polymorphic inference we need to be able to infer secondary constraints
+            # in situations like [x: T] <: P <: [x: int].
+            res = []
+            if len(template.arg_types) == len(self.actual.arg_types):
+                for tt, at in zip(template.arg_types, self.actual.arg_types):
+                    # This avoids bogus constraints like T <: P.args
+                    if isinstance(at, ParamSpecType):
+                        continue
+                    res.extend(infer_constraints(tt, at, self.direction))
+            return res
         raise RuntimeError("Parameters cannot be constrained to")
 
     # Non-leaf types
@@ -686,7 +698,6 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                 # N.B: We use zip instead of indexing because the lengths might have
                 # mismatches during daemon reprocessing.
                 for tvar, mapped_arg, instance_arg in zip(tvars, mapped_args, instance_args):
-                    # TODO(PEP612): More ParamSpec work (or is Parameters the only thing accepted)
                     if isinstance(tvar, TypeVarType):
                         # The constraints for generic type parameters depend on variance.
                         # Include constraints from both directions if invariant.
@@ -697,25 +708,26 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                                 infer_constraints(mapped_arg, instance_arg, neg_op(self.direction))
                             )
                     elif isinstance(tvar, ParamSpecType) and isinstance(mapped_arg, ParamSpecType):
-                        suffix = get_proper_type(instance_arg)
-
-                        if isinstance(suffix, CallableType):
-                            prefix = mapped_arg.prefix
-                            from_concat = bool(prefix.arg_types) or suffix.from_concatenate
-                            suffix = suffix.copy_modified(from_concatenate=from_concat)
-
-                        if isinstance(suffix, (Parameters, CallableType)):
-                            # no such thing as variance for ParamSpecs
-                            # TODO: is there a case I am missing?
+                        prefix = mapped_arg.prefix
+                        if isinstance(instance_arg, Parameters):
+                            # No such thing as variance for ParamSpecs, consider them invariant
                             # TODO: constraints between prefixes
-                            prefix = mapped_arg.prefix
-                            suffix = suffix.copy_modified(
-                                suffix.arg_types[len(prefix.arg_types) :],
-                                suffix.arg_kinds[len(prefix.arg_kinds) :],
-                                suffix.arg_names[len(prefix.arg_names) :],
+                            suffix: Type = instance_arg.copy_modified(
+                                instance_arg.arg_types[len(prefix.arg_types) :],
+                                instance_arg.arg_kinds[len(prefix.arg_kinds) :],
+                                instance_arg.arg_names[len(prefix.arg_names) :],
                             )
+                            res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix))
                             res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix))
-                        elif isinstance(suffix, ParamSpecType):
+                        elif isinstance(instance_arg, ParamSpecType):
+                            suffix = instance_arg.copy_modified(
+                                prefix=Parameters(
+                                    instance_arg.prefix.arg_types[len(prefix.arg_types) :],
+                                    instance_arg.prefix.arg_kinds[len(prefix.arg_kinds) :],
+                                    instance_arg.prefix.arg_names[len(prefix.arg_names) :],
+                                )
+                            )
+                            res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix))
                             res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix))
                     else:
                         # This case should have been handled above.
@@ -767,26 +779,26 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                     elif isinstance(tvar, ParamSpecType) and isinstance(
                         template_arg, ParamSpecType
                     ):
-                        suffix = get_proper_type(mapped_arg)
-
-                        if isinstance(suffix, CallableType):
-                            prefix = template_arg.prefix
-                            from_concat = bool(prefix.arg_types) or suffix.from_concatenate
-                            suffix = suffix.copy_modified(from_concatenate=from_concat)
-
-                        if isinstance(suffix, (Parameters, CallableType)):
-                            # no such thing as variance for ParamSpecs
-                            # TODO: is there a case I am missing?
+                        prefix = template_arg.prefix
+                        if isinstance(mapped_arg, Parameters):
+                            # No such thing as variance for ParamSpecs, consider them invariant
                             # TODO: constraints between prefixes
-                            prefix = template_arg.prefix
-
-                            suffix = suffix.copy_modified(
-                                suffix.arg_types[len(prefix.arg_types) :],
-                                suffix.arg_kinds[len(prefix.arg_kinds) :],
-                                suffix.arg_names[len(prefix.arg_names) :],
+                            suffix = mapped_arg.copy_modified(
+                                mapped_arg.arg_types[len(prefix.arg_types) :],
+                                mapped_arg.arg_kinds[len(prefix.arg_kinds) :],
+                                mapped_arg.arg_names[len(prefix.arg_names) :],
                             )
+                            res.append(Constraint(template_arg, SUBTYPE_OF, suffix))
                             res.append(Constraint(template_arg, SUPERTYPE_OF, suffix))
-                        elif isinstance(suffix, ParamSpecType):
+                        elif isinstance(mapped_arg, ParamSpecType):
+                            suffix = mapped_arg.copy_modified(
+                                prefix=Parameters(
+                                    mapped_arg.prefix.arg_types[len(prefix.arg_types) :],
+                                    mapped_arg.prefix.arg_kinds[len(prefix.arg_kinds) :],
+                                    mapped_arg.prefix.arg_names[len(prefix.arg_names) :],
+                                )
+                            )
+                            res.append(Constraint(template_arg, SUBTYPE_OF, suffix))
                             res.append(Constraint(template_arg, SUPERTYPE_OF, suffix))
                     else:
                         # This case should have been handled above.
@@ -848,7 +860,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
         elif isinstance(actual, TupleType) and self.direction == SUPERTYPE_OF:
             return infer_constraints(template, mypy.typeops.tuple_fallback(actual), self.direction)
         elif isinstance(actual, TypeVarType):
-            if not actual.values:
+            if not actual.values and not actual.id.is_meta_var():
                 return infer_constraints(template, actual.upper_bound, self.direction)
             return []
         elif isinstance(actual, ParamSpecType):
@@ -892,6 +904,8 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
         # Normalize callables before matching against each other.
         # Note that non-normalized callables can be created in annotations
         # using e.g. callback protocols.
+        # TODO: check that callables match? Ideally we should not infer constraints
+        # callables that can never be subtypes of one another in given direction.
         template = template.with_unpacked_kwargs()
         extra_tvars = False
         if isinstance(self.actual, CallableType):
@@ -899,12 +913,10 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
             cactual = self.actual.with_unpacked_kwargs()
             param_spec = template.param_spec()
             if param_spec is None:
-                # FIX verify argument counts
                 # TODO: Erase template variables if it is generic?
                 if (
                     type_state.infer_polymorphic
                     and cactual.variables
-                    and cactual.param_spec() is None
                     and not self.skip_neg_op
                     # Technically, the correct inferred type for application of e.g.
                     # Callable[..., T] -> Callable[..., T] (with literal ellipsis), to a generic
@@ -926,7 +938,8 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                 # We can't infer constraints from arguments if the template is Callable[..., T]
                 # (with literal '...').
                 if not template.is_ellipsis_args:
-                    if find_unpack_in_list(template.arg_types) is not None:
+                    unpack_present = find_unpack_in_list(template.arg_types)
+                    if unpack_present is not None:
                         (
                             unpack_constraints,
                             cactual_args_t,
@@ -941,47 +954,70 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                     else:
                         template_args = template.arg_types
                         cactual_args = cactual.arg_types
-                    # The lengths should match, but don't crash (it will error elsewhere).
+                    # TODO: use some more principled "formal to actual" logic
+                    # instead of this lock-step loop over argument types. This identical
+                    # logic should be used in 5 places: in Parameters vs Parameters
+                    # inference, in Instance vs Instance inference for prefixes (two
+                    # branches), and in Callable vs Callable inference (two branches).
                     for t, a in zip(template_args, cactual_args):
+                        # This avoids bogus constraints like T <: P.args
+                        if isinstance(a, ParamSpecType):
+                            # TODO: can we infer something useful for *T vs P?
+                            continue
                         # Negate direction due to function argument type contravariance.
                         res.extend(infer_constraints(t, a, neg_op(self.direction)))
             else:
-                # sometimes, it appears we try to get constraints between two paramspec callables?
-
-                # TODO: Direction
-                # TODO: check the prefixes match
                 prefix = param_spec.prefix
                 prefix_len = len(prefix.arg_types)
                 cactual_ps = cactual.param_spec()
 
+                if type_state.infer_polymorphic and cactual.variables and not self.skip_neg_op:
+                    # Similar logic to the branch above.
+                    res.extend(
+                        infer_constraints(
+                            cactual, template, neg_op(self.direction), skip_neg_op=True
+                        )
+                    )
+                    extra_tvars = True
+
                 if not cactual_ps:
                     max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)])
                     prefix_len = min(prefix_len, max_prefix_len)
                     res.append(
                         Constraint(
                             param_spec,
-                            SUBTYPE_OF,
-                            cactual.copy_modified(
+                            neg_op(self.direction),
+                            Parameters(
                                 arg_types=cactual.arg_types[prefix_len:],
                                 arg_kinds=cactual.arg_kinds[prefix_len:],
                                 arg_names=cactual.arg_names[prefix_len:],
-                                ret_type=UninhabitedType(),
+                                variables=cactual.variables
+                                if not type_state.infer_polymorphic
+                                else [],
                             ),
                         )
                     )
                 else:
-                    res.append(Constraint(param_spec, SUBTYPE_OF, cactual_ps))
+                    if len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types):
+                        cactual_ps = cactual_ps.copy_modified(
+                            prefix=Parameters(
+                                arg_types=cactual_ps.prefix.arg_types[prefix_len:],
+                                arg_kinds=cactual_ps.prefix.arg_kinds[prefix_len:],
+                                arg_names=cactual_ps.prefix.arg_names[prefix_len:],
+                            )
+                        )
+                        res.append(Constraint(param_spec, neg_op(self.direction), cactual_ps))
 
-                # compare prefixes
+                # Compare prefixes as well
                 cactual_prefix = cactual.copy_modified(
                     arg_types=cactual.arg_types[:prefix_len],
                     arg_kinds=cactual.arg_kinds[:prefix_len],
                     arg_names=cactual.arg_names[:prefix_len],
                 )
 
-                # TODO: see above "FIX" comments for param_spec is None case
-                # TODO: this assumes positional arguments
                 for t, a in zip(prefix.arg_types, cactual_prefix.arg_types):
+                    if isinstance(a, ParamSpecType):
+                        continue
                     res.extend(infer_constraints(t, a, neg_op(self.direction)))
 
             template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type
@@ -993,7 +1029,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
             res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction))
             if extra_tvars:
                 for c in res:
-                    c.extra_tvars = list(cactual.variables)
+                    c.extra_tvars += cactual.variables
             return res
         elif isinstance(self.actual, AnyType):
             param_spec = template.param_spec()
@@ -1006,7 +1042,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                     Constraint(
                         param_spec,
                         SUBTYPE_OF,
-                        callable_with_ellipsis(any_type, any_type, template.fallback),
+                        Parameters([any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None]),
                     )
                 ]
             res.extend(infer_constraints(template.ret_type, any_type, self.direction))
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index b599b49e4c12..0e98ed048197 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -231,44 +231,27 @@ def visit_type_var(self, t: TypeVarType) -> Type:
         return repl
 
     def visit_param_spec(self, t: ParamSpecType) -> Type:
-        # set prefix to something empty so we don't duplicate it
-        repl = get_proper_type(
-            self.variables.get(t.id, t.copy_modified(prefix=Parameters([], [], [])))
-        )
-        if isinstance(repl, Instance):
-            # TODO: what does prefix mean in this case?
-            # TODO: why does this case even happen? Instances aren't plural.
-            return repl
-        elif isinstance(repl, (ParamSpecType, Parameters, CallableType)):
-            if isinstance(repl, ParamSpecType):
-                return repl.copy_modified(
-                    flavor=t.flavor,
-                    prefix=t.prefix.copy_modified(
-                        arg_types=t.prefix.arg_types + repl.prefix.arg_types,
-                        arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds,
-                        arg_names=t.prefix.arg_names + repl.prefix.arg_names,
-                    ),
-                )
-            else:
-                # if the paramspec is *P.args or **P.kwargs:
-                if t.flavor != ParamSpecFlavor.BARE:
-                    assert isinstance(repl, CallableType), "Should not be able to get here."
-                    # Is this always the right thing to do?
-                    param_spec = repl.param_spec()
-                    if param_spec:
-                        return param_spec.with_flavor(t.flavor)
-                    else:
-                        return repl
-                else:
-                    return Parameters(
-                        t.prefix.arg_types + repl.arg_types,
-                        t.prefix.arg_kinds + repl.arg_kinds,
-                        t.prefix.arg_names + repl.arg_names,
-                        variables=[*t.prefix.variables, *repl.variables],
-                    )
-
+        # Set prefix to something empty, so we don't duplicate it below.
+        repl = self.variables.get(t.id, t.copy_modified(prefix=Parameters([], [], [])))
+        if isinstance(repl, ParamSpecType):
+            return repl.copy_modified(
+                flavor=t.flavor,
+                prefix=t.prefix.copy_modified(
+                    arg_types=self.expand_types(t.prefix.arg_types + repl.prefix.arg_types),
+                    arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds,
+                    arg_names=t.prefix.arg_names + repl.prefix.arg_names,
+                ),
+            )
+        elif isinstance(repl, Parameters):
+            assert t.flavor == ParamSpecFlavor.BARE
+            return Parameters(
+                self.expand_types(t.prefix.arg_types + repl.arg_types),
+                t.prefix.arg_kinds + repl.arg_kinds,
+                t.prefix.arg_names + repl.arg_names,
+                variables=[*t.prefix.variables, *repl.variables],
+            )
         else:
-            # TODO: should this branch be removed? better not to fail silently
+            # TODO: replace this with "assert False"
             return repl
 
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type:
@@ -387,7 +370,7 @@ def interpolate_args_for_unpack(
     def visit_callable_type(self, t: CallableType) -> CallableType:
         param_spec = t.param_spec()
         if param_spec is not None:
-            repl = get_proper_type(self.variables.get(param_spec.id))
+            repl = self.variables.get(param_spec.id)
             # If a ParamSpec in a callable type is substituted with a
             # callable type, we can't use normal substitution logic,
             # since ParamSpec is actually split into two components
@@ -395,35 +378,30 @@ def visit_callable_type(self, t: CallableType) -> CallableType:
             # must expand both of them with all the argument types,
             # kinds and names in the replacement. The return type in
             # the replacement is ignored.
-            if isinstance(repl, (CallableType, Parameters)):
-                # Substitute *args: P.args, **kwargs: P.kwargs
-                prefix = param_spec.prefix
-                # we need to expand the types in the prefix, so might as well
-                # not get them in the first place
-                t = t.expand_param_spec(repl, no_prefix=True)
+            if isinstance(repl, Parameters):
+                # We need to expand both the types in the prefix and the ParamSpec itself
+                t = t.expand_param_spec(repl)
                 return t.copy_modified(
-                    arg_types=self.expand_types(prefix.arg_types) + t.arg_types,
-                    arg_kinds=prefix.arg_kinds + t.arg_kinds,
-                    arg_names=prefix.arg_names + t.arg_names,
+                    arg_types=self.expand_types(t.arg_types),
+                    arg_kinds=t.arg_kinds,
+                    arg_names=t.arg_names,
                     ret_type=t.ret_type.accept(self),
                     type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None),
                 )
-            # TODO: Conceptually, the "len(t.arg_types) == 2" should not be here. However, this
-            #       errors without it. Either figure out how to eliminate this or place an
-            #       explanation for why this is necessary.
-            elif isinstance(repl, ParamSpecType) and len(t.arg_types) == 2:
-                # We're substituting one paramspec for another; this can mean that the prefix
-                # changes. (e.g. sub Concatenate[int, P] for Q)
+            elif isinstance(repl, ParamSpecType):
+                # We're substituting one ParamSpec for another; this can mean that the prefix
+                # changes, e.g. substitute Concatenate[int, P] in place of Q.
                 prefix = repl.prefix
-                old_prefix = param_spec.prefix
-
-                # Check assumptions. I'm not sure what order to place new prefix vs old prefix:
-                assert not old_prefix.arg_types or not prefix.arg_types
-
-                t = t.copy_modified(
-                    arg_types=prefix.arg_types + old_prefix.arg_types + t.arg_types,
-                    arg_kinds=prefix.arg_kinds + old_prefix.arg_kinds + t.arg_kinds,
-                    arg_names=prefix.arg_names + old_prefix.arg_names + t.arg_names,
+                clean_repl = repl.copy_modified(prefix=Parameters([], [], []))
+                return t.copy_modified(
+                    arg_types=self.expand_types(t.arg_types[:-2] + prefix.arg_types)
+                    + [
+                        clean_repl.with_flavor(ParamSpecFlavor.ARGS),
+                        clean_repl.with_flavor(ParamSpecFlavor.KWARGS),
+                    ],
+                    arg_kinds=t.arg_kinds[:-2] + prefix.arg_kinds + t.arg_kinds[-2:],
+                    arg_names=t.arg_names[:-2] + prefix.arg_names + t.arg_names[-2:],
+                    ret_type=t.ret_type.accept(self),
                 )
 
         var_arg = t.var_arg()
diff --git a/mypy/join.py b/mypy/join.py
index f4af59f4e50b..806c644a680c 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -315,8 +315,14 @@ def visit_unpack_type(self, t: UnpackType) -> UnpackType:
         raise NotImplementedError
 
     def visit_parameters(self, t: Parameters) -> ProperType:
-        if self.s == t:
-            return t
+        if isinstance(self.s, Parameters):
+            if len(t.arg_types) != len(self.s.arg_types):
+                return self.default(self.s)
+            return t.copy_modified(
+                # Note that since during constraint inference we already treat whole ParamSpec as
+                # contravariant, we should join individual items, not meet them like for Callables
+                arg_types=[join_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)]
+            )
         else:
             return self.default(self.s)
 
diff --git a/mypy/meet.py b/mypy/meet.py
index 29c4d3663503..e3a22a226575 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -701,11 +701,12 @@ def visit_unpack_type(self, t: UnpackType) -> ProperType:
         raise NotImplementedError
 
     def visit_parameters(self, t: Parameters) -> ProperType:
-        # TODO: is this the right variance?
-        if isinstance(self.s, (Parameters, CallableType)):
+        if isinstance(self.s, Parameters):
             if len(t.arg_types) != len(self.s.arg_types):
                 return self.default(self.s)
             return t.copy_modified(
+                # Note that since during constraint inference we already treat whole ParamSpec as
+                # contravariant, we should meet individual items, not join them like for Callables
                 arg_types=[meet_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)]
             )
         else:
diff --git a/mypy/solve.py b/mypy/solve.py
index 72b3d6f26618..4b2b899c2a8d 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -6,17 +6,18 @@
 from typing import Iterable, Sequence
 from typing_extensions import TypeAlias as _TypeAlias
 
-from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints, neg_op
+from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints
 from mypy.expandtype import expand_type
 from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort
 from mypy.join import join_types
 from mypy.meet import meet_type_list, meet_types
 from mypy.subtypes import is_subtype
-from mypy.typeops import get_type_vars
+from mypy.typeops import get_all_type_vars
 from mypy.types import (
     AnyType,
     Instance,
     NoneType,
+    ParamSpecType,
     ProperType,
     Type,
     TypeOfAny,
@@ -26,7 +27,6 @@
     UninhabitedType,
     UnionType,
     get_proper_type,
-    remove_dups,
 )
 from mypy.typestate import type_state
 
@@ -62,10 +62,6 @@ def solve_constraints(
     for c in constraints:
         extra_vars.extend([v.id for v in c.extra_tvars if v.id not in vars + extra_vars])
         originals.update({v.id: v for v in c.extra_tvars if v.id not in originals})
-    if allow_polymorphic:
-        # Constraints like T :> S and S <: T are semantically the same, but they are
-        # represented differently. Normalize the constraint list w.r.t this equivalence.
-        constraints = normalize_constraints(constraints, vars + extra_vars)
 
     # Collect a list of constraints for each type variable.
     cmap: dict[TypeVarId, list[Constraint]] = {tv: [] for tv in vars + extra_vars}
@@ -334,23 +330,6 @@ def is_trivial_bound(tp: ProperType) -> bool:
     return isinstance(tp, Instance) and tp.type.fullname == "builtins.object"
 
 
-def normalize_constraints(
-    constraints: list[Constraint], vars: list[TypeVarId]
-) -> list[Constraint]:
-    """Normalize list of constraints (to simplify life for the non-linear solver).
-
-    This includes two things currently:
-      * Complement T :> S by S <: T
-      * Remove strict duplicates
-      * Remove constrains for unrelated variables
-    """
-    res = constraints.copy()
-    for c in constraints:
-        if isinstance(c.target, TypeVarType):
-            res.append(Constraint(c.target, neg_op(c.op), c.origin_type_var))
-    return [c for c in remove_dups(constraints) if c.type_var in vars]
-
-
 def transitive_closure(
     tvars: list[TypeVarId], constraints: list[Constraint]
 ) -> tuple[Graph, Bounds, Bounds]:
@@ -380,7 +359,14 @@ def transitive_closure(
     remaining = set(constraints)
     while remaining:
         c = remaining.pop()
-        if isinstance(c.target, TypeVarType) and c.target.id in tvars:
+        # Note that ParamSpec constraint P <: Q may be considered linear only if Q has no prefix,
+        # for cases like P <: Concatenate[T, Q] we should consider this non-linear and put {P} and
+        # {T, Q} into separate SCCs.
+        if (
+            isinstance(c.target, TypeVarType)
+            or isinstance(c.target, ParamSpecType)
+            and not c.target.prefix.arg_types
+        ) and c.target.id in tvars:
             if c.op == SUBTYPE_OF:
                 lower, upper = c.type_var, c.target.id
             else:
@@ -463,4 +449,4 @@ def check_linear(scc: set[TypeVarId], lowers: Bounds, uppers: Bounds) -> bool:
 
 def get_vars(target: Type, vars: list[TypeVarId]) -> set[TypeVarId]:
     """Find type variables for which we are solving in a target type."""
-    return {tv.id for tv in get_type_vars(target)} & set(vars)
+    return {tv.id for tv in get_all_type_vars(target)} & set(vars)
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index da92f7398d4e..60fccc7e357c 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -1705,11 +1705,15 @@ def unify_generic_callable(
         return_constraint_direction = mypy.constraints.SUBTYPE_OF
 
     constraints: list[mypy.constraints.Constraint] = []
-    for arg_type, target_arg_type in zip(type.arg_types, target.arg_types):
-        c = mypy.constraints.infer_constraints(
-            arg_type, target_arg_type, mypy.constraints.SUPERTYPE_OF
-        )
-        constraints.extend(c)
+    # There is some special logic for inference in callables, so it is better to use
+    # them as wholes instead of inferring from separate arguments.
+    cs = mypy.constraints.infer_constraints(
+        type.copy_modified(ret_type=UninhabitedType()),
+        target.copy_modified(ret_type=UninhabitedType()),
+        mypy.constraints.SUBTYPE_OF,
+        skip_neg_op=True,
+    )
+    constraints.extend(cs)
     if not ignore_return:
         c = mypy.constraints.infer_constraints(
             type.ret_type, target.ret_type, return_constraint_direction
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
index 59457dfa5d3b..56ac86058ce4 100644
--- a/mypy/test/testtypes.py
+++ b/mypy/test/testtypes.py
@@ -1464,7 +1464,7 @@ def make_call(*items: tuple[str, str | None]) -> CallExpr:
 class TestExpandTypeLimitGetProperType(TestCase):
     # WARNING: do not increase this number unless absolutely necessary,
     # and you understand what you are doing.
-    ALLOWED_GET_PROPER_TYPES = 8
+    ALLOWED_GET_PROPER_TYPES = 6
 
     @skipUnless(mypy.expandtype.__file__.endswith(".py"), "Skip for compiled mypy")
     def test_count_get_proper_type(self) -> None:
diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py
index cbfa43a77b81..1860a43eb14f 100644
--- a/mypy/type_visitor.py
+++ b/mypy/type_visitor.py
@@ -348,7 +348,7 @@ def visit_type_var(self, t: TypeVarType) -> T:
         return self.query_types([t.upper_bound, t.default] + t.values)
 
     def visit_param_spec(self, t: ParamSpecType) -> T:
-        return self.query_types([t.upper_bound, t.default])
+        return self.query_types([t.upper_bound, t.default, t.prefix])
 
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> T:
         return self.query_types([t.upper_bound, t.default])
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index d894e2cc8c51..8ac73cdf8aac 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -1244,9 +1244,23 @@ def analyze_callable_type(self, t: UnboundType) -> Type:
                 )
             else:
                 # Callable[P, RET] (where P is ParamSpec)
-                maybe_ret = self.analyze_callable_args_for_paramspec(
-                    callable_args, ret_type, fallback
-                ) or self.analyze_callable_args_for_concatenate(callable_args, ret_type, fallback)
+                with self.tvar_scope_frame():
+                    # Temporarily bind ParamSpecs to allow code like this:
+                    #     my_fun: Callable[Q, Foo[Q]]
+                    # We usually do this later in visit_callable_type(), but the analysis
+                    # below happens at very early stage.
+                    variables = []
+                    for name, tvar_expr in self.find_type_var_likes(callable_args):
+                        variables.append(self.tvar_scope.bind_new(name, tvar_expr))
+                    maybe_ret = self.analyze_callable_args_for_paramspec(
+                        callable_args, ret_type, fallback
+                    ) or self.analyze_callable_args_for_concatenate(
+                        callable_args, ret_type, fallback
+                    )
+                    if maybe_ret:
+                        maybe_ret = maybe_ret.copy_modified(
+                            ret_type=ret_type.accept(self), variables=variables
+                        )
                 if maybe_ret is None:
                     # Callable[?, RET] (where ? is something invalid)
                     self.fail(
@@ -1532,6 +1546,7 @@ def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = Fa
             if analyzed.prefix.arg_types:
                 self.fail("Invalid location for Concatenate", t, code=codes.VALID_TYPE)
                 self.note("You can use Concatenate as the first argument to Callable", t)
+                analyzed = AnyType(TypeOfAny.from_error)
             else:
                 self.fail(
                     f'Invalid location for ParamSpec "{analyzed.name}"', t, code=codes.VALID_TYPE
@@ -1541,6 +1556,7 @@ def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = Fa
                     "'Callable[{}, int]'".format(analyzed.name),
                     t,
                 )
+                analyzed = AnyType(TypeOfAny.from_error)
         return analyzed
 
     def anal_var_def(self, var_def: TypeVarLikeType) -> TypeVarLikeType:
diff --git a/mypy/typeops.py b/mypy/typeops.py
index 4233cc1b2b33..d746ea701fde 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -948,22 +948,31 @@ def coerce_to_literal(typ: Type) -> Type:
 
 
 def get_type_vars(tp: Type) -> list[TypeVarType]:
-    return tp.accept(TypeVarExtractor())
+    return cast("list[TypeVarType]", tp.accept(TypeVarExtractor()))
 
 
-class TypeVarExtractor(TypeQuery[List[TypeVarType]]):
-    def __init__(self) -> None:
+def get_all_type_vars(tp: Type) -> list[TypeVarLikeType]:
+    # TODO: should we always use this function instead of get_type_vars() above?
+    return tp.accept(TypeVarExtractor(include_all=True))
+
+
+class TypeVarExtractor(TypeQuery[List[TypeVarLikeType]]):
+    def __init__(self, include_all: bool = False) -> None:
         super().__init__(self._merge)
+        self.include_all = include_all
 
-    def _merge(self, iter: Iterable[list[TypeVarType]]) -> list[TypeVarType]:
+    def _merge(self, iter: Iterable[list[TypeVarLikeType]]) -> list[TypeVarLikeType]:
         out = []
         for item in iter:
             out.extend(item)
         return out
 
-    def visit_type_var(self, t: TypeVarType) -> list[TypeVarType]:
+    def visit_type_var(self, t: TypeVarType) -> list[TypeVarLikeType]:
         return [t]
 
+    def visit_param_spec(self, t: ParamSpecType) -> list[TypeVarLikeType]:
+        return [t] if self.include_all else []
+
 
 def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool:
     """Does this type have a custom special method such as __format__() or __eq__()?
diff --git a/mypy/types.py b/mypy/types.py
index d13cff00c06d..359ca713616b 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1577,6 +1577,7 @@ def __init__(
         self.arg_kinds = arg_kinds
         self.arg_names = list(arg_names)
         assert len(arg_types) == len(arg_kinds) == len(arg_names)
+        assert not any(isinstance(t, Parameters) for t in arg_types)
         self.min_args = arg_kinds.count(ARG_POS)
         self.is_ellipsis_args = is_ellipsis_args
         self.variables = variables or []
@@ -1788,6 +1789,11 @@ def __init__(
     ) -> None:
         super().__init__(line, column)
         assert len(arg_types) == len(arg_kinds) == len(arg_names)
+        for t, k in zip(arg_types, arg_kinds):
+            if isinstance(t, ParamSpecType):
+                assert not t.prefix.arg_types
+                # TODO: should we assert that only ARG_STAR contain ParamSpecType?
+                # See testParamSpecJoin, which relies on passing e.g. `P.args` as a plain argument.
         if variables is None:
             variables = []
         self.arg_types = list(arg_types)
@@ -2033,36 +2039,21 @@ def param_spec(self) -> ParamSpecType | None:
         if not isinstance(arg_type, ParamSpecType):
             return None
 
-        # sometimes paramspectypes are analyzed in from mysterious places,
-        # e.g. def f(prefix..., *args: P.args, **kwargs: P.kwargs) -> ...: ...
-        prefix = arg_type.prefix
-        if not prefix.arg_types:
-            # TODO: confirm that all arg kinds are positional
-            prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2])
-
+        # Prepend prefix for def f(prefix..., *args: P.args, **kwargs: P.kwargs) -> ...
+        # TODO: confirm that all arg kinds are positional
+        prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2])
         return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix)
 
-    def expand_param_spec(
-        self, c: CallableType | Parameters, no_prefix: bool = False
-    ) -> CallableType:
+    def expand_param_spec(self, c: Parameters) -> CallableType:
+        # TODO: try deleting variables from Parameters after new type inference is default.
         variables = c.variables
-
-        if no_prefix:
-            return self.copy_modified(
-                arg_types=c.arg_types,
-                arg_kinds=c.arg_kinds,
-                arg_names=c.arg_names,
-                is_ellipsis_args=c.is_ellipsis_args,
-                variables=[*variables, *self.variables],
-            )
-        else:
-            return self.copy_modified(
-                arg_types=self.arg_types[:-2] + c.arg_types,
-                arg_kinds=self.arg_kinds[:-2] + c.arg_kinds,
-                arg_names=self.arg_names[:-2] + c.arg_names,
-                is_ellipsis_args=c.is_ellipsis_args,
-                variables=[*variables, *self.variables],
-            )
+        return self.copy_modified(
+            arg_types=self.arg_types[:-2] + c.arg_types,
+            arg_kinds=self.arg_kinds[:-2] + c.arg_kinds,
+            arg_names=self.arg_names[:-2] + c.arg_names,
+            is_ellipsis_args=c.is_ellipsis_args,
+            variables=[*variables, *self.variables],
+        )
 
     def with_unpacked_kwargs(self) -> NormalizedCallableType:
         if not self.unpack_kwargs:
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
index a8722d8190b9..f49541420cc0 100644
--- a/test-data/unit/check-functions.test
+++ b/test-data/unit/check-functions.test
@@ -2330,7 +2330,7 @@ T = TypeVar('T')
 def deco() -> Callable[[T], T]: pass
 reveal_type(deco)  # N: Revealed type is "def () -> def [T] (T`-1) -> T`-1"
 f = deco()
-reveal_type(f)  # N: Revealed type is "def [T] (T`-1) -> T`-1"
+reveal_type(f)  # N: Revealed type is "def [T] (T`1) -> T`1"
 i = f(3)
 reveal_type(i)  # N: Revealed type is "builtins.int"
 
@@ -2343,7 +2343,7 @@ U = TypeVar('U')
 def deco(x: U) -> Callable[[T, U], T]: pass
 reveal_type(deco)  # N: Revealed type is "def [U] (x: U`-1) -> def [T] (T`-2, U`-1) -> T`-2"
 f = deco("foo")
-reveal_type(f)  # N: Revealed type is "def [T] (T`-2, builtins.str) -> T`-2"
+reveal_type(f)  # N: Revealed type is "def [T] (T`1, builtins.str) -> T`1"
 i = f(3, "eggs")
 reveal_type(i)  # N: Revealed type is "builtins.int"
 
@@ -2354,9 +2354,9 @@ T = TypeVar('T')
 R = TypeVar('R')
 def deco() -> Callable[[T], Callable[[T, R], R]]: pass
 f = deco()
-reveal_type(f)  # N: Revealed type is "def [T] (T`-1) -> def [R] (T`-1, R`-2) -> R`-2"
+reveal_type(f)  # N: Revealed type is "def [T] (T`2) -> def [R] (T`2, R`1) -> R`1"
 g = f(3)
-reveal_type(g)  # N: Revealed type is "def [R] (builtins.int, R`-2) -> R`-2"
+reveal_type(g)  # N: Revealed type is "def [R] (builtins.int, R`3) -> R`3"
 s = g(4, "foo")
 reveal_type(s)  # N: Revealed type is "builtins.str"
 
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index d1842a74d634..8c7c4e035961 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -2713,6 +2713,7 @@ reveal_type(func(1))  # N: Revealed type is "builtins.int"
 [builtins fixtures/tuple.pyi]
 
 [case testGenericLambdaGenericMethodNoCrash]
+# flags: --new-type-inference
 from typing import TypeVar, Union, Callable, Generic
 
 S = TypeVar("S")
@@ -2723,7 +2724,7 @@ def f(x: Callable[[G[T]], int]) -> T: ...
 class G(Generic[T]):
     def g(self, x: S) -> Union[S, T]: ...
 
-f(lambda x: x.g(0))  # E: Cannot infer type argument 1 of "f"
+f(lambda x: x.g(0))  # E: Incompatible return value type (got "Union[int, T]", expected "int")
 
 [case testDictStarInference]
 class B: ...
@@ -3035,3 +3036,230 @@ reveal_type(dec1(id2))  # N: Revealed type is "def [S in (builtins.int, builtins
 reveal_type(dec2(id1))  # N: Revealed type is "def [UC <: __main__.C] (UC`5) -> builtins.list[UC`5]"
 reveal_type(dec2(id2))  # N: Revealed type is "def (<nothing>) -> builtins.list[<nothing>]" \
                         # E: Argument 1 to "dec2" has incompatible type "Callable[[V], V]"; expected "Callable[[<nothing>], <nothing>]"
+
+[case testInferenceAgainstGenericLambdas]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List
+
+S = TypeVar('S')
+T = TypeVar('T')
+
+def dec1(f: Callable[[T], T]) -> Callable[[T], List[T]]:
+    ...
+def dec2(f: Callable[[S], T]) -> Callable[[S], List[T]]:
+    ...
+def dec3(f: Callable[[List[S]], T]) -> Callable[[S], T]:
+    def g(x: S) -> T:
+        return f([x])
+    return g
+def dec4(f: Callable[[S], List[T]]) -> Callable[[S], T]:
+    ...
+def dec5(f: Callable[[int], T]) -> Callable[[int], List[T]]:
+    def g(x: int) -> List[T]:
+        return [f(x)] * x
+    return g
+
+reveal_type(dec1(lambda x: x))  # N: Revealed type is "def [T] (T`3) -> builtins.list[T`3]"
+reveal_type(dec2(lambda x: x))  # N: Revealed type is "def [S] (S`4) -> builtins.list[S`4]"
+reveal_type(dec3(lambda x: x[0]))  # N: Revealed type is "def [S] (S`6) -> S`6"
+reveal_type(dec4(lambda x: [x]))  # N: Revealed type is "def [S] (S`8) -> S`8"
+reveal_type(dec1(lambda x: 1))  # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]"
+reveal_type(dec5(lambda x: x))  # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]"
+reveal_type(dec3(lambda x: x))  # N: Revealed type is "def [S] (S`15) -> builtins.list[S`15]"
+dec4(lambda x: x)  # E: Incompatible return value type (got "S", expected "List[object]")
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericParamSpecBasicInList]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Tuple
+from typing_extensions import ParamSpec
+
+T = TypeVar('T')
+P = ParamSpec('P')
+U = TypeVar('U')
+V = TypeVar('V')
+
+def dec(f: Callable[P, T]) -> Callable[P, List[T]]: ...
+def id(x: U) -> U: ...
+def either(x: U, y: U) -> U: ...
+def pair(x: U, y: V) -> Tuple[U, V]: ...
+reveal_type(dec(id))  # N: Revealed type is "def [T] (x: T`2) -> builtins.list[T`2]"
+reveal_type(dec(either))  # N: Revealed type is "def [T] (x: T`4, y: T`4) -> builtins.list[T`4]"
+reveal_type(dec(pair))  # N: Revealed type is "def [U, V] (x: U`-1, y: V`-2) -> builtins.list[Tuple[U`-1, V`-2]]"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericParamSpecBasicDeList]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Tuple
+from typing_extensions import ParamSpec
+
+T = TypeVar('T')
+P = ParamSpec('P')
+U = TypeVar('U')
+V = TypeVar('V')
+
+def dec(f: Callable[P, List[T]]) -> Callable[P, T]: ...
+def id(x: U) -> U: ...
+def either(x: U, y: U) -> U: ...
+reveal_type(dec(id))  # N: Revealed type is "def [T] (x: builtins.list[T`2]) -> T`2"
+reveal_type(dec(either))  # N: Revealed type is "def [T] (x: builtins.list[T`4], y: builtins.list[T`4]) -> T`4"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericParamSpecPopOff]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Tuple
+from typing_extensions import ParamSpec, Concatenate
+
+T = TypeVar('T')
+S = TypeVar('S')
+P = ParamSpec('P')
+U = TypeVar('U')
+V = TypeVar('V')
+
+def dec(f: Callable[Concatenate[T, P], S]) -> Callable[P, Callable[[T], S]]: ...
+def id(x: U) -> U: ...
+def either(x: U, y: U) -> U: ...
+def pair(x: U, y: V) -> Tuple[U, V]: ...
+reveal_type(dec(id))  # N: Revealed type is "def () -> def [T] (T`1) -> T`1"
+reveal_type(dec(either))  # N: Revealed type is "def [T] (y: T`4) -> def (T`4) -> T`4"
+reveal_type(dec(pair))  # N: Revealed type is "def [V] (y: V`-2) -> def [T] (T`7) -> Tuple[T`7, V`-2]"
+reveal_type(dec(dec))  # N: Revealed type is "def () -> def [T, P, S] (def (T`-1, *P.args, **P.kwargs) -> S`-3) -> def (*P.args, **P.kwargs) -> def (T`-1) -> S`-3"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericParamSpecPopOn]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Tuple
+from typing_extensions import ParamSpec, Concatenate
+
+T = TypeVar('T')
+S = TypeVar('S')
+P = ParamSpec('P')
+U = TypeVar('U')
+V = TypeVar('V')
+
+def dec(f: Callable[P, Callable[[T], S]]) -> Callable[Concatenate[T, P], S]: ...
+def id() -> Callable[[U], U]: ...
+def either(x: U) -> Callable[[U], U]: ...
+def pair(x: U) -> Callable[[V], Tuple[V, U]]: ...
+reveal_type(dec(id))  # N: Revealed type is "def [T] (T`2) -> T`2"
+reveal_type(dec(either))  # N: Revealed type is "def [T] (T`5, x: T`5) -> T`5"
+reveal_type(dec(pair))  # N: Revealed type is "def [T, U] (T`8, x: U`-1) -> Tuple[T`8, U`-1]"
+# This is counter-intuitive but looks correct, dec matches itself only if P is empty
+reveal_type(dec(dec))  # N: Revealed type is "def [T, S] (T`11, f: def () -> def (T`11) -> S`12) -> S`12"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericParamSpecVsParamSpec]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Tuple, Generic
+from typing_extensions import ParamSpec, Concatenate
+
+T = TypeVar('T')
+P = ParamSpec('P')
+Q = ParamSpec('Q')
+
+class Foo(Generic[P]): ...
+class Bar(Generic[P, T]): ...
+
+def dec(f: Callable[P, T]) -> Callable[P, List[T]]: ...
+def f(*args: Q.args, **kwargs: Q.kwargs) -> Foo[Q]: ...
+reveal_type(dec(f))  # N: Revealed type is "def [P] (*P.args, **P.kwargs) -> builtins.list[__main__.Foo[P`1]]"
+g: Callable[Concatenate[int, Q], Foo[Q]]
+reveal_type(dec(g))  # N: Revealed type is "def [Q] (builtins.int, *Q.args, **Q.kwargs) -> builtins.list[__main__.Foo[Q`-1]]"
+h: Callable[Concatenate[T, Q], Bar[Q, T]]
+reveal_type(dec(h))  # N: Revealed type is "def [T, Q] (T`-1, *Q.args, **Q.kwargs) -> builtins.list[__main__.Bar[Q`-2, T`-1]]"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericParamSpecVsParamSpecConcatenate]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Tuple, Generic
+from typing_extensions import ParamSpec, Concatenate
+
+T = TypeVar('T')
+P = ParamSpec('P')
+Q = ParamSpec('Q')
+
+class Foo(Generic[P]): ...
+class Bar(Generic[P, T]): ...
+
+def dec(f: Callable[P, int]) -> Callable[P, Foo[P]]: ...
+h: Callable[Concatenate[T, Q], int]
+g: Callable[Concatenate[T, Q], int]
+h = g
+reveal_type(dec(h))  # N: Revealed type is "def [T, Q] (T`-1, *Q.args, **Q.kwargs) -> __main__.Foo[[T`-1, **Q`-2]]"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericParamSpecSecondary]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Tuple, Generic
+from typing_extensions import ParamSpec, Concatenate
+
+T = TypeVar('T')
+P = ParamSpec('P')
+Q = ParamSpec('Q')
+
+class Foo(Generic[P]): ...
+
+def dec(f: Callable[P, Foo[P]]) -> Callable[P, Foo[P]]: ...
+g: Callable[[T], Foo[[int]]]
+reveal_type(dec(g))  # N: Revealed type is "def (builtins.int) -> __main__.Foo[[builtins.int]]"
+h: Callable[Q, Foo[[int]]]
+reveal_type(dec(g))  # N: Revealed type is "def (builtins.int) -> __main__.Foo[[builtins.int]]"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericParamSpecSecondOrder]
+# flags: --new-type-inference
+from typing import TypeVar, Callable
+from typing_extensions import ParamSpec, Concatenate
+
+T = TypeVar('T')
+S = TypeVar('S')
+P = ParamSpec('P')
+Q = ParamSpec('Q')
+U = TypeVar('U')
+W = ParamSpec('W')
+
+def transform(
+    dec: Callable[[Callable[P, T]], Callable[Q, S]]
+) -> Callable[[Callable[Concatenate[int, P], T]], Callable[Concatenate[int, Q], S]]: ...
+
+def dec(f: Callable[W, U]) -> Callable[W, U]: ...
+def dec2(f: Callable[Concatenate[str, W], U]) -> Callable[Concatenate[bytes, W], U]: ...
+reveal_type(transform(dec))  # N: Revealed type is "def [P, T] (def (builtins.int, *P.args, **P.kwargs) -> T`2) -> def (builtins.int, *P.args, **P.kwargs) -> T`2"
+reveal_type(transform(dec2))  # N: Revealed type is "def [W, T] (def (builtins.int, builtins.str, *W.args, **W.kwargs) -> T`6) -> def (builtins.int, builtins.bytes, *W.args, **W.kwargs) -> T`6"
+[builtins fixtures/tuple.pyi]
+
+[case testNoAccidentalVariableClashInNestedGeneric]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, Generic, Tuple
+
+T = TypeVar('T')
+S = TypeVar('S')
+U = TypeVar('U')
+
+def pipe(x: T, f1: Callable[[T], S], f2: Callable[[S], U]) -> U: ...
+def and_then(a: T) -> Callable[[S], Tuple[S, T]]: ...
+
+def apply(a: S, b: T) -> None:
+    v1 = and_then(b)
+    v2: Callable[[Tuple[S, T]], None]
+    return pipe(a, v1, v2)
+[builtins fixtures/tuple.pyi]
+
+[case testInferenceAgainstGenericParamSpecSpuriousBoundsNotUsed]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, Generic
+from typing_extensions import ParamSpec, Concatenate
+
+Q = ParamSpec("Q")
+class Foo(Generic[Q]): ...
+
+T1 = TypeVar("T1", bound=Foo[...])
+T2 = TypeVar("T2", bound=Foo[...])
+P = ParamSpec("P")
+def pop_off(fn: Callable[Concatenate[T1, P], T2]) -> Callable[P, Callable[[T1], T2]]:
+    ...
+
+@pop_off
+def test(command: Foo[Q]) -> Foo[Q]: ...
+reveal_type(test)  # N: Revealed type is "def () -> def [Q] (__main__.Foo[Q`-1]) -> __main__.Foo[Q`-1]"
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
index ba36c1548532..5f25b007dd47 100644
--- a/test-data/unit/check-inference-context.test
+++ b/test-data/unit/check-inference-context.test
@@ -693,6 +693,7 @@ f(lambda: None)
 g(lambda: None)
 
 [case testIsinstanceInInferredLambda]
+# flags: --new-type-inference
 from typing import TypeVar, Callable, Optional
 T = TypeVar('T')
 S = TypeVar('S')
@@ -700,7 +701,7 @@ class A: pass
 class B(A): pass
 class C(A): pass
 def f(func: Callable[[T], S], *z: T, r: Optional[S] = None) -> S: pass
-f(lambda x: 0 if isinstance(x, B) else 1) # E: Cannot infer type argument 1 of "f"
+reveal_type(f(lambda x: 0 if isinstance(x, B) else 1))  # N: Revealed type is "builtins.int"
 f(lambda x: 0 if isinstance(x, B) else 1, A())() # E: "int" not callable
 f(lambda x: x if isinstance(x, B) else B(), A(), r=B())() # E: "B" not callable
 f(
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index e0f29a19ec1d..9ee30b4df859 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -1375,19 +1375,19 @@ class B: pass
 [builtins fixtures/list.pyi]
 
 [case testUninferableLambda]
+# flags: --new-type-inference
 from typing import TypeVar, Callable
 X = TypeVar('X')
 def f(x: Callable[[X], X]) -> X: pass
-y = f(lambda x: x) # E: Cannot infer type argument 1 of "f"
+y = f(lambda x: x)  # E: Need type annotation for "y"
 
 [case testUninferableLambdaWithTypeError]
+# flags: --new-type-inference
 from typing import TypeVar, Callable
 X = TypeVar('X')
 def f(x: Callable[[X], X], y: str) -> X: pass
-y = f(lambda x: x, 1) # Fail
-[out]
-main:4: error: Cannot infer type argument 1 of "f"
-main:4: error: Argument 2 to "f" has incompatible type "int"; expected "str"
+y = f(lambda x: x, 1) # E: Need type annotation for "y" \
+                      # E: Argument 2 to "f" has incompatible type "int"; expected "str"
 
 [case testInferLambdaNone]
 # flags: --no-strict-optional
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index 4910dfe05d31..e59b12d47980 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -6483,7 +6483,7 @@ P = ParamSpec("P")
 R = TypeVar("R")
 
 @overload
-def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
+def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ...
 @overload
 def func(x: Callable[P, R]) -> Callable[Concatenate[str, P], R]: ...
 def func(x: Callable[..., R]) -> Callable[..., R]: ...
@@ -6501,7 +6501,7 @@ eggs = lambda: 'eggs'
 reveal_type(func(eggs))  # N: Revealed type is "def (builtins.str) -> builtins.str"
 
 spam: Callable[..., str] = lambda x, y: 'baz'
-reveal_type(func(spam))  # N: Revealed type is "def (*Any, **Any) -> builtins.str"
+reveal_type(func(spam))  # N: Revealed type is "def (*Any, **Any) -> Any"
 
 [builtins fixtures/paramspec.pyi]
 
@@ -6596,3 +6596,20 @@ S = TypeVar("S", bound=str)
 def foo(x: int = ...) -> Callable[[T], T]: ...
 @overload
 def foo(x: S = ...) -> Callable[[T], T]: ...
+
+[case testOverloadGenericStarArgOverlap]
+from typing import Any, Callable, TypeVar, overload, Union, Tuple, List
+
+F = TypeVar("F", bound=Callable[..., Any])
+S = TypeVar("S", bound=int)
+
+def id(f: F) -> F: ...
+
+@overload
+def struct(*cols: S) -> int: ...
+@overload
+def struct(__cols: Union[List[S], Tuple[S, ...]]) -> int: ...
+@id
+def struct(*cols: Union[S, Union[List[S], Tuple[S, ...]]]) -> int:
+    pass
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index 114fe1f8438a..f523cb005a2c 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1029,7 +1029,7 @@ j = Job(generic_f)
 reveal_type(j)  # N: Revealed type is "__main__.Job[[x: _T`-1]]"
 
 jf = j.into_callable()
-reveal_type(jf)  # N: Revealed type is "def [_T] (x: _T`-1)"
+reveal_type(jf)  # N: Revealed type is "def [_T] (x: _T`2)"
 reveal_type(jf(1))  # N: Revealed type is "None"
 [builtins fixtures/paramspec.pyi]
 
@@ -1048,10 +1048,10 @@ class Job(Generic[_P, _T]):
 def generic_f(x: _T) -> _T: ...
 
 j = Job(generic_f)
-reveal_type(j)  # N: Revealed type is "__main__.Job[[x: _T`-1], _T`-1]"
+reveal_type(j)  # N: Revealed type is "__main__.Job[[x: _T`2], _T`2]"
 
 jf = j.into_callable()
-reveal_type(jf)  # N: Revealed type is "def [_T] (x: _T`-1) -> _T`-1"
+reveal_type(jf)  # N: Revealed type is "def [_T] (x: _T`3) -> _T`3"
 reveal_type(jf(1))  # N: Revealed type is "builtins.int"
 [builtins fixtures/paramspec.pyi]
 
@@ -1307,7 +1307,7 @@ reveal_type(bar(C(fn=foo, x=1)))  # N: Revealed type is "__main__.C[[x: builtins
 [builtins fixtures/paramspec.pyi]
 
 [case testParamSpecClassConstructor]
-from typing import ParamSpec, Callable
+from typing import ParamSpec, Callable, TypeVar
 
 P = ParamSpec("P")
 
@@ -1315,7 +1315,10 @@ class SomeClass:
     def __init__(self, a: str) -> None:
         pass
 
-def func(t: Callable[P, SomeClass], val: Callable[P, SomeClass]) -> None:
+def func(t: Callable[P, SomeClass], val: Callable[P, SomeClass]) -> Callable[P, SomeClass]:
+    pass
+
+def func_regular(t: Callable[[T], SomeClass], val: Callable[[T], SomeClass]) -> Callable[[T], SomeClass]:
     pass
 
 def constructor(a: str) -> SomeClass:
@@ -1324,9 +1327,13 @@ def constructor(a: str) -> SomeClass:
 def wrong_constructor(a: bool) -> SomeClass:
     return SomeClass("a")
 
+def wrong_name_constructor(b: bool) -> SomeClass:
+    return SomeClass("a")
+
 func(SomeClass, constructor)
-func(SomeClass, wrong_constructor)  # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[VarArg(<nothing>), KwArg(<nothing>)], SomeClass]" \
-				    # E: Argument 2 to "func" has incompatible type "Callable[[bool], SomeClass]"; expected "Callable[[VarArg(<nothing>), KwArg(<nothing>)], SomeClass]"
+reveal_type(func(SomeClass, wrong_constructor))  # N: Revealed type is "def (a: <nothing>) -> __main__.SomeClass"
+reveal_type(func_regular(SomeClass, wrong_constructor))  # N: Revealed type is "def (<nothing>) -> __main__.SomeClass"
+func(SomeClass, wrong_name_constructor)  # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[<nothing>], SomeClass]"
 [builtins fixtures/paramspec.pyi]
 
 [case testParamSpecInTypeAliasBasic]
@@ -1466,8 +1473,7 @@ reveal_type(gs)  # N: Revealed type is "builtins.list[def (builtins.int, builtin
 
 T = TypeVar("T")
 class C(Generic[T]): ...
-C[Callable[P, int]]()  # E: The first argument to Callable must be a list of types, parameter specification, or "..." \
-                       # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas
+C[Callable[P, int]]()
 [builtins fixtures/paramspec.pyi]
 
 [case testConcatDeferralNoCrash]
@@ -1547,5 +1553,26 @@ U = TypeVar("U")
 def dec(f: Callable[P, T]) -> Callable[P, List[T]]: ...
 def test(x: U) -> U: ...
 reveal_type(dec)  # N: Revealed type is "def [P, T] (f: def (*P.args, **P.kwargs) -> T`-2) -> def (*P.args, **P.kwargs) -> builtins.list[T`-2]"
-reveal_type(dec(test))  # N: Revealed type is "def [U] (x: U`-1) -> builtins.list[U`-1]"
+reveal_type(dec(test))  # N: Revealed type is "def [T] (x: T`2) -> builtins.list[T`2]"
+
+class A: ...
+TA = TypeVar("TA", bound=A)
+
+def test_with_bound(x: TA) -> TA: ...
+reveal_type(dec(test_with_bound))  # N: Revealed type is "def [T <: __main__.A] (x: T`4) -> builtins.list[T`4]"
+dec(test_with_bound)(0)  # E: Value of type variable "T" of function cannot be "int"
+dec(test_with_bound)(A())  # OK
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecNestedApplyNoCrash]
+from typing import Callable, TypeVar
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+def apply(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: ...
+def test() -> None: ...
+# TODO: avoid this error, although it may be non-trivial.
+apply(apply, test)  # E: Argument 2 to "apply" has incompatible type "Callable[[], None]"; expected "Callable[P, T]"
 [builtins fixtures/paramspec.pyi]

From 76c16a484b8b4fa6ae10abf4e96a286315ed7093 Mon Sep 17 00:00:00 2001
From: Valentin Stanciu <250871+svalentin@users.noreply.github.com>
Date: Wed, 16 Aug 2023 13:15:42 +0100
Subject: [PATCH 058/288] [dmypy] special case stdout and stderr in show_stats
 too (#15881)

When running dmypy, the communication between client and server is via
JSON. The JSON contains the keys "out" and "err" for the actual result
of "check" command, and "stdout" and "stderr" for the any other stdout
and stderr text.
show_stats is shown when running with --verbose. It's meant to show
other keys in the JSON response, like python version or time taken. It
already had some special casing to only show 1 line of text for "out"
and "err". Let's add "stdout" and "stderr" to the special casing as
well. Also, let's show the remaining number of characters as well.

Finally, added a comment in code about stdout, stderr, out, err and how
we shouldn't confuse them. (I did)

Some more cleanup is needed in this area of the codebase, but will be a
separate PR.

show_stats outputs something like this:
```
err                     :
out                     : analytics/scripts/presto/report_query_lo ... 100 more characters
platform                :      linux
python_version          :        3_9
roundtrip_time          :     31.996
status                  :          2
stderr                  : \nLOG:  Mypy Version:           1.6.0+de ... 50186630 more characters
stdout                  :
```
---
 mypy/dmypy/client.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/mypy/dmypy/client.py b/mypy/dmypy/client.py
index 0e9120608509..c3a2308d1b44 100644
--- a/mypy/dmypy/client.py
+++ b/mypy/dmypy/client.py
@@ -562,6 +562,7 @@ def check_output(
     sys.stdout.write(out)
     sys.stdout.flush()
     sys.stderr.write(err)
+    sys.stderr.flush()
     if verbose:
         show_stats(response)
     if junit_xml:
@@ -588,13 +589,14 @@ def check_output(
 
 def show_stats(response: Mapping[str, object]) -> None:
     for key, value in sorted(response.items()):
-        if key not in ("out", "err"):
-            print("%-24s: %10s" % (key, "%.3f" % value if isinstance(value, float) else value))
-        else:
+        if key in ("out", "err", "stdout", "stderr"):
+            # Special case text output to display just 40 characters of text
             value = repr(value)[1:-1]
             if len(value) > 50:
-                value = value[:40] + " ..."
+                value = f"{value[:40]} ... {len(value)-40} more characters"
             print("%-24s: %s" % (key, value))
+            continue
+        print("%-24s: %10s" % (key, "%.3f" % value if isinstance(value, float) else value))
 
 
 @action(hang_parser)
@@ -668,6 +670,8 @@ def request(
     # TODO: Other errors, e.g. ValueError, UnicodeError
     else:
         # Display debugging output written to stdout/stderr in the server process for convenience.
+        # This should not be confused with "out" and "err" fields in the response.
+        # Those fields hold the output of the "check" command, and are handled in check_output().
         stdout = response.get("stdout")
         if stdout:
             sys.stdout.write(stdout)

From b3d09374dac20c8e775e4380a6b44a56d7b22699 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 17 Aug 2023 15:35:22 +0100
Subject: [PATCH 059/288] Fix subtyping between ParamSpecs (#15892)

Fixes https://github.com/python/mypy/issues/14169
Fixes https://github.com/python/mypy/issues/14168

Two things here:
* Actually check prefix when we should
* `strict_concatenate` check should be off by default (IIUC it is not
mandated by the PEP)
---
 mypy/expandtype.py                            |  3 +-
 mypy/messages.py                              | 18 +++--
 mypy/subtypes.py                              | 17 +++--
 test-data/unit/check-overloading.test         |  2 +-
 .../unit/check-parameter-specification.test   | 70 +++++++++++++++++++
 5 files changed, 94 insertions(+), 16 deletions(-)

diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 0e98ed048197..01d9c4463174 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -383,8 +383,6 @@ def visit_callable_type(self, t: CallableType) -> CallableType:
                 t = t.expand_param_spec(repl)
                 return t.copy_modified(
                     arg_types=self.expand_types(t.arg_types),
-                    arg_kinds=t.arg_kinds,
-                    arg_names=t.arg_names,
                     ret_type=t.ret_type.accept(self),
                     type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None),
                 )
@@ -402,6 +400,7 @@ def visit_callable_type(self, t: CallableType) -> CallableType:
                     arg_kinds=t.arg_kinds[:-2] + prefix.arg_kinds + t.arg_kinds[-2:],
                     arg_names=t.arg_names[:-2] + prefix.arg_names + t.arg_names[-2:],
                     ret_type=t.ret_type.accept(self),
+                    from_concatenate=t.from_concatenate or bool(repl.prefix.arg_types),
                 )
 
         var_arg = t.var_arg()
diff --git a/mypy/messages.py b/mypy/messages.py
index c9bf26f8952e..aab30ee29108 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -2116,9 +2116,11 @@ def report_protocol_problems(
             return
 
         # Report member type conflicts
-        conflict_types = get_conflict_protocol_types(subtype, supertype, class_obj=class_obj)
+        conflict_types = get_conflict_protocol_types(
+            subtype, supertype, class_obj=class_obj, options=self.options
+        )
         if conflict_types and (
-            not is_subtype(subtype, erase_type(supertype))
+            not is_subtype(subtype, erase_type(supertype), options=self.options)
             or not subtype.type.defn.type_vars
             or not supertype.type.defn.type_vars
         ):
@@ -2780,7 +2782,11 @@ def [T <: int] f(self, x: int, y: T) -> None
             slash = True
 
     # If we got a "special arg" (i.e: self, cls, etc...), prepend it to the arg list
-    if isinstance(tp.definition, FuncDef) and hasattr(tp.definition, "arguments"):
+    if (
+        isinstance(tp.definition, FuncDef)
+        and hasattr(tp.definition, "arguments")
+        and not tp.from_concatenate
+    ):
         definition_arg_names = [arg.variable.name for arg in tp.definition.arguments]
         if (
             len(definition_arg_names) > len(tp.arg_names)
@@ -2857,7 +2863,7 @@ def get_missing_protocol_members(left: Instance, right: Instance, skip: list[str
 
 
 def get_conflict_protocol_types(
-    left: Instance, right: Instance, class_obj: bool = False
+    left: Instance, right: Instance, class_obj: bool = False, options: Options | None = None
 ) -> list[tuple[str, Type, Type]]:
     """Find members that are defined in 'left' but have incompatible types.
     Return them as a list of ('member', 'got', 'expected').
@@ -2872,9 +2878,9 @@ def get_conflict_protocol_types(
         subtype = mypy.typeops.get_protocol_member(left, member, class_obj)
         if not subtype:
             continue
-        is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True)
+        is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True, options=options)
         if IS_SETTABLE in get_member_flags(member, right):
-            is_compat = is_compat and is_subtype(supertype, subtype)
+            is_compat = is_compat and is_subtype(supertype, subtype, options=options)
         if not is_compat:
             conflicts.append((member, subtype, supertype))
     return conflicts
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 60fccc7e357c..11847858c62c 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -600,7 +600,7 @@ def check_mixed(
                     type_state.record_negative_subtype_cache_entry(self._subtype_kind, left, right)
                 return nominal
             if right.type.is_protocol and is_protocol_implementation(
-                left, right, proper_subtype=self.proper_subtype
+                left, right, proper_subtype=self.proper_subtype, options=self.options
             ):
                 return True
             # We record negative cache entry here, and not in the protocol check like we do for
@@ -647,7 +647,7 @@ def visit_param_spec(self, left: ParamSpecType) -> bool:
             and right.id == left.id
             and right.flavor == left.flavor
         ):
-            return True
+            return self._is_subtype(left.prefix, right.prefix)
         if isinstance(right, Parameters) and are_trivial_parameters(right):
             return True
         return self._is_subtype(left.upper_bound, self.right)
@@ -696,7 +696,7 @@ def visit_callable_type(self, left: CallableType) -> bool:
                 ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names,
                 strict_concatenate=(self.options.extra_checks or self.options.strict_concatenate)
                 if self.options
-                else True,
+                else False,
             )
         elif isinstance(right, Overloaded):
             return all(self._is_subtype(left, item) for item in right.items)
@@ -863,7 +863,7 @@ def visit_overloaded(self, left: Overloaded) -> bool:
                         strict_concat = (
                             (self.options.extra_checks or self.options.strict_concatenate)
                             if self.options
-                            else True
+                            else False
                         )
                         if left_index not in matched_overloads and (
                             is_callable_compatible(
@@ -1003,6 +1003,7 @@ def is_protocol_implementation(
     proper_subtype: bool = False,
     class_obj: bool = False,
     skip: list[str] | None = None,
+    options: Options | None = None,
 ) -> bool:
     """Check whether 'left' implements the protocol 'right'.
 
@@ -1068,7 +1069,9 @@ def f(self) -> A: ...
                 # Nominal check currently ignores arg names
                 # NOTE: If we ever change this, be sure to also change the call to
                 # SubtypeVisitor.build_subtype_kind(...) down below.
-                is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=ignore_names)
+                is_compat = is_subtype(
+                    subtype, supertype, ignore_pos_arg_names=ignore_names, options=options
+                )
             else:
                 is_compat = is_proper_subtype(subtype, supertype)
             if not is_compat:
@@ -1080,7 +1083,7 @@ def f(self) -> A: ...
             superflags = get_member_flags(member, right)
             if IS_SETTABLE in superflags:
                 # Check opposite direction for settable attributes.
-                if not is_subtype(supertype, subtype):
+                if not is_subtype(supertype, subtype, options=options):
                     return False
             if not class_obj:
                 if IS_SETTABLE not in superflags:
@@ -1479,7 +1482,7 @@ def are_parameters_compatible(
     ignore_pos_arg_names: bool = False,
     check_args_covariantly: bool = False,
     allow_partial_overlap: bool = False,
-    strict_concatenate_check: bool = True,
+    strict_concatenate_check: bool = False,
 ) -> bool:
     """Helper function for is_callable_compatible, used for Parameter compatibility"""
     if right.is_ellipsis_args:
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index e59b12d47980..4a4c19b4a0e9 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -6483,7 +6483,7 @@ P = ParamSpec("P")
 R = TypeVar("R")
 
 @overload
-def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ...
+def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types
 @overload
 def func(x: Callable[P, R]) -> Callable[Concatenate[str, P], R]: ...
 def func(x: Callable[..., R]) -> Callable[..., R]: ...
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index f523cb005a2c..b06944389623 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1576,3 +1576,73 @@ def test() -> None: ...
 # TODO: avoid this error, although it may be non-trivial.
 apply(apply, test)  # E: Argument 2 to "apply" has incompatible type "Callable[[], None]"; expected "Callable[P, T]"
 [builtins fixtures/paramspec.pyi]
+
+[case testParamSpecPrefixSubtypingGenericInvalid]
+from typing import Generic
+from typing_extensions import ParamSpec, Concatenate
+
+P = ParamSpec("P")
+
+class A(Generic[P]):
+    def foo(self, *args: P.args, **kwargs: P.kwargs):
+        ...
+
+def bar(b: A[P]) -> A[Concatenate[int, P]]:
+    return b  # E: Incompatible return value type (got "A[P]", expected "A[[int, **P]]")
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecPrefixSubtypingProtocolInvalid]
+from typing import Protocol
+from typing_extensions import ParamSpec, Concatenate
+
+P = ParamSpec("P")
+
+class A(Protocol[P]):
+    def foo(self, *args: P.args, **kwargs: P.kwargs):
+        ...
+
+def bar(b: A[P]) -> A[Concatenate[int, P]]:
+    return b  # E: Incompatible return value type (got "A[P]", expected "A[[int, **P]]")
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecPrefixSubtypingValidNonStrict]
+from typing import Protocol
+from typing_extensions import ParamSpec, Concatenate
+
+P = ParamSpec("P")
+
+class A(Protocol[P]):
+    def foo(self, a: int, *args: P.args, **kwargs: P.kwargs):
+        ...
+
+class B(Protocol[P]):
+    def foo(self, a: int, b: int, *args: P.args, **kwargs: P.kwargs):
+        ...
+
+def bar(b: B[P]) -> A[Concatenate[int, P]]:
+    return b
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecPrefixSubtypingInvalidStrict]
+# flags: --extra-checks
+from typing import Protocol
+from typing_extensions import ParamSpec, Concatenate
+
+P = ParamSpec("P")
+
+class A(Protocol[P]):
+    def foo(self, a: int, *args: P.args, **kwargs: P.kwargs):
+        ...
+
+class B(Protocol[P]):
+    def foo(self, a: int, b: int, *args: P.args, **kwargs: P.kwargs):
+        ...
+
+def bar(b: B[P]) -> A[Concatenate[int, P]]:
+    return b  # E: Incompatible return value type (got "B[P]", expected "A[[int, **P]]") \
+              # N: Following member(s) of "B[P]" have conflicts: \
+              # N:     Expected: \
+              # N:         def foo(self, a: int, int, /, *args: P.args, **kwargs: P.kwargs) -> Any \
+              # N:     Got: \
+              # N:         def foo(self, a: int, b: int, *args: P.args, **kwargs: P.kwargs) -> Any
+[builtins fixtures/paramspec.pyi]

From fa84534b9a9c6bdfc2a155d2e916da0c308402b9 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 18 Aug 2023 14:24:41 +0100
Subject: [PATCH 060/288] Basic support for decorated overloads (#15898)

Fixes https://github.com/python/mypy/issues/15737
Fixes https://github.com/python/mypy/issues/12844
Fixes https://github.com/python/mypy/issues/12716

My goal was to fix the `ParamSpec` issues, but it turns out decorated
overloads were not supported at all. Namely:
* Decorators on overload items were ignored, caller would see original
undecorated item types
* Overload item overlap checks were performed for original types, while
arguably we should use decorated types
* Overload items completeness w.r.t. to implementation was checked with
decorated implementation, and undecorated items

Here I add basic support using same logic as for regular decorated
functions: initially set type to `None` and defer callers until
definition is type-checked. Note this results in few more `Cannot
determine type` in case of other errors, but I think it is fine.

Note I also add special-casing for "inline" applications of generic
functions to overload arguments. This use case was mentioned few times
alongside overloads. The general fix would be tricky, and my
special-casing should cover typical use cases.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 mypy/checker.py                               | 91 ++++++++++++-------
 mypy/checkexpr.py                             | 73 +++++++++++++--
 mypy/checkmember.py                           | 12 ++-
 mypy/semanal.py                               | 11 ++-
 test-data/unit/check-generics.test            |  4 +-
 test-data/unit/check-newsemanal.test          |  3 +-
 test-data/unit/check-overloading.test         | 27 ++++++
 .../unit/check-parameter-specification.test   | 28 ++++++
 test-data/unit/lib-stub/functools.pyi         |  2 +-
 9 files changed, 206 insertions(+), 45 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 5d97a0dec713..7625bf28a88c 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -636,13 +636,30 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
             self.visit_decorator(defn.items[0])
         for fdef in defn.items:
             assert isinstance(fdef, Decorator)
-            self.check_func_item(fdef.func, name=fdef.func.name, allow_empty=True)
+            if defn.is_property:
+                self.check_func_item(fdef.func, name=fdef.func.name, allow_empty=True)
+            else:
+                # Perform full check for real overloads to infer type of all decorated
+                # overload variants.
+                self.visit_decorator_inner(fdef, allow_empty=True)
             if fdef.func.abstract_status in (IS_ABSTRACT, IMPLICITLY_ABSTRACT):
                 num_abstract += 1
         if num_abstract not in (0, len(defn.items)):
             self.fail(message_registry.INCONSISTENT_ABSTRACT_OVERLOAD, defn)
         if defn.impl:
             defn.impl.accept(self)
+        if not defn.is_property:
+            self.check_overlapping_overloads(defn)
+            if defn.type is None:
+                item_types = []
+                for item in defn.items:
+                    assert isinstance(item, Decorator)
+                    item_type = self.extract_callable_type(item.var.type, item)
+                    if item_type is not None:
+                        item_types.append(item_type)
+                if item_types:
+                    defn.type = Overloaded(item_types)
+        # Check override validity after we analyzed current definition.
         if defn.info:
             found_method_base_classes = self.check_method_override(defn)
             if (
@@ -653,10 +670,35 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
                 self.msg.no_overridable_method(defn.name, defn)
             self.check_explicit_override_decorator(defn, found_method_base_classes, defn.impl)
             self.check_inplace_operator_method(defn)
-        if not defn.is_property:
-            self.check_overlapping_overloads(defn)
         return None
 
+    def extract_callable_type(self, inner_type: Type | None, ctx: Context) -> CallableType | None:
+        """Get type as seen by an overload item caller."""
+        inner_type = get_proper_type(inner_type)
+        outer_type: CallableType | None = None
+        if inner_type is not None and not isinstance(inner_type, AnyType):
+            if isinstance(inner_type, CallableType):
+                outer_type = inner_type
+            elif isinstance(inner_type, Instance):
+                inner_call = get_proper_type(
+                    analyze_member_access(
+                        name="__call__",
+                        typ=inner_type,
+                        context=ctx,
+                        is_lvalue=False,
+                        is_super=False,
+                        is_operator=True,
+                        msg=self.msg,
+                        original_type=inner_type,
+                        chk=self,
+                    )
+                )
+                if isinstance(inner_call, CallableType):
+                    outer_type = inner_call
+            if outer_type is None:
+                self.msg.not_callable(inner_type, ctx)
+        return outer_type
+
     def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None:
         # At this point we should have set the impl already, and all remaining
         # items are decorators
@@ -680,40 +722,20 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None:
 
             # This can happen if we've got an overload with a different
             # decorator or if the implementation is untyped -- we gave up on the types.
-            inner_type = get_proper_type(inner_type)
-            if inner_type is not None and not isinstance(inner_type, AnyType):
-                if isinstance(inner_type, CallableType):
-                    impl_type = inner_type
-                elif isinstance(inner_type, Instance):
-                    inner_call = get_proper_type(
-                        analyze_member_access(
-                            name="__call__",
-                            typ=inner_type,
-                            context=defn.impl,
-                            is_lvalue=False,
-                            is_super=False,
-                            is_operator=True,
-                            msg=self.msg,
-                            original_type=inner_type,
-                            chk=self,
-                        )
-                    )
-                    if isinstance(inner_call, CallableType):
-                        impl_type = inner_call
-                if impl_type is None:
-                    self.msg.not_callable(inner_type, defn.impl)
+            impl_type = self.extract_callable_type(inner_type, defn.impl)
 
         is_descriptor_get = defn.info and defn.name == "__get__"
         for i, item in enumerate(defn.items):
-            # TODO overloads involving decorators
             assert isinstance(item, Decorator)
-            sig1 = self.function_type(item.func)
-            assert isinstance(sig1, CallableType)
+            sig1 = self.extract_callable_type(item.var.type, item)
+            if sig1 is None:
+                continue
 
             for j, item2 in enumerate(defn.items[i + 1 :]):
                 assert isinstance(item2, Decorator)
-                sig2 = self.function_type(item2.func)
-                assert isinstance(sig2, CallableType)
+                sig2 = self.extract_callable_type(item2.var.type, item2)
+                if sig2 is None:
+                    continue
 
                 if not are_argument_counts_overlapping(sig1, sig2):
                     continue
@@ -4751,17 +4773,20 @@ def visit_decorator(self, e: Decorator) -> None:
                     e.var.type = AnyType(TypeOfAny.special_form)
                     e.var.is_ready = True
                     return
+        self.visit_decorator_inner(e)
 
+    def visit_decorator_inner(self, e: Decorator, allow_empty: bool = False) -> None:
         if self.recurse_into_functions:
             with self.tscope.function_scope(e.func):
-                self.check_func_item(e.func, name=e.func.name)
+                self.check_func_item(e.func, name=e.func.name, allow_empty=allow_empty)
 
         # Process decorators from the inside out to determine decorated signature, which
         # may be different from the declared signature.
         sig: Type = self.function_type(e.func)
         for d in reversed(e.decorators):
             if refers_to_fullname(d, OVERLOAD_NAMES):
-                self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, e)
+                if not allow_empty:
+                    self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, e)
                 continue
             dec = self.expr_checker.accept(d)
             temp = self.temp_node(sig, context=e)
@@ -4788,6 +4813,8 @@ def visit_decorator(self, e: Decorator) -> None:
                     self.msg.fail("Too many arguments for property", e)
             self.check_incompatible_property_override(e)
         # For overloaded functions we already checked override for overload as a whole.
+        if allow_empty:
+            return
         if e.func.info and not e.func.is_dynamic() and not e.is_overload:
             found_method_base_classes = self.check_method_override(e)
             if (
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 68ea7c30ed6f..797473f7f58f 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -353,12 +353,13 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type:
         elif isinstance(node, FuncDef):
             # Reference to a global function.
             result = function_type(node, self.named_type("builtins.function"))
-        elif isinstance(node, OverloadedFuncDef) and node.type is not None:
-            # node.type is None when there are multiple definitions of a function
-            # and it's decorated by something that is not typing.overload
-            # TODO: use a dummy Overloaded instead of AnyType in this case
-            # like we do in mypy.types.function_type()?
-            result = node.type
+        elif isinstance(node, OverloadedFuncDef):
+            if node.type is None:
+                if self.chk.in_checked_function() and node.items:
+                    self.chk.handle_cannot_determine_type(node.name, e)
+                result = AnyType(TypeOfAny.from_error)
+            else:
+                result = node.type
         elif isinstance(node, TypeInfo):
             # Reference to a type object.
             if node.typeddict_type:
@@ -1337,6 +1338,55 @@ def transform_callee_type(
 
         return callee
 
+    def is_generic_decorator_overload_call(
+        self, callee_type: CallableType, args: list[Expression]
+    ) -> Overloaded | None:
+        """Check if this looks like an application of a generic function to overload argument."""
+        assert callee_type.variables
+        if len(callee_type.arg_types) != 1 or len(args) != 1:
+            # TODO: can we handle more general cases?
+            return None
+        if not isinstance(get_proper_type(callee_type.arg_types[0]), CallableType):
+            return None
+        if not isinstance(get_proper_type(callee_type.ret_type), CallableType):
+            return None
+        with self.chk.local_type_map():
+            with self.msg.filter_errors():
+                arg_type = get_proper_type(self.accept(args[0], type_context=None))
+        if isinstance(arg_type, Overloaded):
+            return arg_type
+        return None
+
+    def handle_decorator_overload_call(
+        self, callee_type: CallableType, overloaded: Overloaded, ctx: Context
+    ) -> tuple[Type, Type] | None:
+        """Type-check application of a generic callable to an overload.
+
+        We check call on each individual overload item, and then combine results into a new
+        overload. This function should be only used if callee_type takes and returns a Callable.
+        """
+        result = []
+        inferred_args = []
+        for item in overloaded.items:
+            arg = TempNode(typ=item)
+            with self.msg.filter_errors() as err:
+                item_result, inferred_arg = self.check_call(callee_type, [arg], [ARG_POS], ctx)
+            if err.has_new_errors():
+                # This overload doesn't match.
+                continue
+            p_item_result = get_proper_type(item_result)
+            if not isinstance(p_item_result, CallableType):
+                continue
+            p_inferred_arg = get_proper_type(inferred_arg)
+            if not isinstance(p_inferred_arg, CallableType):
+                continue
+            inferred_args.append(p_inferred_arg)
+            result.append(p_item_result)
+        if not result or not inferred_args:
+            # None of the overload matched (or overload was initially malformed).
+            return None
+        return Overloaded(result), Overloaded(inferred_args)
+
     def check_call_expr_with_callee_type(
         self,
         callee_type: Type,
@@ -1451,6 +1501,17 @@ def check_call(
         callee = get_proper_type(callee)
 
         if isinstance(callee, CallableType):
+            if callee.variables:
+                overloaded = self.is_generic_decorator_overload_call(callee, args)
+                if overloaded is not None:
+                    # Special casing for inline application of generic callables to overloads.
+                    # Supporting general case would be tricky, but this should cover 95% of cases.
+                    overloaded_result = self.handle_decorator_overload_call(
+                        callee, overloaded, context
+                    )
+                    if overloaded_result is not None:
+                        return overloaded_result
+
             return self.check_callable_call(
                 callee,
                 args,
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 343dfe3de243..2b0717f181a9 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -317,7 +317,17 @@ def analyze_instance_member_access(
             return analyze_var(name, first_item.var, typ, info, mx)
         if mx.is_lvalue:
             mx.msg.cant_assign_to_method(mx.context)
-        signature = function_type(method, mx.named_type("builtins.function"))
+        if not isinstance(method, OverloadedFuncDef):
+            signature = function_type(method, mx.named_type("builtins.function"))
+        else:
+            if method.type is None:
+                # Overloads may be not ready if they are decorated. Handle this in same
+                # manner as we would handle a regular decorated function: defer if possible.
+                if not mx.no_deferral and method.items:
+                    mx.not_ready_callback(method.name, mx.context)
+                return AnyType(TypeOfAny.special_form)
+            assert isinstance(method.type, Overloaded)
+            signature = method.type
         signature = freshen_all_functions_type_vars(signature)
         if not method.is_static:
             if name != "__call__":
diff --git a/mypy/semanal.py b/mypy/semanal.py
index e21fc9f1c23f..9d968d1da781 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -1153,7 +1153,16 @@ def analyze_overloaded_func_def(self, defn: OverloadedFuncDef) -> None:
             elif not non_overload_indexes:
                 self.handle_missing_overload_implementation(defn)
 
-        if types:
+        if types and not any(
+            # If some overload items are decorated with other decorators, then
+            # the overload type will be determined during type checking.
+            isinstance(it, Decorator) and len(it.decorators) > 1
+            for it in defn.items
+        ):
+            # TODO: should we enforce decorated overloads consistency somehow?
+            # Some existing code uses both styles:
+            #   * Put decorator only on implementation, use "effective" types in overloads
+            #   * Put decorator everywhere, use "bare" types in overloads.
             defn.type = Overloaded(types)
             defn.type.line = defn.line
 
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 8c7c4e035961..1fac42b492a8 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -3062,10 +3062,10 @@ def dec5(f: Callable[[int], T]) -> Callable[[int], List[T]]:
 reveal_type(dec1(lambda x: x))  # N: Revealed type is "def [T] (T`3) -> builtins.list[T`3]"
 reveal_type(dec2(lambda x: x))  # N: Revealed type is "def [S] (S`4) -> builtins.list[S`4]"
 reveal_type(dec3(lambda x: x[0]))  # N: Revealed type is "def [S] (S`6) -> S`6"
-reveal_type(dec4(lambda x: [x]))  # N: Revealed type is "def [S] (S`8) -> S`8"
+reveal_type(dec4(lambda x: [x]))  # N: Revealed type is "def [S] (S`9) -> S`9"
 reveal_type(dec1(lambda x: 1))  # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]"
 reveal_type(dec5(lambda x: x))  # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]"
-reveal_type(dec3(lambda x: x))  # N: Revealed type is "def [S] (S`15) -> builtins.list[S`15]"
+reveal_type(dec3(lambda x: x))  # N: Revealed type is "def [S] (S`16) -> builtins.list[S`16]"
 dec4(lambda x: x)  # E: Incompatible return value type (got "S", expected "List[object]")
 [builtins fixtures/list.pyi]
 
diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test
index 8300957ee511..ff8d346e74a1 100644
--- a/test-data/unit/check-newsemanal.test
+++ b/test-data/unit/check-newsemanal.test
@@ -3207,8 +3207,7 @@ class User:
         self.first_name = value
 
     def __init__(self, name: str) -> None:
-        self.name = name  # E: Cannot assign to a method \
-                          # E: Incompatible types in assignment (expression has type "str", variable has type "Callable[..., Any]")
+        self.name = name  # E: Cannot assign to a method
 
 [case testNewAnalyzerMemberNameMatchesTypedDict]
 from typing import Union, Any
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index 4a4c19b4a0e9..b778dc50b376 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -6613,3 +6613,30 @@ def struct(__cols: Union[List[S], Tuple[S, ...]]) -> int: ...
 def struct(*cols: Union[S, Union[List[S], Tuple[S, ...]]]) -> int:
     pass
 [builtins fixtures/tuple.pyi]
+
+[case testRegularGenericDecoratorOverload]
+from typing import Callable, overload, TypeVar, List
+
+S = TypeVar("S")
+T = TypeVar("T")
+def transform(func: Callable[[S], List[T]]) -> Callable[[S], T]: ...
+
+@overload
+def foo(x: int) -> List[float]: ...
+@overload
+def foo(x: str) -> List[str]: ...
+def foo(x): ...
+
+reveal_type(transform(foo))  # N: Revealed type is "Overload(def (builtins.int) -> builtins.float, def (builtins.str) -> builtins.str)"
+
+@transform
+@overload
+def bar(x: int) -> List[float]: ...
+@transform
+@overload
+def bar(x: str) -> List[str]: ...
+@transform
+def bar(x): ...
+
+reveal_type(bar)  # N: Revealed type is "Overload(def (builtins.int) -> builtins.float, def (builtins.str) -> builtins.str)"
+[builtins fixtures/paramspec.pyi]
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index b06944389623..3a8ecdf81c7d 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1646,3 +1646,31 @@ def bar(b: B[P]) -> A[Concatenate[int, P]]:
               # N:     Got: \
               # N:         def foo(self, a: int, b: int, *args: P.args, **kwargs: P.kwargs) -> Any
 [builtins fixtures/paramspec.pyi]
+
+[case testParamSpecDecoratorOverload]
+from typing import Callable, overload, TypeVar, List
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+T = TypeVar("T")
+def transform(func: Callable[P, List[T]]) -> Callable[P, T]: ...
+
+@overload
+def foo(x: int) -> List[float]: ...
+@overload
+def foo(x: str) -> List[str]: ...
+def foo(x): ...
+
+reveal_type(transform(foo))  # N: Revealed type is "Overload(def (x: builtins.int) -> builtins.float, def (x: builtins.str) -> builtins.str)"
+
+@transform
+@overload
+def bar(x: int) -> List[float]: ...
+@transform
+@overload
+def bar(x: str) -> List[str]: ...
+@transform
+def bar(x): ...
+
+reveal_type(bar)  # N: Revealed type is "Overload(def (x: builtins.int) -> builtins.float, def (x: builtins.str) -> builtins.str)"
+[builtins fixtures/paramspec.pyi]
diff --git a/test-data/unit/lib-stub/functools.pyi b/test-data/unit/lib-stub/functools.pyi
index 9e62a14c2f34..e665b2bad0c2 100644
--- a/test-data/unit/lib-stub/functools.pyi
+++ b/test-data/unit/lib-stub/functools.pyi
@@ -1,4 +1,4 @@
-from typing import Generic, TypeVar, Callable, Any, Mapping
+from typing import Generic, TypeVar, Callable, Any, Mapping, overload
 
 _T = TypeVar("_T")
 

From b02ddf1db45f6cd1b3a4cf0f40e768b36f5636a7 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 18 Aug 2023 16:18:06 +0100
Subject: [PATCH 061/288] Polymorphic inference: basic support for variadic
 types (#15879)

This is the fifth PR in the series started by #15287, and a last one for
the foreseeable future. This completes polymorphic inference
sufficiently for extensive experimentation, and enabling polymorphic
fallback by default.

Remaining items for which I am going to open follow-up issues:
* Enable `--new-type-inference` by default (should be done before
everything else in this list).
* Use polymorphic inference during unification.
* Use polymorphic inference as the primary and only mechanism, rather than a
fallback if basic inference fails in some way.
* Move `apply_poly()` logic from `checkexpr.py` to `applytype.py` (this
one depends on everything above).
* Experiment with backtracking in the new solver.
* Experiment with universal quantification for types other than
`Callable` (btw we already have hacky support for capturing a generic
function in an instance with `ParamSpec`).

Now some comments on the PR proper. First of all I decided to do some
clean-up of `TypeVarTuple` support, but added only strictly necessary
parts of the cleanup here. Everything else will be in follow up PR(s).
The polymorphic inference/solver/application is practically trivial
here, so here is my view on how I see large-scale structure of
`TypeVarTuple` implementation:
* There should be no special-casing in `applytype.py`, so I deleted
everything from there (as I did for `ParamSpec`) and complemented
`visit_callable_type()` in `expandtype.py`. Basically, `applytype.py`
should have three simple steps: validate substitutions (upper bounds,
values, argument kinds etc.); call `expand_type()`; update callable type
variables (currently we just reduce the number, but in future we may
also add variables there, see TODO that I added).
* The only valid positions for a variadic item (a.k.a. `UnpackType`) are
inside `Instance`s, `TupleType`s, and `CallableType`s. I like how there
is an agreement that for callables there should never be a prefix, and
instead prefix should be represented with regular positional arguments.
I think that ideally we should enforce this with an `assert` in
`CallableType` constructor (similar to how I did this for `ParamSpec`).
* Completing `expand_type()` should be a priority (since it describes
basic semantics of `TypeVarLikeType`s). I think I made good progress in
this direction. IIUC the only valid substitution for `*Ts` are
`TupleType.items`, `*tuple[X, ...]`, `Any`, and `<nothing>`, so it was
not hard.
* I propose to only allow `TupleType` (mostly for `semanal.py`, see item
below), plain `TypeVarTupleType`, and a homogeneous `tuple` instances
inside `UnpackType`. Supporting unions of those is not specified by the
PEP and support will likely be quite tricky to implement. Also I propose
to even eagerly expand type aliases to tuples (since there is no point
in supporting recursive types like `A = Tuple[int, *A]`).
* I propose to forcefully flatten nested `TupleType`s, there should be
no things like `Tuple[X1, *Tuple[X2, *Ts, Y2], Y1]` etc after semantic
analysis. (Similarly to how we always flatten `Parameters` for
`ParamSpec`, and how we flatten nested unions in `UnionType`
_constructor_). Currently we do the flattening/normalization of tuples
in `expand_type()` etc.
* I suspect `build_constraints_for_unpack()` may be broken, at least
when it was used for tuples and callables it did something wrong in few
cases I tested (and there are other symptoms I mentioned in a TODO). I
therefore re-implemented logic for callables/tuples using a separate
dedicated helper. I will investigate more later.

As I mentioned above, I only implemented a strictly minimal amount of the
above plan to make my tests pass, but still wanted to write this out to
see if there are any objections (or maybe I don't understand something).
If there are no objections to this plan, I will continue it in separate
PR(s). Btw, I like how with this plan we will have clear logical
parallels between `TypeVarTuple` implementation and (recently updated)
`ParamSpec` implementation.

---------

Co-authored-by: Ivan Levkivskyi <ilevkivskyi@hopper.com>
---
 mypy/applytype.py                       |  64 ++------
 mypy/checkexpr.py                       |  24 ++-
 mypy/constraints.py                     | 192 +++++++++++++++++++-----
 mypy/expandtype.py                      | 135 ++++++++---------
 mypy/solve.py                           |  37 +++--
 mypy/typeops.py                         |   3 +
 mypy/types.py                           |   7 +-
 mypy/typevartuples.py                   |  19 ---
 test-data/unit/check-generics.test      | 144 +++++++++++++++++-
 test-data/unit/check-typevar-tuple.test |  24 +--
 10 files changed, 440 insertions(+), 209 deletions(-)

diff --git a/mypy/applytype.py b/mypy/applytype.py
index 6abe7f0022f8..884be287e33d 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -3,15 +3,13 @@
 from typing import Callable, Sequence
 
 import mypy.subtypes
-from mypy.expandtype import expand_type, expand_unpack_with_variables
-from mypy.nodes import ARG_STAR, Context
+from mypy.expandtype import expand_type
+from mypy.nodes import Context
 from mypy.types import (
     AnyType,
     CallableType,
-    Instance,
     ParamSpecType,
     PartialType,
-    TupleType,
     Type,
     TypeVarId,
     TypeVarLikeType,
@@ -21,7 +19,6 @@
     UnpackType,
     get_proper_type,
 )
-from mypy.typevartuples import find_unpack_in_list, replace_starargs
 
 
 def get_target_type(
@@ -107,6 +104,8 @@ def apply_generic_arguments(
         if target_type is not None:
             id_to_type[tvar.id] = target_type
 
+    # TODO: validate arg_kinds/arg_names for ParamSpec and TypeVarTuple replacements,
+    # not just type variable bounds above.
     param_spec = callable.param_spec()
     if param_spec is not None:
         nt = id_to_type.get(param_spec.id)
@@ -122,55 +121,9 @@ def apply_generic_arguments(
     # Apply arguments to argument types.
     var_arg = callable.var_arg()
     if var_arg is not None and isinstance(var_arg.typ, UnpackType):
-        star_index = callable.arg_kinds.index(ARG_STAR)
-        callable = callable.copy_modified(
-            arg_types=(
-                [expand_type(at, id_to_type) for at in callable.arg_types[:star_index]]
-                + [callable.arg_types[star_index]]
-                + [expand_type(at, id_to_type) for at in callable.arg_types[star_index + 1 :]]
-            )
-        )
-
-        unpacked_type = get_proper_type(var_arg.typ.type)
-        if isinstance(unpacked_type, TupleType):
-            # Assuming for now that because we convert prefixes to positional arguments,
-            # the first argument is always an unpack.
-            expanded_tuple = expand_type(unpacked_type, id_to_type)
-            if isinstance(expanded_tuple, TupleType):
-                # TODO: handle the case where the tuple has an unpack. This will
-                # hit an assert below.
-                expanded_unpack = find_unpack_in_list(expanded_tuple.items)
-                if expanded_unpack is not None:
-                    callable = callable.copy_modified(
-                        arg_types=(
-                            callable.arg_types[:star_index]
-                            + [expanded_tuple]
-                            + callable.arg_types[star_index + 1 :]
-                        )
-                    )
-                else:
-                    callable = replace_starargs(callable, expanded_tuple.items)
-            else:
-                # TODO: handle the case for if we get a variable length tuple.
-                assert False, f"mypy bug: unimplemented case, {expanded_tuple}"
-        elif isinstance(unpacked_type, TypeVarTupleType):
-            expanded_tvt = expand_unpack_with_variables(var_arg.typ, id_to_type)
-            if isinstance(expanded_tvt, list):
-                for t in expanded_tvt:
-                    assert not isinstance(t, UnpackType)
-                callable = replace_starargs(callable, expanded_tvt)
-            else:
-                assert isinstance(expanded_tvt, Instance)
-                assert expanded_tvt.type.fullname == "builtins.tuple"
-                callable = callable.copy_modified(
-                    arg_types=(
-                        callable.arg_types[:star_index]
-                        + [expanded_tvt.args[0]]
-                        + callable.arg_types[star_index + 1 :]
-                    )
-                )
-        else:
-            assert False, "mypy bug: unhandled case applying unpack"
+        callable = expand_type(callable, id_to_type)
+        assert isinstance(callable, CallableType)
+        return callable.copy_modified(variables=[tv for tv in tvars if tv.id not in id_to_type])
     else:
         callable = callable.copy_modified(
             arg_types=[expand_type(at, id_to_type) for at in callable.arg_types]
@@ -183,6 +136,9 @@ def apply_generic_arguments(
         type_guard = None
 
     # The callable may retain some type vars if only some were applied.
+    # TODO: move apply_poly() logic from checkexpr.py here when new inference
+    # becomes universally used (i.e. in all passes + in unification).
+    # With this new logic we can actually *add* some new free variables.
     remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type]
 
     return callable.copy_modified(
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 797473f7f58f..420cfd990820 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -2373,11 +2373,15 @@ def check_argument_types(
                     ]
                     actual_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1)
 
-                    assert isinstance(orig_callee_arg_type, TupleType)
-                    assert orig_callee_arg_type.items
-                    callee_arg_types = orig_callee_arg_type.items
+                    # TODO: can we really assert this? What if formal is just plain Unpack[Ts]?
+                    assert isinstance(orig_callee_arg_type, UnpackType)
+                    assert isinstance(orig_callee_arg_type.type, ProperType) and isinstance(
+                        orig_callee_arg_type.type, TupleType
+                    )
+                    assert orig_callee_arg_type.type.items
+                    callee_arg_types = orig_callee_arg_type.type.items
                     callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (
-                        len(orig_callee_arg_type.items) - 1
+                        len(orig_callee_arg_type.type.items) - 1
                     )
                     expanded_tuple = True
 
@@ -5853,8 +5857,9 @@ def visit_param_spec(self, t: ParamSpecType) -> Type:
         return super().visit_param_spec(t)
 
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type:
-        # TODO: Support polymorphic apply for TypeVarTuple.
-        raise PolyTranslationError()
+        if t in self.poly_tvars and t not in self.bound_tvars:
+            raise PolyTranslationError()
+        return super().visit_type_var_tuple(t)
 
     def visit_type_alias_type(self, t: TypeAliasType) -> Type:
         if not t.args:
@@ -5888,7 +5893,6 @@ def visit_instance(self, t: Instance) -> Type:
                 return t.copy_modified(args=new_args)
         # There is the same problem with callback protocols as with aliases
         # (callback protocols are essentially more flexible aliases to callables).
-        # Note: consider supporting bindings in instances, e.g. LRUCache[[x: T], T].
         if t.args and t.type.is_protocol and t.type.protocol_members == ["__call__"]:
             if t.type in self.seen_aliases:
                 raise PolyTranslationError()
@@ -5923,6 +5927,12 @@ def __init__(self) -> None:
     def visit_type_var(self, t: TypeVarType) -> bool:
         return True
 
+    def visit_param_spec(self, t: ParamSpecType) -> bool:
+        return True
+
+    def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool:
+        return True
+
 
 def has_erased_component(t: Type | None) -> bool:
     return t is not None and t.accept(HasErasedComponentsQuery())
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 04c3378ce16b..26504ed06b3e 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -9,7 +9,16 @@
 from mypy.argmap import ArgTypeExpander
 from mypy.erasetype import erase_typevars
 from mypy.maptype import map_instance_to_supertype
-from mypy.nodes import ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, CONTRAVARIANT, COVARIANT, ArgKind
+from mypy.nodes import (
+    ARG_OPT,
+    ARG_POS,
+    ARG_STAR,
+    ARG_STAR2,
+    CONTRAVARIANT,
+    COVARIANT,
+    ArgKind,
+    TypeInfo,
+)
 from mypy.types import (
     TUPLE_LIKE_INSTANCE_NAMES,
     AnyType,
@@ -70,6 +79,8 @@ class Constraint:
     def __init__(self, type_var: TypeVarLikeType, op: int, target: Type) -> None:
         self.type_var = type_var.id
         self.op = op
+        # TODO: should we add "assert not isinstance(target, UnpackType)"?
+        # UnpackType is a synthetic type, and is never valid as a constraint target.
         self.target = target
         self.origin_type_var = type_var
         # These are additional type variables that should be solved for together with type_var.
@@ -940,17 +951,20 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                 if not template.is_ellipsis_args:
                     unpack_present = find_unpack_in_list(template.arg_types)
                     if unpack_present is not None:
-                        (
-                            unpack_constraints,
-                            cactual_args_t,
-                            template_args_t,
-                        ) = find_and_build_constraints_for_unpack(
-                            tuple(cactual.arg_types), tuple(template.arg_types), self.direction
+                        # We need to re-normalize args to the form they appear in tuples,
+                        # for callables we always pack the suffix inside another tuple.
+                        unpack = template.arg_types[unpack_present]
+                        assert isinstance(unpack, UnpackType)
+                        tuple_type = get_tuple_fallback_from_unpack(unpack)
+                        template_types = repack_callable_args(template, tuple_type)
+                        actual_types = repack_callable_args(cactual, tuple_type)
+                        # Now we can use the same general helper as for tuple types.
+                        unpack_constraints = build_constraints_for_simple_unpack(
+                            template_types, actual_types, neg_op(self.direction)
                         )
-                        template_args = list(template_args_t)
-                        cactual_args = list(cactual_args_t)
+                        template_args = []
+                        cactual_args = []
                         res.extend(unpack_constraints)
-                        assert len(template_args) == len(cactual_args)
                     else:
                         template_args = template.arg_types
                         cactual_args = cactual.arg_types
@@ -961,7 +975,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                     # branches), and in Callable vs Callable inference (two branches).
                     for t, a in zip(template_args, cactual_args):
                         # This avoids bogus constraints like T <: P.args
-                        if isinstance(a, ParamSpecType):
+                        if isinstance(a, (ParamSpecType, UnpackType)):
                             # TODO: can we infer something useful for *T vs P?
                             continue
                         # Negate direction due to function argument type contravariance.
@@ -1093,13 +1107,11 @@ def visit_tuple_type(self, template: TupleType) -> list[Constraint]:
                     return [Constraint(type_var=unpacked_type, op=self.direction, target=actual)]
                 else:
                     assert isinstance(actual, TupleType)
-                    (
-                        unpack_constraints,
-                        actual_items,
-                        template_items,
-                    ) = find_and_build_constraints_for_unpack(
-                        tuple(actual.items), tuple(template.items), self.direction
+                    unpack_constraints = build_constraints_for_simple_unpack(
+                        template.items, actual.items, self.direction
                     )
+                    actual_items: tuple[Type, ...] = ()
+                    template_items: tuple[Type, ...] = ()
                     res.extend(unpack_constraints)
             elif isinstance(actual, TupleType):
                 actual_items = tuple(actual.items)
@@ -1232,28 +1244,132 @@ def find_matching_overload_items(
     return res
 
 
-def find_and_build_constraints_for_unpack(
-    mapped: tuple[Type, ...], template: tuple[Type, ...], direction: int
-) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]:
-    mapped_prefix_len = find_unpack_in_list(mapped)
-    if mapped_prefix_len is not None:
-        mapped_suffix_len: int | None = len(mapped) - mapped_prefix_len - 1
+def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo | None:
+    """Get builtins.tuple type from available types to construct homogeneous tuples."""
+    tp = get_proper_type(unpack.type)
+    if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple":
+        return tp.type
+    if isinstance(tp, TypeVarTupleType):
+        return tp.tuple_fallback.type
+    if isinstance(tp, TupleType):
+        for base in tp.partial_fallback.type.mro:
+            if base.fullname == "builtins.tuple":
+                return base
+    return None
+
+
+def repack_callable_args(callable: CallableType, tuple_type: TypeInfo | None) -> list[Type]:
+    """Present callable with star unpack in a normalized form.
+
+    Since positional arguments cannot follow star argument, they are packed in a suffix,
+    while prefix is represented as individual positional args. We want to put all in a single
+    list with unpack in the middle, and prefix/suffix on the sides (as they would appear
+    in e.g. a TupleType).
+    """
+    if ARG_STAR not in callable.arg_kinds:
+        return callable.arg_types
+    star_index = callable.arg_kinds.index(ARG_STAR)
+    arg_types = callable.arg_types[:star_index]
+    star_type = callable.arg_types[star_index]
+    suffix_types = []
+    if not isinstance(star_type, UnpackType):
+        if tuple_type is not None:
+            # Re-normalize *args: X -> *args: *tuple[X, ...]
+            star_type = UnpackType(Instance(tuple_type, [star_type]))
+        else:
+            # This is unfortunate, something like tuple[Any, ...] would be better.
+            star_type = UnpackType(AnyType(TypeOfAny.from_error))
     else:
-        mapped_suffix_len = None
+        tp = get_proper_type(star_type.type)
+        if isinstance(tp, TupleType):
+            assert isinstance(tp.items[0], UnpackType)
+            star_type = tp.items[0]
+            suffix_types = tp.items[1:]
+    return arg_types + [star_type] + suffix_types
 
-    template_prefix_len = find_unpack_in_list(template)
-    assert template_prefix_len is not None
-    template_suffix_len = len(template) - template_prefix_len - 1
 
-    return build_constraints_for_unpack(
-        mapped,
-        mapped_prefix_len,
-        mapped_suffix_len,
-        template,
-        template_prefix_len,
-        template_suffix_len,
-        direction,
+def build_constraints_for_simple_unpack(
+    template_args: list[Type], actual_args: list[Type], direction: int
+) -> list[Constraint]:
+    """Infer constraints between two lists of types with variadic items.
+
+    This function is only supposed to be called when a variadic item is present in templates.
+    If there is no variadic item the actuals, we simply use split_with_prefix_and_suffix()
+    and infer prefix <: prefix, suffix <: suffix, variadic <: middle. If there is a variadic
+    item in the actuals we need to be more careful, only common prefix/suffix can generate
+    constraints, also we can only infer constraints for variadic template item, if template
+    prefix/suffix are shorter that actual ones, otherwise there may be partial overlap
+    between variadic items, for example if template prefix is longer:
+
+        templates: T1, T2, Ts, Ts, Ts, ...
+        actuals:   A1, As, As, As, ...
+
+    Note: this function can only be called for builtin variadic constructors: Tuple and Callable,
+    for Instances variance depends on position, and a much more complex function
+    build_constraints_for_unpack() should be used.
+    """
+    template_unpack = find_unpack_in_list(template_args)
+    assert template_unpack is not None
+    template_prefix = template_unpack
+    template_suffix = len(template_args) - template_prefix - 1
+
+    t_unpack = None
+    res = []
+
+    actual_unpack = find_unpack_in_list(actual_args)
+    if actual_unpack is None:
+        t_unpack = template_args[template_unpack]
+        if template_prefix + template_suffix > len(actual_args):
+            # These can't be subtypes of each-other, return fast.
+            assert isinstance(t_unpack, UnpackType)
+            if isinstance(t_unpack.type, TypeVarTupleType):
+                # Set TypeVarTuple to empty to improve error messages.
+                return [
+                    Constraint(
+                        t_unpack.type, direction, TupleType([], t_unpack.type.tuple_fallback)
+                    )
+                ]
+            else:
+                return []
+        common_prefix = template_prefix
+        common_suffix = template_suffix
+    else:
+        actual_prefix = actual_unpack
+        actual_suffix = len(actual_args) - actual_prefix - 1
+        common_prefix = min(template_prefix, actual_prefix)
+        common_suffix = min(template_suffix, actual_suffix)
+        if actual_prefix >= template_prefix and actual_suffix >= template_suffix:
+            # This is the only case where we can guarantee there will be no partial overlap.
+            t_unpack = template_args[template_unpack]
+
+    # Handle constraints from prefixes/suffixes first.
+    start, middle, end = split_with_prefix_and_suffix(
+        tuple(actual_args), common_prefix, common_suffix
     )
+    for t, a in zip(template_args[:common_prefix], start):
+        res.extend(infer_constraints(t, a, direction))
+    if common_suffix:
+        for t, a in zip(template_args[-common_suffix:], end):
+            res.extend(infer_constraints(t, a, direction))
+
+    if t_unpack is not None:
+        # Add constraint(s) for variadic item when possible.
+        assert isinstance(t_unpack, UnpackType)
+        tp = get_proper_type(t_unpack.type)
+        if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple":
+            # Homogeneous case *tuple[T, ...] <: [X, Y, Z, ...].
+            for a in middle:
+                # TODO: should we use union instead of join here?
+                if not isinstance(a, UnpackType):
+                    res.extend(infer_constraints(tp.args[0], a, direction))
+                else:
+                    a_tp = get_proper_type(a.type)
+                    # This is the case *tuple[T, ...] <: *tuple[A, ...].
+                    if isinstance(a_tp, Instance) and a_tp.type.fullname == "builtins.tuple":
+                        res.extend(infer_constraints(tp.args[0], a_tp.args[0], direction))
+        elif isinstance(tp, TypeVarTupleType):
+            res.append(Constraint(tp, direction, TupleType(list(middle), tp.tuple_fallback)))
+    return res
 
 
 def build_constraints_for_unpack(
@@ -1268,6 +1384,10 @@ def build_constraints_for_unpack(
     template_suffix_len: int,
     direction: int,
 ) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]:
+    # TODO: this function looks broken:
+    # a) it should take into account variances, but it doesn't
+    # b) it looks like both call sites always pass identical values to args (2, 3) and (5, 6)
+    # because after map_instance_to_supertype() both template and actual have same TypeInfo.
     if mapped_prefix_len is None:
         mapped_prefix_len = template_prefix_len
     if mapped_suffix_len is None:
@@ -1314,4 +1434,4 @@ def build_constraints_for_unpack(
             if len(template_unpack.items) == len(mapped_middle):
                 for template_arg, item in zip(template_unpack.items, mapped_middle):
                     res.extend(infer_constraints(template_arg, item, direction))
-    return (res, mapped_prefix + mapped_suffix, template_prefix + template_suffix)
+    return res, mapped_prefix + mapped_suffix, template_prefix + template_suffix
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 01d9c4463174..6f69e09936db 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -257,7 +257,7 @@ def visit_param_spec(self, t: ParamSpecType) -> Type:
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type:
         # Sometimes solver may need to expand a type variable with (a copy of) itself
         # (usually together with other TypeVars, but it is hard to filter out TypeVarTuples).
-        repl = self.variables[t.id]
+        repl = self.variables.get(t.id, t)
         if isinstance(repl, TypeVarTupleType):
             return repl
         raise NotImplementedError
@@ -269,45 +269,54 @@ def visit_unpack_type(self, t: UnpackType) -> Type:
         # Relevant sections that can call unpack should call expand_unpack()
         # instead.
         # However, if the item is a variadic tuple, we can simply carry it over.
+        # In particular, if we expand A[*tuple[T, ...]] with substitutions {T: str},
         # it is hard to assert this without getting proper type.
         return UnpackType(t.type.accept(self))
 
-    def expand_unpack(self, t: UnpackType) -> list[Type] | Instance | AnyType | None:
-        return expand_unpack_with_variables(t, self.variables)
+    def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType:
+        assert isinstance(t.type, TypeVarTupleType)
+        repl = get_proper_type(self.variables.get(t.type.id, t.type))
+        if isinstance(repl, TupleType):
+            return repl.items
+        elif (
+            isinstance(repl, Instance)
+            and repl.type.fullname == "builtins.tuple"
+            or isinstance(repl, TypeVarTupleType)
+        ):
+            return [UnpackType(typ=repl)]
+        elif isinstance(repl, (AnyType, UninhabitedType)):
+            # tuple[Any, ...] for Any would be better, but we don't have
+            # the type info to construct that type here.
+            return repl
+        else:
+            raise RuntimeError(f"Invalid type replacement to expand: {repl}")
 
     def visit_parameters(self, t: Parameters) -> Type:
         return t.copy_modified(arg_types=self.expand_types(t.arg_types))
 
+    # TODO: can we simplify this method? It is too long.
     def interpolate_args_for_unpack(
         self, t: CallableType, var_arg: UnpackType
     ) -> tuple[list[str | None], list[ArgKind], list[Type]]:
         star_index = t.arg_kinds.index(ARG_STAR)
 
-        # We have something like Unpack[Tuple[X1, X2, Unpack[Ts], Y1, Y2]]
         var_arg_type = get_proper_type(var_arg.type)
+        # We have something like Unpack[Tuple[Unpack[Ts], X1, X2]]
         if isinstance(var_arg_type, TupleType):
             expanded_tuple = var_arg_type.accept(self)
-            # TODO: handle the case that expanded_tuple is a variable length tuple.
             assert isinstance(expanded_tuple, ProperType) and isinstance(expanded_tuple, TupleType)
             expanded_items = expanded_tuple.items
         else:
+            # We have plain Unpack[Ts]
             expanded_items_res = self.expand_unpack(var_arg)
             if isinstance(expanded_items_res, list):
                 expanded_items = expanded_items_res
-            elif (
-                isinstance(expanded_items_res, Instance)
-                and expanded_items_res.type.fullname == "builtins.tuple"
-            ):
-                # TODO: We shouldnt't simply treat this as a *arg because of suffix handling
-                # (there cannot be positional args after a *arg)
+            else:
+                # We got Any or <nothing>
                 arg_types = (
-                    t.arg_types[:star_index]
-                    + [expanded_items_res.args[0]]
-                    + t.arg_types[star_index + 1 :]
+                    t.arg_types[:star_index] + [expanded_items_res] + t.arg_types[star_index + 1 :]
                 )
-                return (t.arg_names, t.arg_kinds, arg_types)
-            else:
-                return (t.arg_names, t.arg_kinds, t.arg_types)
+                return t.arg_names, t.arg_kinds, arg_types
 
         expanded_unpack_index = find_unpack_in_list(expanded_items)
         # This is the case where we just have Unpack[Tuple[X1, X2, X3]]
@@ -337,13 +346,14 @@ def interpolate_args_for_unpack(
             expanded_unpack = expanded_items[expanded_unpack_index]
             assert isinstance(expanded_unpack, UnpackType)
 
-            # Extract the typevartuple so we can get a tuple fallback from it.
+            # Extract the TypeVarTuple, so we can get a tuple fallback from it.
             expanded_unpacked_tvt = expanded_unpack.type
             if isinstance(expanded_unpacked_tvt, TypeVarTupleType):
                 fallback = expanded_unpacked_tvt.tuple_fallback
             else:
                 # This can happen when tuple[Any, ...] is used to "patch" a variadic
-                # generic type without type arguments provided.
+                # generic type without type arguments provided, or when substitution is
+                # homogeneous tuple.
                 assert isinstance(expanded_unpacked_tvt, ProperType)
                 assert isinstance(expanded_unpacked_tvt, Instance)
                 assert expanded_unpacked_tvt.type.fullname == "builtins.tuple"
@@ -354,18 +364,31 @@ def interpolate_args_for_unpack(
             arg_kinds = (
                 t.arg_kinds[:star_index] + [ARG_POS] * prefix_len + t.arg_kinds[star_index:]
             )
-            arg_types = (
-                self.expand_types(t.arg_types[:star_index])
-                + expanded_items[:prefix_len]
-                # Constructing the Unpack containing the tuple without the prefix.
-                + [
-                    UnpackType(TupleType(expanded_items[prefix_len:], fallback))
-                    if len(expanded_items) - prefix_len > 1
-                    else expanded_items[0]
-                ]
-                + self.expand_types(t.arg_types[star_index + 1 :])
-            )
-        return (arg_names, arg_kinds, arg_types)
+            if (
+                len(expanded_items) == 1
+                and isinstance(expanded_unpack.type, ProperType)
+                and isinstance(expanded_unpack.type, Instance)
+            ):
+                assert expanded_unpack.type.type.fullname == "builtins.tuple"
+                # Normalize *args: *tuple[X, ...] -> *args: X
+                arg_types = (
+                    self.expand_types(t.arg_types[:star_index])
+                    + [expanded_unpack.type.args[0]]
+                    + self.expand_types(t.arg_types[star_index + 1 :])
+                )
+            else:
+                arg_types = (
+                    self.expand_types(t.arg_types[:star_index])
+                    + expanded_items[:prefix_len]
+                    # Constructing the Unpack containing the tuple without the prefix.
+                    + [
+                        UnpackType(TupleType(expanded_items[prefix_len:], fallback))
+                        if len(expanded_items) - prefix_len > 1
+                        else expanded_items[prefix_len]
+                    ]
+                    + self.expand_types(t.arg_types[star_index + 1 :])
+                )
+        return arg_names, arg_kinds, arg_types
 
     def visit_callable_type(self, t: CallableType) -> CallableType:
         param_spec = t.param_spec()
@@ -430,7 +453,7 @@ def visit_overloaded(self, t: Overloaded) -> Type:
 
     def expand_types_with_unpack(
         self, typs: Sequence[Type]
-    ) -> list[Type] | AnyType | UninhabitedType | Instance:
+    ) -> list[Type] | AnyType | UninhabitedType:
         """Expands a list of types that has an unpack.
 
         In corner cases, this can return a type rather than a list, in which case this
@@ -444,15 +467,8 @@ def expand_types_with_unpack(
         for item in typs:
             if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType):
                 unpacked_items = self.expand_unpack(item)
-                if unpacked_items is None:
-                    # TODO: better error, something like tuple of unknown?
-                    return UninhabitedType()
-                elif isinstance(unpacked_items, Instance):
-                    if len(typs) == 1:
-                        return unpacked_items
-                    else:
-                        assert False, "Invalid unpack of variable length tuple"
-                elif isinstance(unpacked_items, AnyType):
+                if isinstance(unpacked_items, (AnyType, UninhabitedType)):
+                    # TODO: better error for <nothing>, something like tuple of unknown?
                     return unpacked_items
                 else:
                     items.extend(unpacked_items)
@@ -464,6 +480,14 @@ def expand_types_with_unpack(
     def visit_tuple_type(self, t: TupleType) -> Type:
         items = self.expand_types_with_unpack(t.items)
         if isinstance(items, list):
+            if len(items) == 1:
+                # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...]
+                item = items[0]
+                if isinstance(item, UnpackType):
+                    assert isinstance(item.type, ProperType)
+                    if isinstance(item.type, Instance):
+                        assert item.type.type.fullname == "builtins.tuple"
+                        return item.type
             fallback = t.partial_fallback.accept(self)
             assert isinstance(fallback, ProperType) and isinstance(fallback, Instance)
             return t.copy_modified(items=items, fallback=fallback)
@@ -509,6 +533,7 @@ def visit_type_alias_type(self, t: TypeAliasType) -> Type:
         # alias itself), so we just expand the arguments.
         args = self.expand_types_with_unpack(t.args)
         if isinstance(args, list):
+            # TODO: normalize if target is Tuple, and args are [*tuple[X, ...]]?
             return t.copy_modified(args=args)
         else:
             return args
@@ -520,34 +545,6 @@ def expand_types(self, types: Iterable[Type]) -> list[Type]:
         return a
 
 
-def expand_unpack_with_variables(
-    t: UnpackType, variables: Mapping[TypeVarId, Type]
-) -> list[Type] | Instance | AnyType | None:
-    """May return either a list of types to unpack to, any, or a single
-    variable length tuple. The latter may not be valid in all contexts.
-    """
-    if isinstance(t.type, TypeVarTupleType):
-        repl = get_proper_type(variables.get(t.type.id, t))
-        if isinstance(repl, TupleType):
-            return repl.items
-        elif isinstance(repl, Instance) and repl.type.fullname == "builtins.tuple":
-            return repl
-        elif isinstance(repl, AnyType):
-            # tuple[Any, ...] would be better, but we don't have
-            # the type info to construct that type here.
-            return repl
-        elif isinstance(repl, TypeVarTupleType):
-            return [UnpackType(typ=repl)]
-        elif isinstance(repl, UnpackType):
-            return [repl]
-        elif isinstance(repl, UninhabitedType):
-            return None
-        else:
-            raise NotImplementedError(f"Invalid type replacement to expand: {repl}")
-    else:
-        raise NotImplementedError(f"Invalid type to expand: {t.type}")
-
-
 @overload
 def expand_self_type(var: Var, typ: ProperType, replacement: ProperType) -> ProperType:
     ...
diff --git a/mypy/solve.py b/mypy/solve.py
index 4b2b899c2a8d..5945d97ed85a 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 from collections import defaultdict
-from typing import Iterable, Sequence
+from typing import Iterable, Sequence, Tuple
 from typing_extensions import TypeAlias as _TypeAlias
 
 from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints
@@ -19,13 +19,16 @@
     NoneType,
     ParamSpecType,
     ProperType,
+    TupleType,
     Type,
     TypeOfAny,
     TypeVarId,
     TypeVarLikeType,
+    TypeVarTupleType,
     TypeVarType,
     UninhabitedType,
     UnionType,
+    UnpackType,
     get_proper_type,
 )
 from mypy.typestate import type_state
@@ -330,6 +333,23 @@ def is_trivial_bound(tp: ProperType) -> bool:
     return isinstance(tp, Instance) and tp.type.fullname == "builtins.object"
 
 
+def find_linear(c: Constraint) -> Tuple[bool, TypeVarId | None]:
+    """Find out if this constraint represent a linear relationship, return target id if yes."""
+    if isinstance(c.origin_type_var, TypeVarType):
+        if isinstance(c.target, TypeVarType):
+            return True, c.target.id
+    if isinstance(c.origin_type_var, ParamSpecType):
+        if isinstance(c.target, ParamSpecType) and not c.target.prefix.arg_types:
+            return True, c.target.id
+    if isinstance(c.origin_type_var, TypeVarTupleType):
+        target = get_proper_type(c.target)
+        if isinstance(target, TupleType) and len(target.items) == 1:
+            item = target.items[0]
+            if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType):
+                return True, item.type.id
+    return False, None
+
+
 def transitive_closure(
     tvars: list[TypeVarId], constraints: list[Constraint]
 ) -> tuple[Graph, Bounds, Bounds]:
@@ -361,16 +381,15 @@ def transitive_closure(
         c = remaining.pop()
         # Note that ParamSpec constraint P <: Q may be considered linear only if Q has no prefix,
         # for cases like P <: Concatenate[T, Q] we should consider this non-linear and put {P} and
-        # {T, Q} into separate SCCs.
-        if (
-            isinstance(c.target, TypeVarType)
-            or isinstance(c.target, ParamSpecType)
-            and not c.target.prefix.arg_types
-        ) and c.target.id in tvars:
+        # {T, Q} into separate SCCs. Similarly, Ts <: Tuple[*Us] considered linear, while
+        # Ts <: Tuple[*Us, U] is non-linear.
+        is_linear, target_id = find_linear(c)
+        if is_linear and target_id in tvars:
+            assert target_id is not None
             if c.op == SUBTYPE_OF:
-                lower, upper = c.type_var, c.target.id
+                lower, upper = c.type_var, target_id
             else:
-                lower, upper = c.target.id, c.type_var
+                lower, upper = target_id, c.type_var
             if (lower, upper) in graph:
                 continue
             graph |= {
diff --git a/mypy/typeops.py b/mypy/typeops.py
index d746ea701fde..22dbd9e9f42e 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -973,6 +973,9 @@ def visit_type_var(self, t: TypeVarType) -> list[TypeVarLikeType]:
     def visit_param_spec(self, t: ParamSpecType) -> list[TypeVarLikeType]:
         return [t] if self.include_all else []
 
+    def visit_type_var_tuple(self, t: TypeVarTupleType) -> list[TypeVarLikeType]:
+        return [t] if self.include_all else []
+
 
 def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool:
     """Does this type have a custom special method such as __format__() or __eq__()?
diff --git a/mypy/types.py b/mypy/types.py
index 359ca713616b..d4e2fc7cb63c 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1045,7 +1045,8 @@ class UnpackType(ProperType):
     or unpacking * syntax.
 
     The inner type should be either a TypeVarTuple, a constant size
-    tuple, or a variable length tuple, or a union of one of those.
+    tuple, or a variable length tuple. Type aliases to these are not allowed,
+    except during semantic analysis.
     """
 
     __slots__ = ["type"]
@@ -2260,6 +2261,10 @@ def __init__(
     ) -> None:
         super().__init__(line, column)
         self.partial_fallback = fallback
+        # TODO: flatten/normalize unpack items (very similar to unions) here.
+        # Probably also for instances, type aliases, callables, and Unpack itself. For example,
+        # tuple[*tuple[X, ...], ...] -> tuple[X, ...] and Tuple[*tuple[X, ...]] -> tuple[X, ...].
+        # Currently normalization happens in expand_type() et al., which is sub-optimal.
         self.items = items
         self.implicit = implicit
 
diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py
index ac5f4e43c3bf..29c800140eec 100644
--- a/mypy/typevartuples.py
+++ b/mypy/typevartuples.py
@@ -4,9 +4,7 @@
 
 from typing import Sequence
 
-from mypy.nodes import ARG_POS, ARG_STAR
 from mypy.types import (
-    CallableType,
     Instance,
     ProperType,
     Type,
@@ -179,20 +177,3 @@ def extract_unpack(types: Sequence[Type]) -> ProperType | None:
         if isinstance(types[0], UnpackType):
             return get_proper_type(types[0].type)
     return None
-
-
-def replace_starargs(callable: CallableType, types: list[Type]) -> CallableType:
-    star_index = callable.arg_kinds.index(ARG_STAR)
-    arg_kinds = (
-        callable.arg_kinds[:star_index]
-        + [ARG_POS] * len(types)
-        + callable.arg_kinds[star_index + 1 :]
-    )
-    arg_names = (
-        callable.arg_names[:star_index]
-        + [None] * len(types)
-        + callable.arg_names[star_index + 1 :]
-    )
-    arg_types = callable.arg_types[:star_index] + types + callable.arg_types[star_index + 1 :]
-
-    return callable.copy_modified(arg_types=arg_types, arg_names=arg_names, arg_kinds=arg_kinds)
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 1fac42b492a8..95a7bdd2b2cd 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -3144,7 +3144,7 @@ def pair(x: U) -> Callable[[V], Tuple[V, U]]: ...
 reveal_type(dec(id))  # N: Revealed type is "def [T] (T`2) -> T`2"
 reveal_type(dec(either))  # N: Revealed type is "def [T] (T`5, x: T`5) -> T`5"
 reveal_type(dec(pair))  # N: Revealed type is "def [T, U] (T`8, x: U`-1) -> Tuple[T`8, U`-1]"
-# This is counter-intuitive but looks correct, dec matches itself only if P is empty
+# This is counter-intuitive but looks correct, dec matches itself only if P can be empty
 reveal_type(dec(dec))  # N: Revealed type is "def [T, S] (T`11, f: def () -> def (T`11) -> S`12) -> S`12"
 [builtins fixtures/list.pyi]
 
@@ -3179,7 +3179,6 @@ P = ParamSpec('P')
 Q = ParamSpec('Q')
 
 class Foo(Generic[P]): ...
-class Bar(Generic[P, T]): ...
 
 def dec(f: Callable[P, int]) -> Callable[P, Foo[P]]: ...
 h: Callable[Concatenate[T, Q], int]
@@ -3263,3 +3262,144 @@ def pop_off(fn: Callable[Concatenate[T1, P], T2]) -> Callable[P, Callable[[T1],
 def test(command: Foo[Q]) -> Foo[Q]: ...
 reveal_type(test)  # N: Revealed type is "def () -> def [Q] (__main__.Foo[Q`-1]) -> __main__.Foo[Q`-1]"
 [builtins fixtures/tuple.pyi]
+
+[case testInferenceAgainstGenericVariadicBasicInList]
+# flags: --new-type-inference
+from typing import Tuple, TypeVar, List, Callable
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+Ts = TypeVarTuple("Ts")
+def dec(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], List[T]]: ...
+
+U = TypeVar("U")
+V = TypeVar("V")
+def id(x: U) -> U: ...
+def either(x: U, y: U) -> U: ...
+def pair(x: U, y: V) -> Tuple[U, V]: ...
+
+reveal_type(dec(id))  # N: Revealed type is "def [T] (T`2) -> builtins.list[T`2]"
+reveal_type(dec(either))  # N: Revealed type is "def [T] (T`4, T`4) -> builtins.list[T`4]"
+reveal_type(dec(pair))  # N: Revealed type is "def [U, V] (U`-1, V`-2) -> builtins.list[Tuple[U`-1, V`-2]]"
+[builtins fixtures/tuple.pyi]
+
+[case testInferenceAgainstGenericVariadicBasicDeList]
+# flags: --new-type-inference
+from typing import Tuple, TypeVar, List, Callable
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+Ts = TypeVarTuple("Ts")
+def dec(f: Callable[[Unpack[Ts]], List[T]]) -> Callable[[Unpack[Ts]], T]: ...
+
+U = TypeVar("U")
+V = TypeVar("V")
+def id(x: U) -> U: ...
+def either(x: U, y: U) -> U: ...
+
+reveal_type(dec(id))  # N: Revealed type is "def [T] (builtins.list[T`2]) -> T`2"
+reveal_type(dec(either))  # N: Revealed type is "def [T] (builtins.list[T`4], builtins.list[T`4]) -> T`4"
+[builtins fixtures/tuple.pyi]
+
+[case testInferenceAgainstGenericVariadicPopOff]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Tuple
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+def dec(f: Callable[[T, Unpack[Ts]], S]) -> Callable[[Unpack[Ts]], Callable[[T], S]]: ...
+
+U = TypeVar("U")
+V = TypeVar("V")
+def id(x: U) -> U: ...
+def either(x: U, y: U) -> U: ...
+def pair(x: U, y: V) -> Tuple[U, V]: ...
+
+reveal_type(dec(id))  # N: Revealed type is "def () -> def [T] (T`1) -> T`1"
+reveal_type(dec(either))  # N: Revealed type is "def [T] (T`4) -> def (T`4) -> T`4"
+reveal_type(dec(pair))  # N: Revealed type is "def [V] (V`-2) -> def [T] (T`7) -> Tuple[T`7, V`-2]"
+reveal_type(dec(dec))  # N: Revealed type is "def () -> def [T, Ts, S] (def (T`-1, *Unpack[Ts`-2]) -> S`-3) -> def (*Unpack[Ts`-2]) -> def (T`-1) -> S`-3"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericVariadicPopOn]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Tuple
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+def dec(f: Callable[[Unpack[Ts]], Callable[[T], S]]) -> Callable[[T, Unpack[Ts]], S]: ...
+
+U = TypeVar("U")
+V = TypeVar("V")
+def id() -> Callable[[U], U]: ...
+def either(x: U) -> Callable[[U], U]: ...
+def pair(x: U) -> Callable[[V], Tuple[V, U]]: ...
+
+reveal_type(dec(id))  # N: Revealed type is "def [T] (T`2) -> T`2"
+reveal_type(dec(either))  # N: Revealed type is "def [T] (T`5, T`5) -> T`5"
+reveal_type(dec(pair))  # N: Revealed type is "def [T, U] (T`8, U`-1) -> Tuple[T`8, U`-1]"
+# This is counter-intuitive but looks correct, dec matches itself only if Ts is empty
+reveal_type(dec(dec))  # N: Revealed type is "def [T, S] (T`11, def () -> def (T`11) -> S`12) -> S`12"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericVariadicVsVariadic]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, List, Generic
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+Us = TypeVarTuple("Us")
+
+class Foo(Generic[Unpack[Ts]]): ...
+class Bar(Generic[Unpack[Ts], T]): ...
+
+def dec(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], List[T]]: ...
+# TODO: do not crash on Foo[Us] (with missing Unpack), instead give an error.
+def f(*args: Unpack[Us]) -> Foo[Unpack[Us]]: ...
+reveal_type(dec(f))  # N: Revealed type is "def [Ts] (*Unpack[Ts`1]) -> builtins.list[__main__.Foo[Unpack[Ts`1]]]"
+g: Callable[[Unpack[Us]], Foo[Unpack[Us]]]
+reveal_type(dec(g))  # N: Revealed type is "def [Ts] (*Unpack[Ts`3]) -> builtins.list[__main__.Foo[Unpack[Ts`3]]]"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericVariadicVsVariadicConcatenate]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, Generic
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+Us = TypeVarTuple("Us")
+
+class Foo(Generic[Unpack[Ts]]): ...
+
+def dec(f: Callable[[Unpack[Ts]], int]) -> Callable[[Unpack[Ts]], Foo[Unpack[Ts]]]: ...
+h: Callable[[T, Unpack[Us]], int]
+g: Callable[[T, Unpack[Us]], int]
+h = g
+reveal_type(dec(h))  # N: Revealed type is "def [T, Us] (T`-1, *Unpack[Us`-2]) -> __main__.Foo[T`-1, Unpack[Us`-2]]"
+[builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericVariadicSecondary]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, Generic
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+Ts = TypeVarTuple("Ts")
+Us = TypeVarTuple("Us")
+
+class Foo(Generic[Unpack[Ts]]): ...
+
+def dec(f: Callable[[Unpack[Ts]], Foo[Unpack[Ts]]]) -> Callable[[Unpack[Ts]], Foo[Unpack[Ts]]]: ...
+g: Callable[[T], Foo[int]]
+reveal_type(dec(g))  # N: Revealed type is "def (builtins.int) -> __main__.Foo[builtins.int]"
+h: Callable[[Unpack[Us]], Foo[int]]
+reveal_type(dec(g))  # N: Revealed type is "def (builtins.int) -> __main__.Foo[builtins.int]"
+[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index e822cea9304f..b28b2ead45e7 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -479,18 +479,18 @@ vargs: Tuple[int, ...]
 vargs_str: Tuple[str, ...]
 
 call(target=func, args=(0, 'foo'))
-call(target=func, args=('bar', 'foo'))  # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[object, str], None]"
-call(target=func, args=(True, 'foo', 0))  # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]"
-call(target=func, args=(0, 0, 'foo'))  # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]"
-call(target=func, args=vargs)  # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]"
+call(target=func, args=('bar', 'foo'))  # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[str, str], None]"
+call(target=func, args=(True, 'foo', 0))  # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[bool, str, int], None]"
+call(target=func, args=(0, 0, 'foo'))  # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int, int, str], None]"
+call(target=func, args=vargs)  # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(int)], None]"
 
 # NOTE: This behavior may be a bit contentious, it is maybe inconsistent with our handling of
 # PEP646 but consistent with our handling of callable constraints.
 call(target=func2, args=vargs)  # E: Argument "target" to "call" has incompatible type "Callable[[int, int], None]"; expected "Callable[[VarArg(int)], None]"
 call(target=func3, args=vargs)
 call(target=func3, args=(0,1))
-call(target=func3, args=(0,'foo'))  # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]"
-call(target=func3, args=vargs_str)  # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]"
+call(target=func3, args=(0,'foo'))  # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[int, str], None]"
+call(target=func3, args=vargs_str)  # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(str)], None]"
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTuplePep646CallableWithPrefixSuffix]
@@ -561,11 +561,11 @@ class A:
 vargs: Tuple[int, ...]
 vargs_str: Tuple[str, ...]
 
-call(A().func)  # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]"
+call(A().func)  # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[], None]"
 call(A().func, 0, 'foo')
-call(A().func, 0, 'foo', 0)  # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]"
-call(A().func, 0)  # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]"
-call(A().func, 0, 1)  # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int, object], None]"
+call(A().func, 0, 'foo', 0)  # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int, str, int], None]"
+call(A().func, 0)  # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int], None]"
+call(A().func, 0, 1)  # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int, int], None]"
 call(A().func2, 0, 0)
 call(A().func3, 0, 1, 2)
 call(A().func3)
@@ -689,7 +689,7 @@ reveal_type(x)  # N: Revealed type is "builtins.list[Tuple[Any, Unpack[builtins.
 
 B = Callable[[T, Unpack[Ts]], int]
 y: B
-reveal_type(y)  # N: Revealed type is "def (Any, *Unpack[builtins.tuple[Any, ...]]) -> builtins.int"
+reveal_type(y)  # N: Revealed type is "def (Any, *Any) -> builtins.int"
 
 C = G[T, Unpack[Ts], T]
 z: C
@@ -711,7 +711,7 @@ reveal_type(x)  # N: Revealed type is "builtins.list[Tuple[Any, Unpack[builtins.
 
 B = Callable[[T, S, Unpack[Ts]], int]
 y: B[int]  # E: Bad number of arguments for type alias, expected: at least 2, given: 1
-reveal_type(y)  # N: Revealed type is "def (Any, Any, *Unpack[builtins.tuple[Any, ...]]) -> builtins.int"
+reveal_type(y)  # N: Revealed type is "def (Any, Any, *Any) -> builtins.int"
 
 C = G[T, Unpack[Ts], S]
 z: C[int]  # E: Bad number of arguments for type alias, expected: at least 2, given: 1

From 1db3eb3c991b82c2b0f9b7bd0a2547e140dd402a Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 19 Aug 2023 14:49:39 +0100
Subject: [PATCH 062/288] Allow Ellipsis in Concatenate; cleanup ParamSpec
 literals (#15905)

Fixes https://github.com/python/mypy/issues/14761
Fixes https://github.com/python/mypy/issues/15318
Fixes https://github.com/python/mypy/issues/14656
Fixes https://github.com/python/mypy/issues/13518

I noticed there is a bunch of inconsistencies in `semanal`/`typeanal`
for ParamSpecs, so I decided to do a small cleanup. Using this opportunity
I also allow `Concatenate[int, ...]` (with literal Ellipsis), and reduce
verbosity of some errors.

cc @A5rocks
---
 mypy/semanal.py                               | 14 ++--
 mypy/typeanal.py                              | 54 ++++++++++----
 test-data/unit/check-literal.test             |  3 +-
 .../unit/check-parameter-specification.test   | 71 ++++++++++++++++++-
 test-data/unit/check-typevar-defaults.test    |  6 +-
 test-data/unit/semanal-errors.test            |  8 +--
 6 files changed, 123 insertions(+), 33 deletions(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index 9d968d1da781..ef66c9276664 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -5285,20 +5285,18 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None:
         else:
             items = [index]
 
-        # whether param spec literals be allowed here
-        # TODO: should this be computed once and passed in?
-        #   or is there a better way to do this?
+        # TODO: this needs a clean-up.
+        # Probably always allow Parameters literals, and validate in semanal_typeargs.py
         base = expr.base
         if isinstance(base, RefExpr) and isinstance(base.node, TypeAlias):
             alias = base.node
-            target = get_proper_type(alias.target)
-            if isinstance(target, Instance):
-                has_param_spec = target.type.has_param_spec_type
-                num_args = len(target.type.type_vars)
+            if any(isinstance(t, ParamSpecType) for t in alias.alias_tvars):
+                has_param_spec = True
+                num_args = len(alias.alias_tvars)
             else:
                 has_param_spec = False
                 num_args = -1
-        elif isinstance(base, NameExpr) and isinstance(base.node, TypeInfo):
+        elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo):
             has_param_spec = base.node.has_param_spec_type
             num_args = len(base.node.type_vars)
         else:
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 8ac73cdf8aac..b15b5c7654ba 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -226,6 +226,8 @@ def __init__(
         self.allow_required = allow_required
         # Are we in a context where ParamSpec literals are allowed?
         self.allow_param_spec_literals = allow_param_spec_literals
+        # Are we in context where literal "..." specifically is allowed?
+        self.allow_ellipsis = False
         # Should we report an error whenever we encounter a RawExpressionType outside
         # of a Literal context: e.g. whenever we encounter an invalid type? Normally,
         # we want to report an error, but the caller may want to do more specialized
@@ -461,9 +463,9 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type:
             self.api.fail("Concatenate needs type arguments", t, code=codes.VALID_TYPE)
             return AnyType(TypeOfAny.from_error)
 
-        # last argument has to be ParamSpec
-        ps = self.anal_type(t.args[-1], allow_param_spec=True)
-        if not isinstance(ps, ParamSpecType):
+        # Last argument has to be ParamSpec or Ellipsis.
+        ps = self.anal_type(t.args[-1], allow_param_spec=True, allow_ellipsis=True)
+        if not isinstance(ps, (ParamSpecType, Parameters)):
             if isinstance(ps, UnboundType) and self.allow_unbound_tvars:
                 sym = self.lookup_qualified(ps.name, t)
                 if sym is not None and isinstance(sym.node, ParamSpecExpr):
@@ -477,11 +479,11 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type:
 
         # TODO: this may not work well with aliases, if those worked.
         #   Those should be special-cased.
-        elif ps.prefix.arg_types:
+        elif isinstance(ps, ParamSpecType) and ps.prefix.arg_types:
             self.api.fail("Nested Concatenates are invalid", t, code=codes.VALID_TYPE)
 
         args = self.anal_array(t.args[:-1])
-        pre = ps.prefix
+        pre = ps.prefix if isinstance(ps, ParamSpecType) else ps
 
         # mypy can't infer this :(
         names: list[str | None] = [None] * len(args)
@@ -489,7 +491,7 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type:
         pre = Parameters(
             args + pre.arg_types, [ARG_POS] * len(args) + pre.arg_kinds, names + pre.arg_names
         )
-        return ps.copy_modified(prefix=pre)
+        return ps.copy_modified(prefix=pre) if isinstance(ps, ParamSpecType) else pre
 
     def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Type | None:
         """Bind special type that is recognized through magic name such as 'typing.Any'.
@@ -880,7 +882,7 @@ def visit_deleted_type(self, t: DeletedType) -> Type:
         return t
 
     def visit_type_list(self, t: TypeList) -> Type:
-        # paramspec literal (Z[[int, str, Whatever]])
+        # Parameters literal (Z[[int, str, Whatever]])
         if self.allow_param_spec_literals:
             params = self.analyze_callable_args(t)
             if params:
@@ -893,7 +895,8 @@ def visit_type_list(self, t: TypeList) -> Type:
             self.fail(
                 'Bracketed expression "[...]" is not valid as a type', t, code=codes.VALID_TYPE
             )
-            self.note('Did you mean "List[...]"?', t)
+            if len(t.items) == 1:
+                self.note('Did you mean "List[...]"?', t)
             return AnyType(TypeOfAny.from_error)
 
     def visit_callable_argument(self, t: CallableArgument) -> Type:
@@ -1106,7 +1109,7 @@ def visit_partial_type(self, t: PartialType) -> Type:
         assert False, "Internal error: Unexpected partial type"
 
     def visit_ellipsis_type(self, t: EllipsisType) -> Type:
-        if self.allow_param_spec_literals:
+        if self.allow_ellipsis or self.allow_param_spec_literals:
             any_type = AnyType(TypeOfAny.explicit)
             return Parameters(
                 [any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None], is_ellipsis_args=True
@@ -1174,7 +1177,7 @@ def analyze_callable_args_for_paramspec(
 
     def analyze_callable_args_for_concatenate(
         self, callable_args: Type, ret_type: Type, fallback: Instance
-    ) -> CallableType | None:
+    ) -> CallableType | AnyType | None:
         """Construct a 'Callable[C, RET]', where C is Concatenate[..., P], returning None if we
         cannot.
         """
@@ -1189,7 +1192,7 @@ def analyze_callable_args_for_concatenate(
             return None
 
         tvar_def = self.anal_type(callable_args, allow_param_spec=True)
-        if not isinstance(tvar_def, ParamSpecType):
+        if not isinstance(tvar_def, (ParamSpecType, Parameters)):
             if self.allow_unbound_tvars and isinstance(tvar_def, UnboundType):
                 sym = self.lookup_qualified(tvar_def.name, callable_args)
                 if sym is not None and isinstance(sym.node, ParamSpecExpr):
@@ -1198,7 +1201,18 @@ def analyze_callable_args_for_concatenate(
                     return callable_with_ellipsis(
                         AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback
                     )
-            return None
+            # Error was already given, so prevent further errors.
+            return AnyType(TypeOfAny.from_error)
+        if isinstance(tvar_def, Parameters):
+            # This comes from Concatenate[int, ...]
+            return CallableType(
+                arg_types=tvar_def.arg_types,
+                arg_names=tvar_def.arg_names,
+                arg_kinds=tvar_def.arg_kinds,
+                ret_type=ret_type,
+                fallback=fallback,
+                from_concatenate=True,
+            )
 
         # ick, CallableType should take ParamSpecType
         prefix = tvar_def.prefix
@@ -1257,7 +1271,7 @@ def analyze_callable_type(self, t: UnboundType) -> Type:
                     ) or self.analyze_callable_args_for_concatenate(
                         callable_args, ret_type, fallback
                     )
-                    if maybe_ret:
+                    if isinstance(maybe_ret, CallableType):
                         maybe_ret = maybe_ret.copy_modified(
                             ret_type=ret_type.accept(self), variables=variables
                         )
@@ -1274,6 +1288,8 @@ def analyze_callable_type(self, t: UnboundType) -> Type:
                         t,
                     )
                     return AnyType(TypeOfAny.from_error)
+                elif isinstance(maybe_ret, AnyType):
+                    return maybe_ret
                 ret = maybe_ret
         else:
             if self.options.disallow_any_generics:
@@ -1527,17 +1543,27 @@ def anal_array(
         self.allow_param_spec_literals = old_allow_param_spec_literals
         return self.check_unpacks_in_list(res)
 
-    def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = False) -> Type:
+    def anal_type(
+        self,
+        t: Type,
+        nested: bool = True,
+        *,
+        allow_param_spec: bool = False,
+        allow_ellipsis: bool = False,
+    ) -> Type:
         if nested:
             self.nesting_level += 1
         old_allow_required = self.allow_required
         self.allow_required = False
+        old_allow_ellipsis = self.allow_ellipsis
+        self.allow_ellipsis = allow_ellipsis
         try:
             analyzed = t.accept(self)
         finally:
             if nested:
                 self.nesting_level -= 1
             self.allow_required = old_allow_required
+            self.allow_ellipsis = old_allow_ellipsis
         if (
             not allow_param_spec
             and isinstance(analyzed, ParamSpecType)
diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test
index 4498b2ddc9cf..ecd4fc0a1f00 100644
--- a/test-data/unit/check-literal.test
+++ b/test-data/unit/check-literal.test
@@ -611,8 +611,7 @@ from typing_extensions import Literal
 a: (1, 2, 3)                    # E: Syntax error in type annotation \
                                 # N: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn)
 b: Literal[[1, 2, 3]]           # E: Parameter 1 of Literal[...] is invalid
-c: [1, 2, 3]                    # E: Bracketed expression "[...]" is not valid as a type \
-                                # N: Did you mean "List[...]"?
+c: [1, 2, 3]                    # E: Bracketed expression "[...]" is not valid as a type
 [builtins fixtures/tuple.pyi]
 [out]
 
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index 3a8ecdf81c7d..dee8a971f925 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -38,6 +38,74 @@ def foo6(x: Callable[[P], int]) -> None: ...  # E: Invalid location for ParamSpe
                                               # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]'
 [builtins fixtures/paramspec.pyi]
 
+[case testParamSpecImports]
+import lib
+from lib import Base
+
+class C(Base[[int]]):
+    def test(self, x: int): ...
+
+class D(lib.Base[[int]]):
+    def test(self, x: int): ...
+
+class E(lib.Base[...]): ...
+reveal_type(E().test)  # N: Revealed type is "def (*Any, **Any)"
+
+[file lib.py]
+from typing import Generic
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+class Base(Generic[P]):
+    def test(self, *args: P.args, **kwargs: P.kwargs) -> None:
+        ...
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecEllipsisInAliases]
+from typing import Any, Callable, Generic, TypeVar
+from typing_extensions import ParamSpec
+
+P = ParamSpec('P')
+R = TypeVar('R')
+Alias = Callable[P, R]
+
+class B(Generic[P]): ...
+Other = B[P]
+
+T = TypeVar('T', bound=Alias[..., Any])
+Alias[..., Any]  # E: Type application is only supported for generic classes
+B[...]
+Other[...]
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecEllipsisInConcatenate]
+from typing import Any, Callable, Generic, TypeVar
+from typing_extensions import ParamSpec, Concatenate
+
+P = ParamSpec('P')
+R = TypeVar('R')
+Alias = Callable[P, R]
+
+IntFun = Callable[Concatenate[int, ...], None]
+f: IntFun
+reveal_type(f)  # N: Revealed type is "def (builtins.int, *Any, **Any)"
+
+g: Callable[Concatenate[int, ...], None]
+reveal_type(g)  # N: Revealed type is "def (builtins.int, *Any, **Any)"
+
+class B(Generic[P]):
+    def test(self, *args: P.args, **kwargs: P.kwargs) -> None:
+        ...
+
+x: B[Concatenate[int, ...]]
+reveal_type(x.test)  # N: Revealed type is "def (builtins.int, *Any, **Any)"
+
+Bad = Callable[Concatenate[int, [int, str]], None]  # E: The last parameter to Concatenate needs to be a ParamSpec \
+                                                    # E: Bracketed expression "[...]" is not valid as a type
+def bad(fn: Callable[Concatenate[P, int], None]):  # E: The last parameter to Concatenate needs to be a ParamSpec
+    ...
+[builtins fixtures/paramspec.pyi]
+
 [case testParamSpecContextManagerLike]
 from typing import Callable, List, Iterator, TypeVar
 from typing_extensions import ParamSpec
@@ -1431,8 +1499,7 @@ from typing import ParamSpec, Generic, List, TypeVar, Callable
 P = ParamSpec("P")
 T = TypeVar("T")
 A = List[T]
-def f(x: A[[int, str]]) -> None: ...  # E: Bracketed expression "[...]" is not valid as a type \
-                                      # N: Did you mean "List[...]"?
+def f(x: A[[int, str]]) -> None: ...  # E: Bracketed expression "[...]" is not valid as a type
 def g(x: A[P]) -> None: ...  # E: Invalid location for ParamSpec "P" \
                              # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]'
 
diff --git a/test-data/unit/check-typevar-defaults.test b/test-data/unit/check-typevar-defaults.test
index 36ec125eb1a4..9015d353fa08 100644
--- a/test-data/unit/check-typevar-defaults.test
+++ b/test-data/unit/check-typevar-defaults.test
@@ -59,9 +59,9 @@ from typing import TypeVar, ParamSpec, Tuple
 from typing_extensions import TypeVarTuple, Unpack
 
 T1 = TypeVar("T1", default=2)  # E: TypeVar "default" must be a type
-T2 = TypeVar("T2", default=[int, str])  # E: Bracketed expression "[...]" is not valid as a type \
-                                        # N: Did you mean "List[...]"? \
-                                        # E: TypeVar "default" must be a type
+T2 = TypeVar("T2", default=[int])  # E: Bracketed expression "[...]" is not valid as a type \
+                                   # N: Did you mean "List[...]"? \
+                                   # E: TypeVar "default" must be a type
 
 P1 = ParamSpec("P1", default=int)  # E: The default argument to ParamSpec must be a list expression, ellipsis, or a ParamSpec
 P2 = ParamSpec("P2", default=2)  # E: The default argument to ParamSpec must be a list expression, ellipsis, or a ParamSpec
diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test
index a098dd8791d4..09d4da54bff3 100644
--- a/test-data/unit/semanal-errors.test
+++ b/test-data/unit/semanal-errors.test
@@ -810,8 +810,8 @@ class C(Generic[t]): pass
 cast(str + str, None)    # E: Cast target is not a type
 cast(C[str][str], None)  # E: Cast target is not a type
 cast(C[str + str], None) # E: Cast target is not a type
-cast([int, str], None)   # E: Bracketed expression "[...]" is not valid as a type \
-                         # N: Did you mean "List[...]"?
+cast([int], None)   # E: Bracketed expression "[...]" is not valid as a type \
+                    # N: Did you mean "List[...]"?
 [out]
 
 [case testInvalidCastTargetType]
@@ -859,8 +859,8 @@ Any(arg=str)  # E: Any(...) is no longer supported. Use cast(Any, ...) instead
 
 [case testTypeListAsType]
 
-def f(x:[int, str]) -> None: # E: Bracketed expression "[...]" is not valid as a type \
-                             # N: Did you mean "List[...]"?
+def f(x: [int]) -> None: # E: Bracketed expression "[...]" is not valid as a type \
+                         # N: Did you mean "List[...]"?
     pass
 [out]
 

From d7d502e5e63092322d57c607e2affac6cb5234b8 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 19 Aug 2023 14:49:55 +0100
Subject: [PATCH 063/288] Support self-types containing ParamSpec (#15903)

Fixes https://github.com/python/mypy/issues/14968
Fixes https://github.com/python/mypy/issues/13911

The fix is simple, as I predicted on Discord, we simply should use
`get_all_type_vars()` instead of `get_type_vars()` (that specifically
returns only `TypeVarType`). I also use this opportunity to tidy-up code
in `bind_self()`, it should be now more readable, and much faster
(especially when compiled with mypyc).

cc @A5rocks

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 mypy/typeops.py                    | 34 ++++++++++--------------
 test-data/unit/check-selftype.test | 42 ++++++++++++++++++++++++++++++
 2 files changed, 56 insertions(+), 20 deletions(-)

diff --git a/mypy/typeops.py b/mypy/typeops.py
index 22dbd9e9f42e..e01aad950573 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -303,7 +303,7 @@ class B(A): pass
         return cast(F, func)
     self_param_type = get_proper_type(func.arg_types[0])
 
-    variables: Sequence[TypeVarLikeType] = []
+    variables: Sequence[TypeVarLikeType]
     if func.variables and supported_self_type(self_param_type):
         from mypy.infer import infer_type_arguments
 
@@ -312,46 +312,40 @@ class B(A): pass
             original_type = erase_to_bound(self_param_type)
         original_type = get_proper_type(original_type)
 
-        all_ids = func.type_var_ids()
+        # Find which of method type variables appear in the type of "self".
+        self_ids = {tv.id for tv in get_all_type_vars(self_param_type)}
+        self_vars = [tv for tv in func.variables if tv.id in self_ids]
+
+        # Solve for these type arguments using the actual class or instance type.
         typeargs = infer_type_arguments(
-            func.variables, self_param_type, original_type, is_supertype=True
+            self_vars, self_param_type, original_type, is_supertype=True
         )
         if (
             is_classmethod
-            # TODO: why do we need the extra guards here?
             and any(isinstance(get_proper_type(t), UninhabitedType) for t in typeargs)
             and isinstance(original_type, (Instance, TypeVarType, TupleType))
         ):
-            # In case we call a classmethod through an instance x, fallback to type(x)
+            # In case we call a classmethod through an instance x, fallback to type(x).
             typeargs = infer_type_arguments(
-                func.variables, self_param_type, TypeType(original_type), is_supertype=True
+                self_vars, self_param_type, TypeType(original_type), is_supertype=True
             )
 
-        ids = [tid for tid in all_ids if any(tid == t.id for t in get_type_vars(self_param_type))]
-
-        # Technically, some constrains might be unsolvable, make them <nothing>.
+        # Update the method signature with the solutions found.
+        # Technically, some constraints might be unsolvable, make them <nothing>.
         to_apply = [t if t is not None else UninhabitedType() for t in typeargs]
-
-        def expand(target: Type) -> Type:
-            return expand_type(target, {id: to_apply[all_ids.index(id)] for id in ids})
-
-        arg_types = [expand(x) for x in func.arg_types[1:]]
-        ret_type = expand(func.ret_type)
-        variables = [v for v in func.variables if v.id not in ids]
+        func = expand_type(func, {tv.id: arg for tv, arg in zip(self_vars, to_apply)})
+        variables = [v for v in func.variables if v not in self_vars]
     else:
-        arg_types = func.arg_types[1:]
-        ret_type = func.ret_type
         variables = func.variables
 
     original_type = get_proper_type(original_type)
     if isinstance(original_type, CallableType) and original_type.is_type_obj():
         original_type = TypeType.make_normalized(original_type.ret_type)
     res = func.copy_modified(
-        arg_types=arg_types,
+        arg_types=func.arg_types[1:],
         arg_kinds=func.arg_kinds[1:],
         arg_names=func.arg_names[1:],
         variables=variables,
-        ret_type=ret_type,
         bound_args=[original_type],
     )
     return cast(F, res)
diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test
index d366e7c33799..77d2d519214a 100644
--- a/test-data/unit/check-selftype.test
+++ b/test-data/unit/check-selftype.test
@@ -1973,3 +1973,45 @@ class B(A):
         reveal_type(self.x.extra)  # N: Revealed type is "builtins.int"
         reveal_type(self.xs[0].extra)  # N: Revealed type is "builtins.int"
 [builtins fixtures/list.pyi]
+
+[case testSelfTypesWithParamSpecExtract]
+from typing import Any, Callable, Generic, TypeVar
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+F = TypeVar("F", bound=Callable[..., Any])
+class Example(Generic[F]):
+    def __init__(self, fn: F) -> None:
+        ...
+    def __call__(self: Example[Callable[P, Any]], *args: P.args, **kwargs: P.kwargs) -> None:
+        ...
+
+def test_fn(a: int, b: str) -> None:
+    ...
+
+example = Example(test_fn)
+example()  # E: Missing positional arguments "a", "b" in call to "__call__" of "Example"
+example(1, "b")  # OK
+[builtins fixtures/list.pyi]
+
+[case testSelfTypesWithParamSpecInfer]
+from typing import TypeVar, Protocol, Type, Callable
+from typing_extensions import ParamSpec
+
+R = TypeVar("R", covariant=True)
+P = ParamSpec("P")
+class AsyncP(Protocol[P]):
+    def meth(self, *args: P.args, **kwargs: P.kwargs) -> None:
+        ...
+
+class Async:
+    @classmethod
+    def async_func(cls: Type[AsyncP[P]]) -> Callable[P, int]:
+        ...
+
+class Add(Async):
+    def meth(self, x: int, y: int) -> None: ...
+
+reveal_type(Add.async_func())  # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> builtins.int"
+reveal_type(Add().async_func())  # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> builtins.int"
+[builtins fixtures/classmethod.pyi]

From 5af76714fa2c526007e045f9c834781f60660e6e Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 19 Aug 2023 11:41:26 -0700
Subject: [PATCH 064/288] Improve match narrowing and reachability analysis
 (#15882)

Fixes #12534, fixes #15878
---
 mypy/checker.py                     | 17 ++++++++++-
 test-data/unit/check-python310.test | 45 ++++++++++++++++++++++++++++-
 2 files changed, 60 insertions(+), 2 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 7625bf28a88c..87dff91758f5 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -4973,7 +4973,7 @@ def visit_match_stmt(self, s: MatchStmt) -> None:
                         self.push_type_map(pattern_map)
                         self.push_type_map(pattern_type.captures)
                     if g is not None:
-                        with self.binder.frame_context(can_skip=True, fall_through=3):
+                        with self.binder.frame_context(can_skip=False, fall_through=3):
                             gt = get_proper_type(self.expr_checker.accept(g))
 
                             if isinstance(gt, DeletedType):
@@ -4982,6 +4982,21 @@ def visit_match_stmt(self, s: MatchStmt) -> None:
                             guard_map, guard_else_map = self.find_isinstance_check(g)
                             else_map = or_conditional_maps(else_map, guard_else_map)
 
+                            # If the guard narrowed the subject, copy the narrowed types over
+                            if isinstance(p, AsPattern):
+                                case_target = p.pattern or p.name
+                                if isinstance(case_target, NameExpr):
+                                    for type_map in (guard_map, else_map):
+                                        if not type_map:
+                                            continue
+                                        for expr in list(type_map):
+                                            if not (
+                                                isinstance(expr, NameExpr)
+                                                and expr.fullname == case_target.fullname
+                                            ):
+                                                continue
+                                            type_map[s.subject] = type_map[expr]
+
                             self.push_type_map(guard_map)
                             self.accept(b)
                     else:
diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test
index 75293ce9d193..0fe6a3d5a5cc 100644
--- a/test-data/unit/check-python310.test
+++ b/test-data/unit/check-python310.test
@@ -1372,7 +1372,7 @@ match m:
         reveal_type(m)  # N: Revealed type is "__main__.Medal"
 
 [case testMatchNarrowUsingPatternGuardSpecialCase]
-def f(x: int | str) -> int:  # E: Missing return statement
+def f(x: int | str) -> int:
     match x:
         case x if isinstance(x, str):
             return 0
@@ -1973,3 +1973,46 @@ def f2(x: T) -> None:
         case DataFrame():  # type: ignore[misc]
             pass
 [builtins fixtures/primitives.pyi]
+
+[case testMatchGuardReachability]
+# flags: --warn-unreachable
+def f1(e: int) -> int:
+    match e:
+        case x if True:
+            return x
+        case _:
+            return 0  # E: Statement is unreachable
+    e = 0  # E: Statement is unreachable
+
+
+def f2(e: int) -> int:
+    match e:
+        case x if bool():
+            return x
+        case _:
+            return 0
+    e = 0  # E: Statement is unreachable
+
+def f3(e: int | str | bytes) -> int:
+    match e:
+        case x if isinstance(x, int):
+            return x
+        case [x]:
+            return 0  # E: Statement is unreachable
+        case str(x):
+            return 0
+    reveal_type(e)  # N: Revealed type is "builtins.bytes"
+    return 0
+
+def f4(e: int | str | bytes) -> int:
+    match e:
+        case int(x):
+            pass
+        case [x]:
+            return 0  # E: Statement is unreachable
+        case x if isinstance(x, str):
+            return 0
+    reveal_type(e)  # N: Revealed type is "Union[builtins.int, builtins.bytes]"
+    return 0
+
+[builtins fixtures/primitives.pyi]

From 5d909f1cdd7dde5e58259dc153959a38d084b988 Mon Sep 17 00:00:00 2001
From: Max Murin <maxmurin@dropbox.com>
Date: Sun, 20 Aug 2023 21:15:18 -0700
Subject: [PATCH 065/288] Sync typeshed for 1.6 (#15918)

Use the sync-typeshed script to sync the latest typeshed before the 1.6
release.

---------

Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Co-authored-by: AlexWaygood <alex.waygood@gmail.com>
---
 mypy/typeshed/stdlib/_typeshed/__init__.pyi | 13 +++++++++++++
 mypy/typeshed/stdlib/argparse.pyi           | 17 ++++++++++++++++-
 mypy/typeshed/stdlib/os/__init__.pyi        |  2 +-
 mypy/typeshed/stdlib/ssl.pyi                |  4 ++--
 4 files changed, 32 insertions(+), 4 deletions(-)

diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi
index 5d03142c6d71..7ae67292e8cd 100644
--- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi
+++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi
@@ -36,6 +36,19 @@ Incomplete: TypeAlias = Any
 # To describe a function parameter that is unused and will work with anything.
 Unused: TypeAlias = object
 
+# Used to mark arguments that default to a sentinel value. This prevents
+# stubtest from complaining about the default value not matching.
+#
+# def foo(x: int | None = sentinel) -> None: ...
+#
+# In cases where the sentinel object is exported and can be used by user code,
+# a construct like this is better:
+#
+# _SentinelType = NewType("_SentinelType", object)
+# sentinel: _SentinelType
+# def foo(x: int | None | _SentinelType = ...) -> None: ...
+sentinel = Any  # noqa: Y026
+
 # stable
 class IdentityFunction(Protocol):
     def __call__(self, __x: _T) -> _T: ...
diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi
index b59dd56ab921..0004250b17a9 100644
--- a/mypy/typeshed/stdlib/argparse.pyi
+++ b/mypy/typeshed/stdlib/argparse.pyi
@@ -1,4 +1,5 @@
 import sys
+from _typeshed import sentinel
 from collections.abc import Callable, Generator, Iterable, Sequence
 from re import Pattern
 from typing import IO, Any, Generic, NewType, NoReturn, Protocol, TypeVar, overload
@@ -334,7 +335,21 @@ class Action(_AttributeHolder):
     if sys.version_info >= (3, 9):
         def format_usage(self) -> str: ...
 
-if sys.version_info >= (3, 9):
+if sys.version_info >= (3, 12):
+    class BooleanOptionalAction(Action):
+        def __init__(
+            self,
+            option_strings: Sequence[str],
+            dest: str,
+            default: _T | str | None = None,
+            type: Callable[[str], _T] | FileType | None = sentinel,  # noqa: Y011
+            choices: Iterable[_T] | None = sentinel,  # noqa: Y011
+            required: bool = False,
+            help: str | None = None,
+            metavar: str | tuple[str, ...] | None = sentinel,  # noqa: Y011
+        ) -> None: ...
+
+elif sys.version_info >= (3, 9):
     class BooleanOptionalAction(Action):
         def __init__(
             self,
diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi
index efe80d82ffba..994595aae781 100644
--- a/mypy/typeshed/stdlib/os/__init__.pyi
+++ b/mypy/typeshed/stdlib/os/__init__.pyi
@@ -912,7 +912,7 @@ else:
             @property
             def si_code(self) -> int: ...
 
-        def waitid(__idtype: int, __ident: int, __options: int) -> waitid_result: ...
+        def waitid(__idtype: int, __ident: int, __options: int) -> waitid_result | None: ...
 
     def wait3(options: int) -> tuple[int, int, Any]: ...
     def wait4(pid: int, options: int) -> tuple[int, int, Any]: ...
diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi
index dd7285196ed9..1c49b130e48f 100644
--- a/mypy/typeshed/stdlib/ssl.pyi
+++ b/mypy/typeshed/stdlib/ssl.pyi
@@ -436,7 +436,7 @@ class SSLContext:
         server_side: bool = False,
         do_handshake_on_connect: bool = True,
         suppress_ragged_eofs: bool = True,
-        server_hostname: str | None = None,
+        server_hostname: str | bytes | None = None,
         session: SSLSession | None = None,
     ) -> SSLSocket: ...
     def wrap_bio(
@@ -444,7 +444,7 @@ class SSLContext:
         incoming: MemoryBIO,
         outgoing: MemoryBIO,
         server_side: bool = False,
-        server_hostname: str | None = None,
+        server_hostname: str | bytes | None = None,
         session: SSLSession | None = None,
     ) -> SSLObject: ...
     def session_stats(self) -> dict[str, int]: ...

From 2c1fd97986064161c542956bb3d9d5043dc0a480 Mon Sep 17 00:00:00 2001
From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com>
Date: Mon, 21 Aug 2023 20:35:04 +1000
Subject: [PATCH 066/288] =?UTF-8?q?(=F0=9F=8E=81)=20`StubGenerator.add=5Ft?=
 =?UTF-8?q?yping=5Fimport`=20returns=20the=20name=20(#15912)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

I noticed that almost all calls to `typing_name` also call
`add_typing_import`.

Co-authored-by: KotlinIsland <kotlinisland@users.noreply.github.com>
---
 mypy/stubgen.py | 55 ++++++++++++++++++++-----------------------------
 1 file changed, 22 insertions(+), 33 deletions(-)

diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index b6fc3e8b7377..aca836c52ce8 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -786,25 +786,20 @@ def visit_func_def(self, o: FuncDef) -> None:
         elif o.name in KNOWN_MAGIC_METHODS_RETURN_TYPES:
             retname = KNOWN_MAGIC_METHODS_RETURN_TYPES[o.name]
         elif has_yield_expression(o) or has_yield_from_expression(o):
-            self.add_typing_import("Generator")
+            generator_name = self.add_typing_import("Generator")
             yield_name = "None"
             send_name = "None"
             return_name = "None"
             if has_yield_from_expression(o):
-                self.add_typing_import("Incomplete")
-                yield_name = send_name = self.typing_name("Incomplete")
+                yield_name = send_name = self.add_typing_import("Incomplete")
             else:
                 for expr, in_assignment in all_yield_expressions(o):
                     if expr.expr is not None and not self.is_none_expr(expr.expr):
-                        self.add_typing_import("Incomplete")
-                        yield_name = self.typing_name("Incomplete")
+                        yield_name = self.add_typing_import("Incomplete")
                     if in_assignment:
-                        self.add_typing_import("Incomplete")
-                        send_name = self.typing_name("Incomplete")
+                        send_name = self.add_typing_import("Incomplete")
             if has_return_statement(o):
-                self.add_typing_import("Incomplete")
-                return_name = self.typing_name("Incomplete")
-            generator_name = self.typing_name("Generator")
+                return_name = self.add_typing_import("Incomplete")
             retname = f"{generator_name}[{yield_name}, {send_name}, {return_name}]"
         elif not has_return_statement(o) and o.abstract_status == NOT_ABSTRACT:
             retname = "None"
@@ -965,21 +960,19 @@ def get_base_types(self, cdef: ClassDef) -> list[str]:
                     nt_fields = self._get_namedtuple_fields(base)
                     assert isinstance(base.args[0], StrExpr)
                     typename = base.args[0].value
-                    if nt_fields is not None:
-                        fields_str = ", ".join(f"({f!r}, {t})" for f, t in nt_fields)
-                        namedtuple_name = self.typing_name("NamedTuple")
-                        base_types.append(f"{namedtuple_name}({typename!r}, [{fields_str}])")
-                        self.add_typing_import("NamedTuple")
-                    else:
+                    if nt_fields is None:
                         # Invalid namedtuple() call, cannot determine fields
-                        base_types.append(self.typing_name("Incomplete"))
+                        base_types.append(self.add_typing_import("Incomplete"))
+                        continue
+                    fields_str = ", ".join(f"({f!r}, {t})" for f, t in nt_fields)
+                    namedtuple_name = self.add_typing_import("NamedTuple")
+                    base_types.append(f"{namedtuple_name}({typename!r}, [{fields_str}])")
                 elif self.is_typed_namedtuple(base):
                     base_types.append(base.accept(p))
                 else:
                     # At this point, we don't know what the base class is, so we
                     # just use Incomplete as the base class.
-                    base_types.append(self.typing_name("Incomplete"))
-                    self.add_typing_import("Incomplete")
+                    base_types.append(self.add_typing_import("Incomplete"))
         for name, value in cdef.keywords.items():
             if name == "metaclass":
                 continue  # handled separately
@@ -1059,9 +1052,9 @@ def _get_namedtuple_fields(self, call: CallExpr) -> list[tuple[str, str]] | None
                     field_names.append(field.value)
             else:
                 return None  # Invalid namedtuple fields type
-            if field_names:
-                self.add_typing_import("Incomplete")
-            incomplete = self.typing_name("Incomplete")
+            if not field_names:
+                return []
+            incomplete = self.add_typing_import("Incomplete")
             return [(field_name, incomplete) for field_name in field_names]
         elif self.is_typed_namedtuple(call):
             fields_arg = call.args[1]
@@ -1092,8 +1085,7 @@ def process_namedtuple(self, lvalue: NameExpr, rvalue: CallExpr) -> None:
         if fields is None:
             self.annotate_as_incomplete(lvalue)
             return
-        self.add_typing_import("NamedTuple")
-        bases = self.typing_name("NamedTuple")
+        bases = self.add_typing_import("NamedTuple")
         # TODO: Add support for generic NamedTuples. Requires `Generic` as base class.
         class_def = f"{self._indent}class {lvalue.name}({bases}):"
         if len(fields) == 0:
@@ -1143,14 +1135,13 @@ def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None:
                     total = arg
                 else:
                     items.append((arg_name, arg))
-        self.add_typing_import("TypedDict")
+        bases = self.add_typing_import("TypedDict")
         p = AliasPrinter(self)
         if any(not key.isidentifier() or keyword.iskeyword(key) for key, _ in items):
             # Keep the call syntax if there are non-identifier or reserved keyword keys.
             self.add(f"{self._indent}{lvalue.name} = {rvalue.accept(p)}\n")
             self._state = VAR
         else:
-            bases = self.typing_name("TypedDict")
             # TODO: Add support for generic TypedDicts. Requires `Generic` as base class.
             if total is not None:
                 bases += f", total={total.accept(p)}"
@@ -1167,8 +1158,7 @@ def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None:
                 self._state = CLASS
 
     def annotate_as_incomplete(self, lvalue: NameExpr) -> None:
-        self.add_typing_import("Incomplete")
-        self.add(f"{self._indent}{lvalue.name}: {self.typing_name('Incomplete')}\n")
+        self.add(f"{self._indent}{lvalue.name}: {self.add_typing_import('Incomplete')}\n")
         self._state = VAR
 
     def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool:
@@ -1384,13 +1374,14 @@ def typing_name(self, name: str) -> str:
         else:
             return name
 
-    def add_typing_import(self, name: str) -> None:
+    def add_typing_import(self, name: str) -> str:
         """Add a name to be imported for typing, unless it's imported already.
 
         The import will be internal to the stub.
         """
         name = self.typing_name(name)
         self.import_tracker.require_name(name)
+        return name
 
     def add_import_line(self, line: str) -> None:
         """Add a line of text to the import section, unless it's already there."""
@@ -1448,11 +1439,9 @@ def get_str_type_of_node(
         if isinstance(rvalue, NameExpr) and rvalue.name in ("True", "False"):
             return "bool"
         if can_infer_optional and isinstance(rvalue, NameExpr) and rvalue.name == "None":
-            self.add_typing_import("Incomplete")
-            return f"{self.typing_name('Incomplete')} | None"
+            return f"{self.add_typing_import('Incomplete')} | None"
         if can_be_any:
-            self.add_typing_import("Incomplete")
-            return self.typing_name("Incomplete")
+            return self.add_typing_import("Incomplete")
         else:
             return ""
 

From e804e8d740631ecbdb3a70330a3ea8497e114e3a Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Mon, 21 Aug 2023 15:32:35 +0300
Subject: [PATCH 067/288] Fix `assert_type` failures when some nodes are
 deferred (#15920)

Now it is quite the same as `reveal_type`. Which is defined here:
https://github.com/python/mypy/blob/2c1fd97986064161c542956bb3d9d5043dc0a480/mypy/checkexpr.py#L4297

Closes #15851
---
 mypy/checkexpr.py                     |  3 +++
 test-data/unit/check-expressions.test | 17 +++++++++++++++++
 2 files changed, 20 insertions(+)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 420cfd990820..4d04390da84a 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -4271,6 +4271,9 @@ def visit_assert_type_expr(self, expr: AssertTypeExpr) -> Type:
             allow_none_return=True,
             always_allow_any=True,
         )
+        if self.chk.current_node_deferred:
+            return source_type
+
         target_type = expr.type
         proper_source_type = get_proper_type(source_type)
         if (
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
index 40ee28830b21..c213255997f8 100644
--- a/test-data/unit/check-expressions.test
+++ b/test-data/unit/check-expressions.test
@@ -1045,6 +1045,23 @@ def reduce_it(s: Scalar) -> Scalar:
 assert_type(reduce_it(True), Scalar)
 [builtins fixtures/tuple.pyi]
 
+[case testAssertTypeWithDeferredNodes]
+from typing import Callable, TypeVar, assert_type
+
+T = TypeVar("T")
+
+def dec(f: Callable[[], T]) -> Callable[[], T]:
+    return f
+
+def func() -> None:
+    some = _inner_func()
+    assert_type(some, int)
+
+@dec
+def _inner_func() -> int:
+    return 1
+[builtins fixtures/tuple.pyi]
+
 -- None return type
 -- ----------------
 

From 7141d6bcff9e26e774e88712015ca6bbe8307c9e Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 21 Aug 2023 23:24:46 +0100
Subject: [PATCH 068/288] More principled approach for callable vs callable
 inference (#15910)

Fixes https://github.com/python/mypy/issues/702 (one of the oldest open
issues)

The approach is quite simple, I essentially replicate the logic from
subtyping check, while replacing each `is_subtype()` call with
`infer_constraints()` call. Note that we don't have various options
available in `constraints.py` so I use all checks, even those that may
be skipped with some strictness flags (so we can infer as many
constraints as possible). Depending on the output of `mypy_primer` we
can try to tune this.

Note that while I was looking at the subtyping code, I noticed a couple of
inconsistencies for ParamSpecs; I added TODOs for them (and updated some
existing TODOs). I also deleted some code that should be dead code after
my previous cleanup.

Among the inconsistencies, most notably, subtyping between `Parameters` uses
the wrong (opposite) direction. Normally, the `Parameters` entity behaves
covariantly (w.r.t. the types of individual arguments) as a single big
argument, like a tuple plus a map. But then this entity appears in a
contravariant position in `Callable`. This is how we handle it in
`constraints.py`, `join.py`, `meet.py` etc. I tried to fix the
left/right order in `visit_parameters()`, but then one test failed (and
btw same test would also fail if I would try to fix variance in
`visit_instance()`). I decided to leave this for separate PR(s).
---
 mypy/constraints.py                   | 132 +++++++++++++++++++------
 mypy/subtypes.py                      |  32 +++----
 mypy/types.py                         |   8 +-
 test-data/unit/check-inference.test   | 133 ++++++++++++++++++++++++++
 test-data/unit/check-overloading.test |  10 ++
 5 files changed, 260 insertions(+), 55 deletions(-)

diff --git a/mypy/constraints.py b/mypy/constraints.py
index 26504ed06b3e..47f312117264 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -595,15 +595,11 @@ def visit_parameters(self, template: Parameters) -> list[Constraint]:
             return self.infer_against_any(template.arg_types, self.actual)
         if type_state.infer_polymorphic and isinstance(self.actual, Parameters):
             # For polymorphic inference we need to be able to infer secondary constraints
-            # in situations like [x: T] <: P <: [x: int].
-            res = []
-            if len(template.arg_types) == len(self.actual.arg_types):
-                for tt, at in zip(template.arg_types, self.actual.arg_types):
-                    # This avoids bogus constraints like T <: P.args
-                    if isinstance(at, ParamSpecType):
-                        continue
-                    res.extend(infer_constraints(tt, at, self.direction))
-            return res
+            # in situations like [x: T] <: P <: [x: int]. Note we invert direction, since
+            # this function expects direction between callables.
+            return infer_callable_arguments_constraints(
+                template, self.actual, neg_op(self.direction)
+            )
         raise RuntimeError("Parameters cannot be constrained to")
 
     # Non-leaf types
@@ -722,7 +718,8 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                         prefix = mapped_arg.prefix
                         if isinstance(instance_arg, Parameters):
                             # No such thing as variance for ParamSpecs, consider them invariant
-                            # TODO: constraints between prefixes
+                            # TODO: constraints between prefixes using
+                            # infer_callable_arguments_constraints()
                             suffix: Type = instance_arg.copy_modified(
                                 instance_arg.arg_types[len(prefix.arg_types) :],
                                 instance_arg.arg_kinds[len(prefix.arg_kinds) :],
@@ -793,7 +790,8 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                         prefix = template_arg.prefix
                         if isinstance(mapped_arg, Parameters):
                             # No such thing as variance for ParamSpecs, consider them invariant
-                            # TODO: constraints between prefixes
+                            # TODO: constraints between prefixes using
+                            # infer_callable_arguments_constraints()
                             suffix = mapped_arg.copy_modified(
                                 mapped_arg.arg_types[len(prefix.arg_types) :],
                                 mapped_arg.arg_kinds[len(prefix.arg_kinds) :],
@@ -962,24 +960,12 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                         unpack_constraints = build_constraints_for_simple_unpack(
                             template_types, actual_types, neg_op(self.direction)
                         )
-                        template_args = []
-                        cactual_args = []
                         res.extend(unpack_constraints)
                     else:
-                        template_args = template.arg_types
-                        cactual_args = cactual.arg_types
-                    # TODO: use some more principled "formal to actual" logic
-                    # instead of this lock-step loop over argument types. This identical
-                    # logic should be used in 5 places: in Parameters vs Parameters
-                    # inference, in Instance vs Instance inference for prefixes (two
-                    # branches), and in Callable vs Callable inference (two branches).
-                    for t, a in zip(template_args, cactual_args):
-                        # This avoids bogus constraints like T <: P.args
-                        if isinstance(a, (ParamSpecType, UnpackType)):
-                            # TODO: can we infer something useful for *T vs P?
-                            continue
                         # Negate direction due to function argument type contravariance.
-                        res.extend(infer_constraints(t, a, neg_op(self.direction)))
+                        res.extend(
+                            infer_callable_arguments_constraints(template, cactual, self.direction)
+                        )
             else:
                 prefix = param_spec.prefix
                 prefix_len = len(prefix.arg_types)
@@ -1028,11 +1014,9 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                     arg_kinds=cactual.arg_kinds[:prefix_len],
                     arg_names=cactual.arg_names[:prefix_len],
                 )
-
-                for t, a in zip(prefix.arg_types, cactual_prefix.arg_types):
-                    if isinstance(a, ParamSpecType):
-                        continue
-                    res.extend(infer_constraints(t, a, neg_op(self.direction)))
+                res.extend(
+                    infer_callable_arguments_constraints(prefix, cactual_prefix, self.direction)
+                )
 
             template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type
             if template.type_guard is not None:
@@ -1435,3 +1419,89 @@ def build_constraints_for_unpack(
                 for template_arg, item in zip(template_unpack.items, mapped_middle):
                     res.extend(infer_constraints(template_arg, item, direction))
     return res, mapped_prefix + mapped_suffix, template_prefix + template_suffix
+
+
+def infer_directed_arg_constraints(left: Type, right: Type, direction: int) -> list[Constraint]:
+    """Infer constraints between two arguments using direction between original callables."""
+    if isinstance(left, (ParamSpecType, UnpackType)) or isinstance(
+        right, (ParamSpecType, UnpackType)
+    ):
+        # This avoids bogus constraints like T <: P.args
+        # TODO: can we infer something useful for *T vs P?
+        return []
+    if direction == SUBTYPE_OF:
+        # We invert direction to account for argument contravariance.
+        return infer_constraints(left, right, neg_op(direction))
+    else:
+        return infer_constraints(right, left, neg_op(direction))
+
+
+def infer_callable_arguments_constraints(
+    template: CallableType | Parameters, actual: CallableType | Parameters, direction: int
+) -> list[Constraint]:
+    """Infer constraints between argument types of two callables.
+
+    This function essentially extracts four steps from are_parameters_compatible() in
+    subtypes.py that involve subtype checks between argument types. We keep the argument
+    matching logic, but ignore various strictness flags present there, and checks that
+    do not involve subtyping. Then in place of every subtype check we put an infer_constraints()
+    call for the same types.
+    """
+    res = []
+    if direction == SUBTYPE_OF:
+        left, right = template, actual
+    else:
+        left, right = actual, template
+    left_star = left.var_arg()
+    left_star2 = left.kw_arg()
+    right_star = right.var_arg()
+    right_star2 = right.kw_arg()
+
+    # Numbering of steps below matches the one in are_parameters_compatible() for convenience.
+    # Phase 1a: compare star vs star arguments.
+    if left_star is not None and right_star is not None:
+        res.extend(infer_directed_arg_constraints(left_star.typ, right_star.typ, direction))
+    if left_star2 is not None and right_star2 is not None:
+        res.extend(infer_directed_arg_constraints(left_star2.typ, right_star2.typ, direction))
+
+    # Phase 1b: compare left args with corresponding non-star right arguments.
+    for right_arg in right.formal_arguments():
+        left_arg = mypy.typeops.callable_corresponding_argument(left, right_arg)
+        if left_arg is None:
+            continue
+        res.extend(infer_directed_arg_constraints(left_arg.typ, right_arg.typ, direction))
+
+    # Phase 1c: compare left args with right *args.
+    if right_star is not None:
+        right_by_position = right.try_synthesizing_arg_from_vararg(None)
+        assert right_by_position is not None
+        i = right_star.pos
+        assert i is not None
+        while i < len(left.arg_kinds) and left.arg_kinds[i].is_positional():
+            left_by_position = left.argument_by_position(i)
+            assert left_by_position is not None
+            res.extend(
+                infer_directed_arg_constraints(
+                    left_by_position.typ, right_by_position.typ, direction
+                )
+            )
+            i += 1
+
+    # Phase 1d: compare left args with right **kwargs.
+    if right_star2 is not None:
+        right_names = {name for name in right.arg_names if name is not None}
+        left_only_names = set()
+        for name, kind in zip(left.arg_names, left.arg_kinds):
+            if name is None or kind.is_star() or name in right_names:
+                continue
+            left_only_names.add(name)
+
+        right_by_name = right.try_synthesizing_arg_from_kwarg(None)
+        assert right_by_name is not None
+        for name in left_only_names:
+            left_by_name = left.argument_by_name(name)
+            assert left_by_name is not None
+            res.extend(
+                infer_directed_arg_constraints(left_by_name.typ, right_by_name.typ, direction)
+            )
+    return res
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 11847858c62c..288de10cc234 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -590,6 +590,7 @@ def check_mixed(
                             ):
                                 nominal = False
                         else:
+                            # TODO: everywhere else ParamSpecs are handled as invariant.
                             if not check_type_parameter(
                                 lefta, righta, COVARIANT, self.proper_subtype, self.subtype_context
                             ):
@@ -666,13 +667,12 @@ def visit_unpack_type(self, left: UnpackType) -> bool:
         return False
 
     def visit_parameters(self, left: Parameters) -> bool:
-        if isinstance(self.right, (Parameters, CallableType)):
-            right = self.right
-            if isinstance(right, CallableType):
-                right = right.with_unpacked_kwargs()
+        if isinstance(self.right, Parameters):
+            # TODO: direction here should be opposite, this function expects
+            # order of callables, while parameters are contravariant.
             return are_parameters_compatible(
                 left,
-                right,
+                self.right,
                 is_compat=self._is_subtype,
                 ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names,
             )
@@ -723,14 +723,6 @@ def visit_callable_type(self, left: CallableType) -> bool:
         elif isinstance(right, TypeType):
             # This is unsound, we don't check the __init__ signature.
             return left.is_type_obj() and self._is_subtype(left.ret_type, right.item)
-        elif isinstance(right, Parameters):
-            # this doesn't check return types.... but is needed for is_equivalent
-            return are_parameters_compatible(
-                left.with_unpacked_kwargs(),
-                right,
-                is_compat=self._is_subtype,
-                ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names,
-            )
         else:
             return False
 
@@ -1456,7 +1448,6 @@ def g(x: int) -> int: ...
         right,
         is_compat=is_compat,
         ignore_pos_arg_names=ignore_pos_arg_names,
-        check_args_covariantly=check_args_covariantly,
         allow_partial_overlap=allow_partial_overlap,
         strict_concatenate_check=strict_concatenate_check,
     )
@@ -1480,7 +1471,6 @@ def are_parameters_compatible(
     *,
     is_compat: Callable[[Type, Type], bool],
     ignore_pos_arg_names: bool = False,
-    check_args_covariantly: bool = False,
     allow_partial_overlap: bool = False,
     strict_concatenate_check: bool = False,
 ) -> bool:
@@ -1534,7 +1524,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
 
     # Phase 1b: Check non-star args: for every arg right can accept, left must
     #           also accept. The only exception is if we are allowing partial
-    #           partial overlaps: in that case, we ignore optional args on the right.
+    #           overlaps: in that case, we ignore optional args on the right.
     for right_arg in right.formal_arguments():
         left_arg = mypy.typeops.callable_corresponding_argument(left, right_arg)
         if left_arg is None:
@@ -1548,7 +1538,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
 
     # Phase 1c: Check var args. Right has an infinite series of optional positional
     #           arguments. Get all further positional args of left, and make sure
-    #           they're more general then the corresponding member in right.
+    #           they're more general than the corresponding member in right.
     if right_star is not None:
         # Synthesize an anonymous formal argument for the right
         right_by_position = right.try_synthesizing_arg_from_vararg(None)
@@ -1575,7 +1565,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
 
     # Phase 1d: Check kw args. Right has an infinite series of optional named
     #           arguments. Get all further named args of left, and make sure
-    #           they're more general then the corresponding member in right.
+    #           they're more general than the corresponding member in right.
     if right_star2 is not None:
         right_names = {name for name in right.arg_names if name is not None}
         left_only_names = set()
@@ -1643,6 +1633,10 @@ def are_args_compatible(
     allow_partial_overlap: bool,
     is_compat: Callable[[Type, Type], bool],
 ) -> bool:
+    if left.required and right.required:
+        # If both arguments are required allow_partial_overlap has no effect.
+        allow_partial_overlap = False
+
     def is_different(left_item: object | None, right_item: object | None) -> bool:
         """Checks if the left and right items are different.
 
@@ -1670,7 +1664,7 @@ def is_different(left_item: object | None, right_item: object | None) -> bool:
 
     # If right's argument is optional, left's must also be
     # (unless we're relaxing the checks to allow potential
-    # rather then definite compatibility).
+    # rather than definite compatibility).
     if not allow_partial_overlap and not right.required and left.required:
         return False
 
diff --git a/mypy/types.py b/mypy/types.py
index d4e2fc7cb63c..301ce6e0cf18 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1545,9 +1545,6 @@ class FormalArgument(NamedTuple):
     required: bool
 
 
-# TODO: should this take bound typevars too? what would this take?
-#   ex: class Z(Generic[P, T]): ...; Z[[V], V]
-# What does a typevar even mean in this context?
 class Parameters(ProperType):
     """Type that represents the parameters to a function.
 
@@ -1559,6 +1556,8 @@ class Parameters(ProperType):
         "arg_names",
         "min_args",
         "is_ellipsis_args",
+        # TODO: variables don't really belong here, but they are used to allow hacky support
+        # for forall . Foo[[x: T], T] by capturing generic callable with ParamSpec, see #15909
         "variables",
     )
 
@@ -1602,7 +1601,7 @@ def copy_modified(
             variables=variables if variables is not _dummy else self.variables,
         )
 
-    # the following are copied from CallableType. Is there a way to decrease code duplication?
+    # TODO: here is a lot of code duplication with Callable type, fix this.
     def var_arg(self) -> FormalArgument | None:
         """The formal argument for *args."""
         for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)):
@@ -2046,7 +2045,6 @@ def param_spec(self) -> ParamSpecType | None:
         return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix)
 
     def expand_param_spec(self, c: Parameters) -> CallableType:
-        # TODO: try deleting variables from Parameters after new type inference is default.
         variables = c.variables
         return self.copy_modified(
             arg_types=self.arg_types[:-2] + c.arg_types,
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 9ee30b4df859..56d3fe2b4ce7 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -3553,3 +3553,136 @@ class E(D): ...
 
 reveal_type([E(), D()])  # N: Revealed type is "builtins.list[__main__.D]"
 reveal_type([D(), E()])  # N: Revealed type is "builtins.list[__main__.D]"
+
+[case testCallableInferenceAgainstCallablePosVsStar]
+from typing import TypeVar, Callable, Tuple
+
+T = TypeVar('T')
+S = TypeVar('S')
+
+def f(x: Callable[[T, S], None]) -> Tuple[T, S]: ...
+def g(*x: int) -> None: ...
+reveal_type(f(g))  # N: Revealed type is "Tuple[builtins.int, builtins.int]"
+[builtins fixtures/list.pyi]
+
+[case testCallableInferenceAgainstCallableStarVsPos]
+from typing import TypeVar, Callable, Tuple, Protocol
+
+T = TypeVar('T', contravariant=True)
+S = TypeVar('S', contravariant=True)
+
+class Call(Protocol[T, S]):
+    def __call__(self, __x: T, *args: S) -> None: ...
+
+def f(x: Call[T, S]) -> Tuple[T, S]: ...
+def g(*x: int) -> None: ...
+reveal_type(f(g))  # N: Revealed type is "Tuple[builtins.int, builtins.int]"
+[builtins fixtures/list.pyi]
+
+[case testCallableInferenceAgainstCallableNamedVsStar]
+from typing import TypeVar, Callable, Tuple, Protocol
+
+T = TypeVar('T', contravariant=True)
+S = TypeVar('S', contravariant=True)
+
+class Call(Protocol[T, S]):
+    def __call__(self, *, x: T, y: S) -> None: ...
+
+def f(x: Call[T, S]) -> Tuple[T, S]: ...
+def g(**kwargs: int) -> None: ...
+reveal_type(f(g))  # N: Revealed type is "Tuple[builtins.int, builtins.int]"
+[builtins fixtures/list.pyi]
+
+[case testCallableInferenceAgainstCallableStarVsNamed]
+from typing import TypeVar, Callable, Tuple, Protocol
+
+T = TypeVar('T', contravariant=True)
+S = TypeVar('S', contravariant=True)
+
+class Call(Protocol[T, S]):
+    def __call__(self, *, x: T, **kwargs: S) -> None: ...
+
+def f(x: Call[T, S]) -> Tuple[T, S]: ...
+def g(**kwargs: int) -> None: pass
+reveal_type(f(g))  # N: Revealed type is "Tuple[builtins.int, builtins.int]"
+[builtins fixtures/list.pyi]
+
+[case testCallableInferenceAgainstCallableNamedVsNamed]
+from typing import TypeVar, Callable, Tuple, Protocol
+
+T = TypeVar('T', contravariant=True)
+S = TypeVar('S', contravariant=True)
+
+class Call(Protocol[T, S]):
+    def __call__(self, *, x: T, y: S) -> None: ...
+
+def f(x: Call[T, S]) -> Tuple[T, S]: ...
+
+# Note: order of names is different w.r.t. protocol
+def g(*, y: int, x: str) -> None: pass
+reveal_type(f(g))  # N: Revealed type is "Tuple[builtins.str, builtins.int]"
+[builtins fixtures/list.pyi]
+
+[case testCallableInferenceAgainstCallablePosOnlyVsNamed]
+from typing import TypeVar, Callable, Tuple, Protocol
+
+T = TypeVar('T', contravariant=True)
+S = TypeVar('S', contravariant=True)
+
+class Call(Protocol[T]):
+    def __call__(self, *, x: T) -> None: ...
+
+def f(x: Call[T]) -> Tuple[T, T]: ...
+
+def g(__x: str) -> None: pass
+reveal_type(f(g))  # N: Revealed type is "Tuple[<nothing>, <nothing>]" \
+                   # E: Argument 1 to "f" has incompatible type "Callable[[str], None]"; expected "Call[<nothing>]"
+[builtins fixtures/list.pyi]
+
+[case testCallableInferenceAgainstCallableNamedVsPosOnly]
+from typing import TypeVar, Callable, Tuple, Protocol
+
+T = TypeVar('T', contravariant=True)
+S = TypeVar('S', contravariant=True)
+
+class Call(Protocol[T]):
+    def __call__(self, __x: T) -> None: ...
+
+def f(x: Call[T]) -> Tuple[T, T]: ...
+
+def g(*, x: str) -> None: pass
+reveal_type(f(g))  # N: Revealed type is "Tuple[<nothing>, <nothing>]" \
+                   # E: Argument 1 to "f" has incompatible type "Callable[[NamedArg(str, 'x')], None]"; expected "Call[<nothing>]"
+[builtins fixtures/list.pyi]
+
+[case testCallableInferenceAgainstCallablePosOnlyVsKwargs]
+from typing import TypeVar, Callable, Tuple, Protocol
+
+T = TypeVar('T', contravariant=True)
+S = TypeVar('S', contravariant=True)
+
+class Call(Protocol[T]):
+    def __call__(self, __x: T) -> None: ...
+
+def f(x: Call[T]) -> Tuple[T, T]: ...
+
+def g(**x: str) -> None: pass
+reveal_type(f(g))  # N: Revealed type is "Tuple[<nothing>, <nothing>]" \
+                   # E: Argument 1 to "f" has incompatible type "Callable[[KwArg(str)], None]"; expected "Call[<nothing>]"
+[builtins fixtures/list.pyi]
+
+[case testCallableInferenceAgainstCallableNamedVsArgs]
+from typing import TypeVar, Callable, Tuple, Protocol
+
+T = TypeVar('T', contravariant=True)
+S = TypeVar('S', contravariant=True)
+
+class Call(Protocol[T]):
+    def __call__(self, *, x: T) -> None: ...
+
+def f(x: Call[T]) -> Tuple[T, T]: ...
+
+def g(*args: str) -> None: pass
+reveal_type(f(g))  # N: Revealed type is "Tuple[<nothing>, <nothing>]" \
+                   # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[<nothing>]"
+[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index b778dc50b376..ede4a2e4cf62 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -6640,3 +6640,13 @@ def bar(x): ...
 
 reveal_type(bar)  # N: Revealed type is "Overload(def (builtins.int) -> builtins.float, def (builtins.str) -> builtins.str)"
 [builtins fixtures/paramspec.pyi]
+
+[case testOverloadOverlapWithNameOnlyArgs]
+from typing import overload
+
+@overload
+def d(x: int) -> int: ...
+@overload
+def d(f: int, *, x: int) -> str: ...
+def d(*args, **kwargs): ...
+[builtins fixtures/tuple.pyi]

From 48835a362d86eb9964b0350e4453daf14c76fe8e Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Wed, 23 Aug 2023 06:47:32 +0100
Subject: [PATCH 069/288] Fix stubtest mypy enum.Flag edge case (#15933)

Fix edge-case stubtest crashes when an instance of an enum.Flag that is not a
member of that enum.Flag is used as a parameter default

Fixes #15923.

Note: the test cases I've added reproduce the crash, but only if you're
using a compiled version of mypy. (Some of them only repro the crash on
<=py310, but some repro it on py311+ as well.)

We run stubtest tests in CI with compiled mypy, so they do repro the
crash in the context of our CI.
---
 mypy/stubtest.py          |   2 +-
 mypy/test/teststubtest.py | 103 ++++++++++++++++++++++++++++++++++++--
 2 files changed, 101 insertions(+), 4 deletions(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index 906a8c923b37..b2506e6dcc02 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -1553,7 +1553,7 @@ def anytype() -> mypy.types.AnyType:
     value: bool | int | str
     if isinstance(runtime, bytes):
         value = bytes_to_human_readable_repr(runtime)
-    elif isinstance(runtime, enum.Enum):
+    elif isinstance(runtime, enum.Enum) and isinstance(runtime.name, str):
         value = runtime.name
     elif isinstance(runtime, (bool, int, str)):
         value = runtime
diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py
index cd72bd9300d1..a6733a9e8bd0 100644
--- a/mypy/test/teststubtest.py
+++ b/mypy/test/teststubtest.py
@@ -64,6 +64,7 @@ def __init__(self, name: str) -> None: ...
 
 class Coroutine(Generic[_T_co, _S, _R]): ...
 class Iterable(Generic[_T_co]): ...
+class Iterator(Iterable[_T_co]): ...
 class Mapping(Generic[_K, _V]): ...
 class Match(Generic[AnyStr]): ...
 class Sequence(Iterable[_T_co]): ...
@@ -86,7 +87,9 @@ def __init__(self) -> None: pass
     def __repr__(self) -> str: pass
 class type: ...
 
-class tuple(Sequence[T_co], Generic[T_co]): ...
+class tuple(Sequence[T_co], Generic[T_co]):
+    def __ge__(self, __other: tuple[T_co, ...]) -> bool: pass
+
 class dict(Mapping[KT, VT]): ...
 
 class function: pass
@@ -105,6 +108,39 @@ def classmethod(f: T) -> T: ...
 def staticmethod(f: T) -> T: ...
 """
 
+stubtest_enum_stub = """
+import sys
+from typing import Any, TypeVar, Iterator
+
+_T = TypeVar('_T')
+
+class EnumMeta(type):
+    def __len__(self) -> int: pass
+    def __iter__(self: type[_T]) -> Iterator[_T]: pass
+    def __reversed__(self: type[_T]) -> Iterator[_T]: pass
+    def __getitem__(self: type[_T], name: str) -> _T: pass
+
+class Enum(metaclass=EnumMeta):
+    def __new__(cls: type[_T], value: object) -> _T: pass
+    def __repr__(self) -> str: pass
+    def __str__(self) -> str: pass
+    def __format__(self, format_spec: str) -> str: pass
+    def __hash__(self) -> Any: pass
+    def __reduce_ex__(self, proto: Any) -> Any: pass
+    name: str
+    value: Any
+
+class Flag(Enum):
+    def __or__(self: _T, other: _T) -> _T: pass
+    def __and__(self: _T, other: _T) -> _T: pass
+    def __xor__(self: _T, other: _T) -> _T: pass
+    def __invert__(self: _T) -> _T: pass
+    if sys.version_info >= (3, 11):
+        __ror__ = __or__
+        __rand__ = __and__
+        __rxor__ = __xor__
+"""
+
 
 def run_stubtest(
     stub: str, runtime: str, options: list[str], config_file: str | None = None
@@ -114,6 +150,8 @@ def run_stubtest(
             f.write(stubtest_builtins_stub)
         with open("typing.pyi", "w") as f:
             f.write(stubtest_typing_stub)
+        with open("enum.pyi", "w") as f:
+            f.write(stubtest_enum_stub)
         with open(f"{TEST_MODULE_NAME}.pyi", "w") as f:
             f.write(stub)
         with open(f"{TEST_MODULE_NAME}.py", "w") as f:
@@ -954,16 +992,15 @@ def fizz(self): pass
 
     @collect_cases
     def test_enum(self) -> Iterator[Case]:
+        yield Case(stub="import enum", runtime="import enum", error=None)
         yield Case(
             stub="""
-            import enum
             class X(enum.Enum):
                 a: int
                 b: str
                 c: str
             """,
             runtime="""
-            import enum
             class X(enum.Enum):
                 a = 1
                 b = "asdf"
@@ -971,6 +1008,66 @@ class X(enum.Enum):
             """,
             error="X.c",
         )
+        yield Case(
+            stub="""
+            class Flags1(enum.Flag):
+                a: int
+                b: int
+            def foo(x: Flags1 = ...) -> None: ...
+            """,
+            runtime="""
+            class Flags1(enum.Flag):
+                a = 1
+                b = 2
+            def foo(x=Flags1.a|Flags1.b): pass
+            """,
+            error=None,
+        )
+        yield Case(
+            stub="""
+            class Flags2(enum.Flag):
+                a: int
+                b: int
+            def bar(x: Flags2 | None = None) -> None: ...
+            """,
+            runtime="""
+            class Flags2(enum.Flag):
+                a = 1
+                b = 2
+            def bar(x=Flags2.a|Flags2.b): pass
+            """,
+            error="bar",
+        )
+        yield Case(
+            stub="""
+            class Flags3(enum.Flag):
+                a: int
+                b: int
+            def baz(x: Flags3 | None = ...) -> None: ...
+            """,
+            runtime="""
+            class Flags3(enum.Flag):
+                a = 1
+                b = 2
+            def baz(x=Flags3(0)): pass
+            """,
+            error=None,
+        )
+        yield Case(
+            stub="""
+            class Flags4(enum.Flag):
+                a: int
+                b: int
+            def spam(x: Flags4 | None = None) -> None: ...
+            """,
+            runtime="""
+            class Flags4(enum.Flag):
+                a = 1
+                b = 2
+            def spam(x=Flags4(0)): pass
+            """,
+            error="spam",
+        )
 
     @collect_cases
     def test_decorator(self) -> Iterator[Case]:

From 6f650cff9ab21f81069e0ae30c92eae94219ea63 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 23 Aug 2023 20:26:27 +0100
Subject: [PATCH 070/288] Streamline some elements of variadic types support
 (#15924)

Fixes https://github.com/python/mypy/issues/13981
Fixes https://github.com/python/mypy/issues/15241
Fixes https://github.com/python/mypy/issues/15495
Fixes https://github.com/python/mypy/issues/15633
Fixes https://github.com/python/mypy/issues/15667
Fixes https://github.com/python/mypy/issues/15897
Fixes https://github.com/python/mypy/issues/15929

OK, I started following the plan outlined in
https://github.com/python/mypy/pull/15879. In this PR I focused mostly
on "kinematics". Here are some notes (in random order):
* I decided to normalize `TupleType` and `Instance` items in
`semanal_typeargs.py` (not in the type constructors, like for unions).
It looks like a simpler way to normalize for now. After this, we can
rely on the fact that the only non-trivial (more below on what is trivial)
variadic item in a type list is either `*Ts` or `*tuple[X, ...]`. A
single top-level `TupleType` can appear in an unpack only as type of
`*args`.
* Callables turned out to be tricky. There is certain tight coupling
between `FuncDef.type` and `FuncDef.arguments` that makes it hard to
normalize prefix to be expressed as individual arguments _at
definition_. I faced exactly the same problem when I implemented `**`
unpacking for TypedDicts. So we have two choices: either handle prefixes
everywhere, or use normalization helper in relevant code. I propose to
go with the latter (it worked well for `**` unpacking).
* I decided to switch `Unpack` to be disallowed by default in
`typeanal.py`, only very specific positions are allowed now. Although this
required plumbing `allow_unpack` all the way from `semanal.py`,
conceptually it is simple. This is similar to how `ParamSpec` is
handled.
* This PR fixes all currently open crash issues (some intentionally,
some accidentally) plus a bunch of TODOs I found in the tests (but not
all).
* I decided to simplify `TypeAliasExpr` (and made it a simple reference to
the `SymbolNode`, like e.g. `TypedDictExpr` and `NamedTupleExpr`). This
is not strictly necessary for this PR, but it makes some parts of it a
bit simpler, and I wanted to do it for a long time.

Here is a more detailed plan of what I am leaving for future PRs (in
rough order of priority):
* Close non-crash open issues (it looks like there are only three, and
all seem to be straightforward)
* Handle trivial items in `UnpackType` gracefully. These are `<nothing>`
and `Any` (and also potentially `object`). They can appear e.g. after a
user error. Currently they can cause assert crashes. (Not sure what is
the best way to do this).
* Go over current places where `Unpack` is handled, and verify both
possible variadic items are handled.
* Audit variadic `Instance` constraints and subtyping (the latter is
probably OK, but the former may be broken).
* Audit `Callable` and `Tuple` subtyping for variadic-related edge cases
(constraints seem OK for these).
* Figure out story about `map_instance_to_supertype()` (if no changes
are needed, add tests for subclassing).
* Clear most remaining TODOs.
* Go once more over the large scale picture and check whether we have
some important parts missing (or unhandled interactions between those).
* Verify various "advanced" typing features work well with
`TypeVarTuple`s (and add some support if missing but looks important).
* Enable this feature by default.

I hope to finish these in next few weeks.
---
 mypy/checker.py                         |   5 +-
 mypy/checkexpr.py                       |  11 +-
 mypy/constraints.py                     |  46 ++++--
 mypy/expandtype.py                      | 111 +++-----------
 mypy/message_registry.py                |   3 +-
 mypy/mixedtraverser.py                  |   2 +-
 mypy/nodes.py                           |  17 +--
 mypy/semanal.py                         |  18 ++-
 mypy/semanal_typeargs.py                |  45 ++++--
 mypy/server/astmerge.py                 |   5 -
 mypy/server/deps.py                     |   2 +-
 mypy/strconv.py                         |   2 +-
 mypy/subtypes.py                        |  18 ++-
 mypy/typeanal.py                        |  72 ++++++++--
 mypy/typeops.py                         |   9 +-
 mypy/types.py                           |  80 ++++++++++-
 mypy/types_utils.py                     |  12 +-
 mypy/typevartuples.py                   |  15 +-
 test-data/unit/check-generics.test      |   1 -
 test-data/unit/check-typevar-tuple.test | 183 +++++++++++++++++++-----
 test-data/unit/check-varargs.test       |   2 +-
 test-data/unit/semanal-errors.test      |   9 +-
 22 files changed, 439 insertions(+), 229 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 87dff91758f5..a44601b83e21 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -4665,10 +4665,7 @@ def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]:
             isinstance(iterable, TupleType)
             and iterable.partial_fallback.type.fullname == "builtins.tuple"
         ):
-            joined: Type = UninhabitedType()
-            for item in iterable.items:
-                joined = join_types(joined, item)
-            return iterator, joined
+            return iterator, tuple_fallback(iterable).args[0]
         else:
             # Non-tuple iterable.
             return iterator, echk.check_method_call_by_name("__next__", iterator, [], [], expr)[0]
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 4d04390da84a..6de317f587cb 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -168,7 +168,7 @@
     UninhabitedType,
     UnionType,
     UnpackType,
-    flatten_nested_tuples,
+    find_unpack_in_list,
     flatten_nested_unions,
     get_proper_type,
     get_proper_types,
@@ -185,7 +185,6 @@
 )
 from mypy.typestate import type_state
 from mypy.typevars import fill_typevars
-from mypy.typevartuples import find_unpack_in_list
 from mypy.util import split_module_names
 from mypy.visitor import ExpressionVisitor
 
@@ -1600,7 +1599,7 @@ def check_callable_call(
         See the docstring of check_call for more information.
         """
         # Always unpack **kwargs before checking a call.
-        callee = callee.with_unpacked_kwargs()
+        callee = callee.with_unpacked_kwargs().with_normalized_var_args()
         if callable_name is None and callee.name:
             callable_name = callee.name
         ret_type = get_proper_type(callee.ret_type)
@@ -2409,7 +2408,12 @@ def check_argument_types(
                                 + unpacked_type.items[inner_unpack_index + 1 :]
                             )
                             callee_arg_kinds = [ARG_POS] * len(actuals)
+                    elif isinstance(unpacked_type, TypeVarTupleType):
+                        callee_arg_types = [orig_callee_arg_type]
+                        callee_arg_kinds = [ARG_STAR]
                     else:
+                        # TODO: Any and <nothing> can appear in Unpack (as a result of user error),
+                        # fail gracefully here and elsewhere (and/or normalize them away).
                         assert isinstance(unpacked_type, Instance)
                         assert unpacked_type.type.fullname == "builtins.tuple"
                         callee_arg_types = [unpacked_type.args[0]] * len(actuals)
@@ -4451,7 +4455,6 @@ class C(Generic[T, Unpack[Ts]]): ...
 
         prefix = next(i for (i, v) in enumerate(vars) if isinstance(v, TypeVarTupleType))
         suffix = len(vars) - prefix - 1
-        args = flatten_nested_tuples(args)
         if len(args) < len(vars) - 1:
             self.msg.incompatible_type_application(len(vars), len(args), ctx)
             return [AnyType(TypeOfAny.from_error)] * len(vars)
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 47f312117264..edce11e778ab 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -49,6 +49,7 @@
     UninhabitedType,
     UnionType,
     UnpackType,
+    find_unpack_in_list,
     get_proper_type,
     has_recursive_types,
     has_type_vars,
@@ -57,7 +58,7 @@
 )
 from mypy.types_utils import is_union_with_any
 from mypy.typestate import type_state
-from mypy.typevartuples import extract_unpack, find_unpack_in_list, split_with_mapped_and_template
+from mypy.typevartuples import extract_unpack, split_with_mapped_and_template
 
 if TYPE_CHECKING:
     from mypy.infer import ArgumentInferContext
@@ -155,16 +156,33 @@ def infer_constraints_for_callable(
                 # not to hold we can always handle the prefixes too.
                 inner_unpack = unpacked_type.items[0]
                 assert isinstance(inner_unpack, UnpackType)
-                inner_unpacked_type = inner_unpack.type
-                assert isinstance(inner_unpacked_type, TypeVarTupleType)
+                inner_unpacked_type = get_proper_type(inner_unpack.type)
                 suffix_len = len(unpacked_type.items) - 1
-                constraints.append(
-                    Constraint(
-                        inner_unpacked_type,
-                        SUPERTYPE_OF,
-                        TupleType(actual_types[:-suffix_len], inner_unpacked_type.tuple_fallback),
+                if isinstance(inner_unpacked_type, TypeVarTupleType):
+                    # Variadic item can be either *Ts...
+                    constraints.append(
+                        Constraint(
+                            inner_unpacked_type,
+                            SUPERTYPE_OF,
+                            TupleType(
+                                actual_types[:-suffix_len], inner_unpacked_type.tuple_fallback
+                            ),
+                        )
                     )
-                )
+                else:
+                    # ...or it can be a homogeneous tuple.
+                    assert (
+                        isinstance(inner_unpacked_type, Instance)
+                        and inner_unpacked_type.type.fullname == "builtins.tuple"
+                    )
+                    for at in actual_types[:-suffix_len]:
+                        constraints.extend(
+                            infer_constraints(inner_unpacked_type.args[0], at, SUPERTYPE_OF)
+                        )
+                # Now handle the suffix (if any).
+                if suffix_len:
+                    for tt, at in zip(unpacked_type.items[1:], actual_types[-suffix_len:]):
+                        constraints.extend(infer_constraints(tt, at, SUPERTYPE_OF))
             else:
                 assert False, "mypy bug: unhandled constraint inference case"
         else:
@@ -863,6 +881,16 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
             and self.direction == SUPERTYPE_OF
         ):
             for item in actual.items:
+                if isinstance(item, UnpackType):
+                    unpacked = get_proper_type(item.type)
+                    if isinstance(unpacked, TypeVarType):
+                        # Cannot infer anything for T from [T, ...] <: *Ts
+                        continue
+                    assert (
+                        isinstance(unpacked, Instance)
+                        and unpacked.type.fullname == "builtins.tuple"
+                    )
+                    item = unpacked.args[0]
                 cb = infer_constraints(template.args[0], item, SUPERTYPE_OF)
                 res.extend(cb)
             return res
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 6f69e09936db..e71f6429d9c0 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -2,7 +2,7 @@
 
 from typing import Final, Iterable, Mapping, Sequence, TypeVar, cast, overload
 
-from mypy.nodes import ARG_POS, ARG_STAR, ArgKind, Var
+from mypy.nodes import ARG_STAR, Var
 from mypy.state import state
 from mypy.types import (
     ANY_STRATEGY,
@@ -35,12 +35,11 @@
     UninhabitedType,
     UnionType,
     UnpackType,
-    flatten_nested_tuples,
     flatten_nested_unions,
     get_proper_type,
     split_with_prefix_and_suffix,
 )
-from mypy.typevartuples import find_unpack_in_list, split_with_instance
+from mypy.typevartuples import split_with_instance
 
 # Solving the import cycle:
 import mypy.type_visitor  # ruff: isort: skip
@@ -294,11 +293,10 @@ def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType
     def visit_parameters(self, t: Parameters) -> Type:
         return t.copy_modified(arg_types=self.expand_types(t.arg_types))
 
-    # TODO: can we simplify this method? It is too long.
-    def interpolate_args_for_unpack(
-        self, t: CallableType, var_arg: UnpackType
-    ) -> tuple[list[str | None], list[ArgKind], list[Type]]:
+    def interpolate_args_for_unpack(self, t: CallableType, var_arg: UnpackType) -> list[Type]:
         star_index = t.arg_kinds.index(ARG_STAR)
+        prefix = self.expand_types(t.arg_types[:star_index])
+        suffix = self.expand_types(t.arg_types[star_index + 1 :])
 
         var_arg_type = get_proper_type(var_arg.type)
         # We have something like Unpack[Tuple[Unpack[Ts], X1, X2]]
@@ -306,89 +304,19 @@ def interpolate_args_for_unpack(
             expanded_tuple = var_arg_type.accept(self)
             assert isinstance(expanded_tuple, ProperType) and isinstance(expanded_tuple, TupleType)
             expanded_items = expanded_tuple.items
+            fallback = var_arg_type.partial_fallback
         else:
             # We have plain Unpack[Ts]
+            assert isinstance(var_arg_type, TypeVarTupleType)
+            fallback = var_arg_type.tuple_fallback
             expanded_items_res = self.expand_unpack(var_arg)
             if isinstance(expanded_items_res, list):
                 expanded_items = expanded_items_res
             else:
                 # We got Any or <nothing>
-                arg_types = (
-                    t.arg_types[:star_index] + [expanded_items_res] + t.arg_types[star_index + 1 :]
-                )
-                return t.arg_names, t.arg_kinds, arg_types
-
-        expanded_unpack_index = find_unpack_in_list(expanded_items)
-        # This is the case where we just have Unpack[Tuple[X1, X2, X3]]
-        # (for example if either the tuple had no unpacks, or the unpack in the
-        # tuple got fully expanded to something with fixed length)
-        if expanded_unpack_index is None:
-            arg_names = (
-                t.arg_names[:star_index]
-                + [None] * len(expanded_items)
-                + t.arg_names[star_index + 1 :]
-            )
-            arg_kinds = (
-                t.arg_kinds[:star_index]
-                + [ARG_POS] * len(expanded_items)
-                + t.arg_kinds[star_index + 1 :]
-            )
-            arg_types = (
-                self.expand_types(t.arg_types[:star_index])
-                + expanded_items
-                + self.expand_types(t.arg_types[star_index + 1 :])
-            )
-        else:
-            # If Unpack[Ts] simplest form still has an unpack or is a
-            # homogenous tuple, then only the prefix can be represented as
-            # positional arguments, and we pass Tuple[Unpack[Ts-1], Y1, Y2]
-            # as the star arg, for example.
-            expanded_unpack = expanded_items[expanded_unpack_index]
-            assert isinstance(expanded_unpack, UnpackType)
-
-            # Extract the TypeVarTuple, so we can get a tuple fallback from it.
-            expanded_unpacked_tvt = expanded_unpack.type
-            if isinstance(expanded_unpacked_tvt, TypeVarTupleType):
-                fallback = expanded_unpacked_tvt.tuple_fallback
-            else:
-                # This can happen when tuple[Any, ...] is used to "patch" a variadic
-                # generic type without type arguments provided, or when substitution is
-                # homogeneous tuple.
-                assert isinstance(expanded_unpacked_tvt, ProperType)
-                assert isinstance(expanded_unpacked_tvt, Instance)
-                assert expanded_unpacked_tvt.type.fullname == "builtins.tuple"
-                fallback = expanded_unpacked_tvt
-
-            prefix_len = expanded_unpack_index
-            arg_names = t.arg_names[:star_index] + [None] * prefix_len + t.arg_names[star_index:]
-            arg_kinds = (
-                t.arg_kinds[:star_index] + [ARG_POS] * prefix_len + t.arg_kinds[star_index:]
-            )
-            if (
-                len(expanded_items) == 1
-                and isinstance(expanded_unpack.type, ProperType)
-                and isinstance(expanded_unpack.type, Instance)
-            ):
-                assert expanded_unpack.type.type.fullname == "builtins.tuple"
-                # Normalize *args: *tuple[X, ...] -> *args: X
-                arg_types = (
-                    self.expand_types(t.arg_types[:star_index])
-                    + [expanded_unpack.type.args[0]]
-                    + self.expand_types(t.arg_types[star_index + 1 :])
-                )
-            else:
-                arg_types = (
-                    self.expand_types(t.arg_types[:star_index])
-                    + expanded_items[:prefix_len]
-                    # Constructing the Unpack containing the tuple without the prefix.
-                    + [
-                        UnpackType(TupleType(expanded_items[prefix_len:], fallback))
-                        if len(expanded_items) - prefix_len > 1
-                        else expanded_items[prefix_len]
-                    ]
-                    + self.expand_types(t.arg_types[star_index + 1 :])
-                )
-        return arg_names, arg_kinds, arg_types
+                return prefix + [expanded_items_res] + suffix
+        new_unpack = UnpackType(TupleType(expanded_items, fallback))
+        return prefix + [new_unpack] + suffix
 
     def visit_callable_type(self, t: CallableType) -> CallableType:
         param_spec = t.param_spec()
@@ -427,20 +355,20 @@ def visit_callable_type(self, t: CallableType) -> CallableType:
                 )
 
         var_arg = t.var_arg()
+        needs_normalization = False
         if var_arg is not None and isinstance(var_arg.typ, UnpackType):
-            arg_names, arg_kinds, arg_types = self.interpolate_args_for_unpack(t, var_arg.typ)
+            needs_normalization = True
+            arg_types = self.interpolate_args_for_unpack(t, var_arg.typ)
         else:
-            arg_names = t.arg_names
-            arg_kinds = t.arg_kinds
             arg_types = self.expand_types(t.arg_types)
-
-        return t.copy_modified(
+        expanded = t.copy_modified(
             arg_types=arg_types,
-            arg_names=arg_names,
-            arg_kinds=arg_kinds,
             ret_type=t.ret_type.accept(self),
             type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None),
         )
+        if needs_normalization:
+            return expanded.with_normalized_var_args()
+        return expanded
 
     def visit_overloaded(self, t: Overloaded) -> Type:
         items: list[CallableType] = []
@@ -460,9 +388,6 @@ def expand_types_with_unpack(
         indicates use of Any or some error occurred earlier. In this case callers should
         simply propagate the resulting type.
         """
-        # TODO: this will cause a crash on aliases like A = Tuple[int, Unpack[A]].
-        # Although it is unlikely anyone will write this, we should fail gracefully.
-        typs = flatten_nested_tuples(typs)
         items: list[Type] = []
         for item in typs:
             if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType):
diff --git a/mypy/message_registry.py b/mypy/message_registry.py
index bd3b8571b69e..713ec2e3c759 100644
--- a/mypy/message_registry.py
+++ b/mypy/message_registry.py
@@ -171,7 +171,8 @@ def with_additional_msg(self, info: str) -> ErrorMessage:
 IMPLICIT_GENERIC_ANY_BUILTIN: Final = (
     'Implicit generic "Any". Use "{}" and specify generic parameters'
 )
-INVALID_UNPACK = "{} cannot be unpacked (must be tuple or TypeVarTuple)"
+INVALID_UNPACK: Final = "{} cannot be unpacked (must be tuple or TypeVarTuple)"
+INVALID_UNPACK_POSITION: Final = "Unpack is only valid in a variadic position"
 
 # TypeVar
 INCOMPATIBLE_TYPEVAR_VALUE: Final = 'Value of type variable "{}" of {} cannot be {}'
diff --git a/mypy/mixedtraverser.py b/mypy/mixedtraverser.py
index 771f87fc6bd6..dfde41859c67 100644
--- a/mypy/mixedtraverser.py
+++ b/mypy/mixedtraverser.py
@@ -49,7 +49,7 @@ def visit_class_def(self, o: ClassDef) -> None:
     def visit_type_alias_expr(self, o: TypeAliasExpr) -> None:
         super().visit_type_alias_expr(o)
         self.in_type_alias_expr = True
-        o.type.accept(self)
+        o.node.target.accept(self)
         self.in_type_alias_expr = False
 
     def visit_type_var_expr(self, o: TypeVarExpr) -> None:
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 452a4f643255..7efb01c1b18e 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -2625,27 +2625,14 @@ def deserialize(cls, data: JsonDict) -> TypeVarTupleExpr:
 class TypeAliasExpr(Expression):
     """Type alias expression (rvalue)."""
 
-    __slots__ = ("type", "tvars", "no_args", "node")
+    __slots__ = ("node",)
 
-    __match_args__ = ("type", "tvars", "no_args", "node")
+    __match_args__ = ("node",)
 
-    # The target type.
-    type: mypy.types.Type
-    # Names of type variables used to define the alias
-    tvars: list[str]
-    # Whether this alias was defined in bare form. Used to distinguish
-    # between
-    #     A = List
-    # and
-    #     A = List[Any]
-    no_args: bool
     node: TypeAlias
 
     def __init__(self, node: TypeAlias) -> None:
         super().__init__()
-        self.type = node.target
-        self.tvars = [v.name for v in node.alias_tvars]
-        self.no_args = node.no_args
         self.node = node
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
diff --git a/mypy/semanal.py b/mypy/semanal.py
index ef66c9276664..55d4e6a3f506 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -3680,7 +3680,10 @@ def disable_invalid_recursive_aliases(
         """Prohibit and fix recursive type aliases that are invalid/unsupported."""
         messages = []
         if is_invalid_recursive_alias({current_node}, current_node.target):
-            messages.append("Invalid recursive alias: a union item of itself")
+            target = (
+                "tuple" if isinstance(get_proper_type(current_node.target), TupleType) else "union"
+            )
+            messages.append(f"Invalid recursive alias: a {target} item of itself")
         if detect_diverging_alias(
             current_node, current_node.target, self.lookup_qualified, self.tvar_scope
         ):
@@ -4213,6 +4216,7 @@ def get_typevarlike_argument(
         *,
         allow_unbound_tvars: bool = False,
         allow_param_spec_literals: bool = False,
+        allow_unpack: bool = False,
         report_invalid_typevar_arg: bool = True,
     ) -> ProperType | None:
         try:
@@ -4224,6 +4228,7 @@ def get_typevarlike_argument(
                 report_invalid_types=False,
                 allow_unbound_tvars=allow_unbound_tvars,
                 allow_param_spec_literals=allow_param_spec_literals,
+                allow_unpack=allow_unpack,
             )
             if analyzed is None:
                 # Type variables are special: we need to place them in the symbol table
@@ -4375,6 +4380,7 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool:
                     s,
                     allow_unbound_tvars=True,
                     report_invalid_typevar_arg=False,
+                    allow_unpack=True,
                 )
                 default = tv_arg or AnyType(TypeOfAny.from_error)
                 if not isinstance(default, UnpackType):
@@ -5289,6 +5295,7 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None:
         # Probably always allow Parameters literals, and validate in semanal_typeargs.py
         base = expr.base
         if isinstance(base, RefExpr) and isinstance(base.node, TypeAlias):
+            allow_unpack = base.node.tvar_tuple_index is not None
             alias = base.node
             if any(isinstance(t, ParamSpecType) for t in alias.alias_tvars):
                 has_param_spec = True
@@ -5297,9 +5304,11 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None:
                 has_param_spec = False
                 num_args = -1
         elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo):
+            allow_unpack = base.node.has_type_var_tuple_type
             has_param_spec = base.node.has_param_spec_type
             num_args = len(base.node.type_vars)
         else:
+            allow_unpack = False
             has_param_spec = False
             num_args = -1
 
@@ -5317,6 +5326,7 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None:
                 allow_unbound_tvars=self.allow_unbound_tvars,
                 allow_placeholder=True,
                 allow_param_spec_literals=has_param_spec,
+                allow_unpack=allow_unpack,
             )
             if analyzed is None:
                 return None
@@ -6486,6 +6496,7 @@ def expr_to_analyzed_type(
         allow_type_any: bool = False,
         allow_unbound_tvars: bool = False,
         allow_param_spec_literals: bool = False,
+        allow_unpack: bool = False,
     ) -> Type | None:
         if isinstance(expr, CallExpr):
             # This is a legacy syntax intended mostly for Python 2, we keep it for
@@ -6516,6 +6527,7 @@ def expr_to_analyzed_type(
             allow_type_any=allow_type_any,
             allow_unbound_tvars=allow_unbound_tvars,
             allow_param_spec_literals=allow_param_spec_literals,
+            allow_unpack=allow_unpack,
         )
 
     def analyze_type_expr(self, expr: Expression) -> None:
@@ -6537,6 +6549,7 @@ def type_analyzer(
         allow_placeholder: bool = False,
         allow_required: bool = False,
         allow_param_spec_literals: bool = False,
+        allow_unpack: bool = False,
         report_invalid_types: bool = True,
         prohibit_self_type: str | None = None,
         allow_type_any: bool = False,
@@ -6555,6 +6568,7 @@ def type_analyzer(
             allow_placeholder=allow_placeholder,
             allow_required=allow_required,
             allow_param_spec_literals=allow_param_spec_literals,
+            allow_unpack=allow_unpack,
             prohibit_self_type=prohibit_self_type,
             allow_type_any=allow_type_any,
         )
@@ -6575,6 +6589,7 @@ def anal_type(
         allow_placeholder: bool = False,
         allow_required: bool = False,
         allow_param_spec_literals: bool = False,
+        allow_unpack: bool = False,
         report_invalid_types: bool = True,
         prohibit_self_type: str | None = None,
         allow_type_any: bool = False,
@@ -6612,6 +6627,7 @@ def anal_type(
             allow_placeholder=allow_placeholder,
             allow_required=allow_required,
             allow_param_spec_literals=allow_param_spec_literals,
+            allow_unpack=allow_unpack,
             report_invalid_types=report_invalid_types,
             prohibit_self_type=prohibit_self_type,
             allow_type_any=allow_type_any,
diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py
index e188955dabbb..8d8ef66b5c69 100644
--- a/mypy/semanal_typeargs.py
+++ b/mypy/semanal_typeargs.py
@@ -14,13 +14,14 @@
 from mypy.errors import Errors
 from mypy.messages import format_type
 from mypy.mixedtraverser import MixedTraverserVisitor
-from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile
+from mypy.nodes import ARG_STAR, Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile
 from mypy.options import Options
 from mypy.scope import Scope
 from mypy.subtypes import is_same_type, is_subtype
 from mypy.typeanal import set_any_tvars
 from mypy.types import (
     AnyType,
+    CallableType,
     Instance,
     Parameters,
     ParamSpecType,
@@ -116,20 +117,39 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None:
             # the expansion, most likely it will result in the same kind of error.
             get_proper_type(t).accept(self)
 
+    def visit_tuple_type(self, t: TupleType) -> None:
+        t.items = flatten_nested_tuples(t.items)
+        # We could also normalize Tuple[*tuple[X, ...]] -> tuple[X, ...] like in
+        # expand_type() but we can't do this here since it is not a translator visitor,
+        # and we need to return an Instance instead of TupleType.
+        super().visit_tuple_type(t)
+
+    def visit_callable_type(self, t: CallableType) -> None:
+        super().visit_callable_type(t)
+        # Normalize trivial unpack in var args as *args: *tuple[X, ...] -> *args: X
+        if t.is_var_arg:
+            star_index = t.arg_kinds.index(ARG_STAR)
+            star_type = t.arg_types[star_index]
+            if isinstance(star_type, UnpackType):
+                p_type = get_proper_type(star_type.type)
+                if isinstance(p_type, Instance):
+                    assert p_type.type.fullname == "builtins.tuple"
+                    t.arg_types[star_index] = p_type.args[0]
+
     def visit_instance(self, t: Instance) -> None:
         # Type argument counts were checked in the main semantic analyzer pass. We assume
         # that the counts are correct here.
         info = t.type
         if isinstance(info, FakeInfo):
             return  # https://github.com/python/mypy/issues/11079
+        t.args = tuple(flatten_nested_tuples(t.args))
+        # TODO: fix #15410 and #15411.
         self.validate_args(info.name, t.args, info.defn.type_vars, t)
         super().visit_instance(t)
 
     def validate_args(
         self, name: str, args: Sequence[Type], type_vars: list[TypeVarLikeType], ctx: Context
     ) -> bool:
-        # TODO: we need to do flatten_nested_tuples and validate arg count for instances
-        # similar to how do we do this for type aliases above, but this may have perf penalty.
         if any(isinstance(v, TypeVarTupleType) for v in type_vars):
             prefix = next(i for (i, v) in enumerate(type_vars) if isinstance(v, TypeVarTupleType))
             tvt = type_vars[prefix]
@@ -198,6 +218,7 @@ def validate_args(
         return is_error
 
     def visit_unpack_type(self, typ: UnpackType) -> None:
+        super().visit_unpack_type(typ)
         proper_type = get_proper_type(typ.type)
         if isinstance(proper_type, TupleType):
             return
@@ -205,18 +226,14 @@ def visit_unpack_type(self, typ: UnpackType) -> None:
             return
         if isinstance(proper_type, Instance) and proper_type.type.fullname == "builtins.tuple":
             return
-        if (
-            isinstance(proper_type, UnboundType)
-            or isinstance(proper_type, AnyType)
-            and proper_type.type_of_any == TypeOfAny.from_error
-        ):
+        if isinstance(proper_type, AnyType) and proper_type.type_of_any == TypeOfAny.from_error:
             return
-
-        # TODO: Infer something when it can't be unpacked to allow rest of
-        # typechecking to work.
-        self.fail(
-            message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ
-        )
+        if not isinstance(proper_type, UnboundType):
+            # Avoid extra errors if there were some errors already.
+            self.fail(
+                message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ
+            )
+        typ.type = AnyType(TypeOfAny.from_error)
 
     def check_type_var_values(
         self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context
diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py
index f58a4eedabc8..862c3898a383 100644
--- a/mypy/server/astmerge.py
+++ b/mypy/server/astmerge.py
@@ -73,7 +73,6 @@
     SymbolNode,
     SymbolTable,
     TypeAlias,
-    TypeAliasExpr,
     TypedDictExpr,
     TypeInfo,
     Var,
@@ -326,10 +325,6 @@ def visit_enum_call_expr(self, node: EnumCallExpr) -> None:
         self.process_synthetic_type_info(node.info)
         super().visit_enum_call_expr(node)
 
-    def visit_type_alias_expr(self, node: TypeAliasExpr) -> None:
-        self.fixup_type(node.type)
-        super().visit_type_alias_expr(node)
-
     # Others
 
     def visit_var(self, node: Var) -> None:
diff --git a/mypy/server/deps.py b/mypy/server/deps.py
index ed85b74f2206..9ed2d4549629 100644
--- a/mypy/server/deps.py
+++ b/mypy/server/deps.py
@@ -472,7 +472,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
                 self.add_dependency(make_trigger(class_name + ".__init__"))
                 self.add_dependency(make_trigger(class_name + ".__new__"))
             if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr):
-                self.add_type_dependencies(rvalue.analyzed.type)
+                self.add_type_dependencies(rvalue.analyzed.node.target)
             elif typ:
                 self.add_type_dependencies(typ)
         else:
diff --git a/mypy/strconv.py b/mypy/strconv.py
index c428addd43aa..42a07c7f62fa 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -511,7 +511,7 @@ def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> str:
         return self.dump(a, o)
 
     def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> str:
-        return f"TypeAliasExpr({self.stringify_type(o.type)})"
+        return f"TypeAliasExpr({self.stringify_type(o.node.target)})"
 
     def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> str:
         return f"NamedTupleExpr:{o.line}({o.info.name}, {self.stringify_type(o.info.tuple_type) if o.info.tuple_type is not None else None})"
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 288de10cc234..58ae4efdf582 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -660,6 +660,8 @@ def visit_type_var_tuple(self, left: TypeVarTupleType) -> bool:
         return self._is_subtype(left.upper_bound, self.right)
 
     def visit_unpack_type(self, left: UnpackType) -> bool:
+        # TODO: Ideally we should not need this (since it is not a real type).
+        # Instead callers (upper level types) should handle it when it appears in type list.
         if isinstance(self.right, UnpackType):
             return self._is_subtype(left.type, self.right.type)
         if isinstance(self.right, Instance) and self.right.type.fullname == "builtins.object":
@@ -744,7 +746,15 @@ def visit_tuple_type(self, left: TupleType) -> bool:
                     # TODO: We shouldn't need this special case. This is currently needed
                     #       for isinstance(x, tuple), though it's unclear why.
                     return True
-                return all(self._is_subtype(li, iter_type) for li in left.items)
+                for li in left.items:
+                    if isinstance(li, UnpackType):
+                        unpack = get_proper_type(li.type)
+                        if isinstance(unpack, Instance):
+                            assert unpack.type.fullname == "builtins.tuple"
+                            li = unpack.args[0]
+                    if not self._is_subtype(li, iter_type):
+                        return False
+                return True
             elif self._is_subtype(left.partial_fallback, right) and self._is_subtype(
                 mypy.typeops.tuple_fallback(left), right
             ):
@@ -752,6 +762,7 @@ def visit_tuple_type(self, left: TupleType) -> bool:
             return False
         elif isinstance(right, TupleType):
             if len(left.items) != len(right.items):
+                # TODO: handle tuple with variadic items better.
                 return False
             if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)):
                 return False
@@ -1385,8 +1396,8 @@ def g(x: int) -> int: ...
         whether or not we check the args covariantly.
     """
     # Normalize both types before comparing them.
-    left = left.with_unpacked_kwargs()
-    right = right.with_unpacked_kwargs()
+    left = left.with_unpacked_kwargs().with_normalized_var_args()
+    right = right.with_unpacked_kwargs().with_normalized_var_args()
 
     if is_compat_return is None:
         is_compat_return = is_compat
@@ -1539,6 +1550,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
     # Phase 1c: Check var args. Right has an infinite series of optional positional
     #           arguments. Get all further positional args of left, and make sure
     #           they're more general than the corresponding member in right.
+    # TODO: are we handling UnpackType correctly here?
     if right_star is not None:
         # Synthesize an anonymous formal argument for the right
         right_by_position = right.try_synthesizing_arg_from_vararg(None)
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index b15b5c7654ba..14b37539afea 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -195,6 +195,7 @@ def __init__(
         allow_placeholder: bool = False,
         allow_required: bool = False,
         allow_param_spec_literals: bool = False,
+        allow_unpack: bool = False,
         report_invalid_types: bool = True,
         prohibit_self_type: str | None = None,
         allowed_alias_tvars: list[TypeVarLikeType] | None = None,
@@ -241,6 +242,8 @@ def __init__(
         self.prohibit_self_type = prohibit_self_type
         # Allow variables typed as Type[Any] and type (useful for base classes).
         self.allow_type_any = allow_type_any
+        self.allow_type_var_tuple = False
+        self.allow_unpack = allow_unpack
 
     def lookup_qualified(
         self, name: str, ctx: Context, suppress_errors: bool = False
@@ -277,7 +280,10 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool)
                     return PlaceholderType(
                         node.fullname,
                         self.anal_array(
-                            t.args, allow_param_spec=True, allow_param_spec_literals=True
+                            t.args,
+                            allow_param_spec=True,
+                            allow_param_spec_literals=True,
+                            allow_unpack=True,
                         ),
                         t.line,
                     )
@@ -365,6 +371,13 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool)
                     self.fail(f'TypeVarTuple "{t.name}" is unbound', t, code=codes.VALID_TYPE)
                     return AnyType(TypeOfAny.from_error)
                 assert isinstance(tvar_def, TypeVarTupleType)
+                if not self.allow_type_var_tuple:
+                    self.fail(
+                        f'TypeVarTuple "{t.name}" is only valid with an unpack',
+                        t,
+                        code=codes.VALID_TYPE,
+                    )
+                    return AnyType(TypeOfAny.from_error)
                 if len(t.args) > 0:
                     self.fail(
                         f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE
@@ -390,6 +403,7 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool)
                     t.args,
                     allow_param_spec=True,
                     allow_param_spec_literals=node.has_param_spec_type,
+                    allow_unpack=node.tvar_tuple_index is not None,
                 )
                 if node.has_param_spec_type and len(node.alias_tvars) == 1:
                     an_args = self.pack_paramspec_args(an_args)
@@ -531,7 +545,7 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ
                 instance = self.named_type("builtins.tuple", [self.anal_type(t.args[0])])
                 instance.line = t.line
                 return instance
-            return self.tuple_type(self.anal_array(t.args))
+            return self.tuple_type(self.anal_array(t.args, allow_unpack=True))
         elif fullname == "typing.Union":
             items = self.anal_array(t.args)
             return UnionType.make_union(items)
@@ -631,7 +645,13 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ
             if len(t.args) != 1:
                 self.fail("Unpack[...] requires exactly one type argument", t)
                 return AnyType(TypeOfAny.from_error)
-            return UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column)
+            if not self.allow_unpack:
+                self.fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE)
+                return AnyType(TypeOfAny.from_error)
+            self.allow_type_var_tuple = True
+            result = UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column)
+            self.allow_type_var_tuple = False
+            return result
         elif fullname in SELF_TYPE_NAMES:
             if t.args:
                 self.fail("Self type cannot have type arguments", t)
@@ -666,7 +686,7 @@ def analyze_type_with_type_info(
 
         if len(args) > 0 and info.fullname == "builtins.tuple":
             fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line)
-            return TupleType(self.anal_array(args), fallback, ctx.line)
+            return TupleType(self.anal_array(args, allow_unpack=True), fallback, ctx.line)
 
         # Analyze arguments and (usually) construct Instance type. The
         # number of type arguments and their values are
@@ -679,7 +699,10 @@ def analyze_type_with_type_info(
         instance = Instance(
             info,
             self.anal_array(
-                args, allow_param_spec=True, allow_param_spec_literals=info.has_param_spec_type
+                args,
+                allow_param_spec=True,
+                allow_param_spec_literals=info.has_param_spec_type,
+                allow_unpack=info.has_type_var_tuple_type,
             ),
             ctx.line,
             ctx.column,
@@ -715,7 +738,7 @@ def analyze_type_with_type_info(
             if info.special_alias:
                 return instantiate_type_alias(
                     info.special_alias,
-                    # TODO: should we allow NamedTuples generic in ParamSpec?
+                    # TODO: should we allow NamedTuples generic in ParamSpec and TypeVarTuple?
                     self.anal_array(args),
                     self.fail,
                     False,
@@ -723,7 +746,9 @@ def analyze_type_with_type_info(
                     self.options,
                     use_standard_error=True,
                 )
-            return tup.copy_modified(items=self.anal_array(tup.items), fallback=instance)
+            return tup.copy_modified(
+                items=self.anal_array(tup.items, allow_unpack=True), fallback=instance
+            )
         td = info.typeddict_type
         if td is not None:
             # The class has a TypedDict[...] base class so it will be
@@ -940,7 +965,23 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type:
                     self.anal_star_arg_type(t.arg_types[-1], ARG_STAR2, nested=nested),
                 ]
             else:
-                arg_types = self.anal_array(t.arg_types, nested=nested)
+                arg_types = self.anal_array(t.arg_types, nested=nested, allow_unpack=True)
+                star_index = None
+                if ARG_STAR in arg_kinds:
+                    star_index = arg_kinds.index(ARG_STAR)
+                star2_index = None
+                if ARG_STAR2 in arg_kinds:
+                    star2_index = arg_kinds.index(ARG_STAR2)
+                validated_args: list[Type] = []
+                for i, at in enumerate(arg_types):
+                    if isinstance(at, UnpackType) and i not in (star_index, star2_index):
+                        self.fail(
+                            message_registry.INVALID_UNPACK_POSITION, at, code=codes.VALID_TYPE
+                        )
+                        validated_args.append(AnyType(TypeOfAny.from_error))
+                    else:
+                        validated_args.append(at)
+                arg_types = validated_args
             # If there were multiple (invalid) unpacks, the arg types list will become shorter,
             # we need to trim the kinds/names as well to avoid crashes.
             arg_kinds = t.arg_kinds[: len(arg_types)]
@@ -1012,7 +1053,7 @@ def anal_star_arg_type(self, t: Type, kind: ArgKind, nested: bool) -> Type:
                         line=t.line,
                         column=t.column,
                     )
-        return self.anal_type(t, nested=nested)
+        return self.anal_type(t, nested=nested, allow_unpack=True)
 
     def visit_overloaded(self, t: Overloaded) -> Type:
         # Overloaded types are manually constructed in semanal.py by analyzing the
@@ -1051,7 +1092,7 @@ def visit_tuple_type(self, t: TupleType) -> Type:
             if t.partial_fallback.type
             else self.named_type("builtins.tuple", [any_type])
         )
-        return TupleType(self.anal_array(t.items), fallback, t.line)
+        return TupleType(self.anal_array(t.items, allow_unpack=True), fallback, t.line)
 
     def visit_typeddict_type(self, t: TypedDictType) -> Type:
         items = {
@@ -1534,12 +1575,17 @@ def anal_array(
         *,
         allow_param_spec: bool = False,
         allow_param_spec_literals: bool = False,
+        allow_unpack: bool = False,
     ) -> list[Type]:
         old_allow_param_spec_literals = self.allow_param_spec_literals
         self.allow_param_spec_literals = allow_param_spec_literals
         res: list[Type] = []
         for t in a:
-            res.append(self.anal_type(t, nested, allow_param_spec=allow_param_spec))
+            res.append(
+                self.anal_type(
+                    t, nested, allow_param_spec=allow_param_spec, allow_unpack=allow_unpack
+                )
+            )
         self.allow_param_spec_literals = old_allow_param_spec_literals
         return self.check_unpacks_in_list(res)
 
@@ -1549,6 +1595,7 @@ def anal_type(
         nested: bool = True,
         *,
         allow_param_spec: bool = False,
+        allow_unpack: bool = False,
         allow_ellipsis: bool = False,
     ) -> Type:
         if nested:
@@ -1557,6 +1604,8 @@ def anal_type(
         self.allow_required = False
         old_allow_ellipsis = self.allow_ellipsis
         self.allow_ellipsis = allow_ellipsis
+        old_allow_unpack = self.allow_unpack
+        self.allow_unpack = allow_unpack
         try:
             analyzed = t.accept(self)
         finally:
@@ -1564,6 +1613,7 @@ def anal_type(
                 self.nesting_level -= 1
             self.allow_required = old_allow_required
             self.allow_ellipsis = old_allow_ellipsis
+            self.allow_unpack = old_allow_unpack
         if (
             not allow_param_spec
             and isinstance(analyzed, ParamSpecType)
diff --git a/mypy/typeops.py b/mypy/typeops.py
index e01aad950573..0e0bc348942e 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -105,19 +105,18 @@ def tuple_fallback(typ: TupleType) -> Instance:
             unpacked_type = get_proper_type(item.type)
             if isinstance(unpacked_type, TypeVarTupleType):
                 items.append(unpacked_type.upper_bound)
-            elif isinstance(unpacked_type, TupleType):
-                # TODO: might make sense to do recursion here to support nested unpacks
-                # of tuple constants
-                items.extend(unpacked_type.items)
             elif (
                 isinstance(unpacked_type, Instance)
                 and unpacked_type.type.fullname == "builtins.tuple"
             ):
                 items.append(unpacked_type.args[0])
+            elif isinstance(unpacked_type, (AnyType, UninhabitedType)):
+                continue
             else:
-                raise NotImplementedError
+                raise NotImplementedError(unpacked_type)
         else:
             items.append(item)
+    # TODO: we should really use a union here, tuple types are special.
     return Instance(info, [join_type_list(items)], extra_attrs=typ.partial_fallback.extra_attrs)
 
 
diff --git a/mypy/types.py b/mypy/types.py
index 301ce6e0cf18..c71412f4ea58 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -2075,6 +2075,68 @@ def with_unpacked_kwargs(self) -> NormalizedCallableType:
             )
         )
 
+    def with_normalized_var_args(self) -> Self:
+        var_arg = self.var_arg()
+        if not var_arg or not isinstance(var_arg.typ, UnpackType):
+            return self
+        unpacked = get_proper_type(var_arg.typ.type)
+        if not isinstance(unpacked, TupleType):
+            # Note that we don't normalize *args: *tuple[X, ...] -> *args: X,
+            # this should be done once in semanal_typeargs.py for user-defined types,
+            # and we ourselves should never construct such a type.
+            return self
+        unpack_index = find_unpack_in_list(unpacked.items)
+        if unpack_index == 0 and len(unpacked.items) > 1:
+            # Already normalized.
+            return self
+
+        # Boilerplate:
+        var_arg_index = self.arg_kinds.index(ARG_STAR)
+        types_prefix = self.arg_types[:var_arg_index]
+        kinds_prefix = self.arg_kinds[:var_arg_index]
+        names_prefix = self.arg_names[:var_arg_index]
+        types_suffix = self.arg_types[var_arg_index + 1 :]
+        kinds_suffix = self.arg_kinds[var_arg_index + 1 :]
+        names_suffix = self.arg_names[var_arg_index + 1 :]
+        no_name: str | None = None  # to silence mypy
+
+        # Now we have something non-trivial to do.
+        if unpack_index is None:
+            # Plain *Tuple[X, Y, Z] -> replace with ARG_POS completely
+            types_middle = unpacked.items
+            kinds_middle = [ARG_POS] * len(unpacked.items)
+            names_middle = [no_name] * len(unpacked.items)
+        else:
+            # *Tuple[X, *Ts, Y, Z] or *Tuple[X, *tuple[T, ...], X, Z], here
+            # we replace the prefix by ARG_POS (this is how some places expect
+            # Callables to be represented)
+            nested_unpack = unpacked.items[unpack_index]
+            assert isinstance(nested_unpack, UnpackType)
+            nested_unpacked = get_proper_type(nested_unpack.type)
+            if unpack_index == len(unpacked.items) - 1:
+                # Normalize also single item tuples like
+                #   *args: *Tuple[*tuple[X, ...]] -> *args: X
+                #   *args: *Tuple[*Ts] -> *args: *Ts
+                # This may be not strictly necessary, but these are very verbose.
+                if isinstance(nested_unpacked, Instance):
+                    assert nested_unpacked.type.fullname == "builtins.tuple"
+                    new_unpack = nested_unpacked.args[0]
+                else:
+                    assert isinstance(nested_unpacked, TypeVarTupleType)
+                    new_unpack = nested_unpack
+            else:
+                new_unpack = UnpackType(
+                    unpacked.copy_modified(items=unpacked.items[unpack_index:])
+                )
+            types_middle = unpacked.items[:unpack_index] + [new_unpack]
+            kinds_middle = [ARG_POS] * unpack_index + [ARG_STAR]
+            names_middle = [no_name] * unpack_index + [self.arg_names[var_arg_index]]
+        return self.copy_modified(
+            arg_types=types_prefix + types_middle + types_suffix,
+            arg_kinds=kinds_prefix + kinds_middle + kinds_suffix,
+            arg_names=names_prefix + names_middle + names_suffix,
+        )
+
     def __hash__(self) -> int:
         # self.is_type_obj() will fail if self.fallback.type is a FakeInfo
         if isinstance(self.fallback.type, FakeInfo):
@@ -2259,10 +2321,6 @@ def __init__(
     ) -> None:
         super().__init__(line, column)
         self.partial_fallback = fallback
-        # TODO: flatten/normalize unpack items (very similar to unions) here.
-        # Probably also for instances, type aliases, callables, and Unpack itself. For example,
-        # tuple[*tuple[X, ...], ...] -> tuple[X, ...] and Tuple[*tuple[X, ...]] -> tuple[X, ...].
-        # Currently normalization happens in expand_type() et al., which is sub-optimal.
         self.items = items
         self.implicit = implicit
 
@@ -3426,6 +3484,20 @@ def flatten_nested_unions(
     return flat_items
 
 
+def find_unpack_in_list(items: Sequence[Type]) -> int | None:
+    unpack_index: int | None = None
+    for i, item in enumerate(items):
+        if isinstance(item, UnpackType):
+            # We cannot fail here, so we must check this in an earlier
+            # semanal phase.
+            # Funky code here avoids mypyc narrowing the type of unpack_index.
+            old_index = unpack_index
+            assert old_index is None
+            # Don't return so that we can also sanity check there is only one.
+            unpack_index = i
+    return unpack_index
+
+
 def flatten_nested_tuples(types: Sequence[Type]) -> list[Type]:
     """Recursively flatten TupleTypes nested with Unpack.
 
diff --git a/mypy/types_utils.py b/mypy/types_utils.py
index 7f2e38ef3753..f289ac3e9ed1 100644
--- a/mypy/types_utils.py
+++ b/mypy/types_utils.py
@@ -54,7 +54,7 @@ def strip_type(typ: Type) -> Type:
 
 
 def is_invalid_recursive_alias(seen_nodes: set[TypeAlias], target: Type) -> bool:
-    """Flag aliases like A = Union[int, A] (and similar mutual aliases).
+    """Flag aliases like A = Union[int, A], T = tuple[int, *T] (and similar mutual aliases).
 
     Such aliases don't make much sense, and cause problems in later phases.
     """
@@ -64,9 +64,15 @@ def is_invalid_recursive_alias(seen_nodes: set[TypeAlias], target: Type) -> bool
         assert target.alias, f"Unfixed type alias {target.type_ref}"
         return is_invalid_recursive_alias(seen_nodes | {target.alias}, get_proper_type(target))
     assert isinstance(target, ProperType)
-    if not isinstance(target, UnionType):
+    if not isinstance(target, (UnionType, TupleType)):
         return False
-    return any(is_invalid_recursive_alias(seen_nodes, item) for item in target.items)
+    if isinstance(target, UnionType):
+        return any(is_invalid_recursive_alias(seen_nodes, item) for item in target.items)
+    for item in target.items:
+        if isinstance(item, UnpackType):
+            if is_invalid_recursive_alias(seen_nodes, item.type):
+                return True
+    return False
 
 
 def is_bad_type_type_item(item: Type) -> bool:
diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py
index 29c800140eec..bcb5e96b615c 100644
--- a/mypy/typevartuples.py
+++ b/mypy/typevartuples.py
@@ -9,25 +9,12 @@
     ProperType,
     Type,
     UnpackType,
+    find_unpack_in_list,
     get_proper_type,
     split_with_prefix_and_suffix,
 )
 
 
-def find_unpack_in_list(items: Sequence[Type]) -> int | None:
-    unpack_index: int | None = None
-    for i, item in enumerate(items):
-        if isinstance(item, UnpackType):
-            # We cannot fail here, so we must check this in an earlier
-            # semanal phase.
-            # Funky code here avoids mypyc narrowing the type of unpack_index.
-            old_index = unpack_index
-            assert old_index is None
-            # Don't return so that we can also sanity check there is only one.
-            unpack_index = i
-    return unpack_index
-
-
 def split_with_instance(
     typ: Instance,
 ) -> tuple[tuple[Type, ...], tuple[Type, ...], tuple[Type, ...]]:
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 95a7bdd2b2cd..93674c0c2d5c 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -3360,7 +3360,6 @@ class Foo(Generic[Unpack[Ts]]): ...
 class Bar(Generic[Unpack[Ts], T]): ...
 
 def dec(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], List[T]]: ...
-# TODO: do not crash on Foo[Us] (with missing Unpack), instead give an error.
 def f(*args: Unpack[Us]) -> Foo[Unpack[Us]]: ...
 reveal_type(dec(f))  # N: Revealed type is "def [Ts] (*Unpack[Ts`1]) -> builtins.list[__main__.Foo[Unpack[Ts`1]]]"
 g: Callable[[Unpack[Us]], Foo[Unpack[Us]]]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index b28b2ead45e7..58fc1265ae99 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -60,9 +60,10 @@ reveal_type(f(f_args2))  # N: Revealed type is "Tuple[builtins.str]"
 reveal_type(f(f_args3))  # N: Revealed type is "Tuple[builtins.str, builtins.str, builtins.bool]"
 f(empty)  # E: Argument 1 to "f" has incompatible type "Tuple[()]"; expected "Tuple[int]"
 f(bad_args)  # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[int, str]"
-# TODO: This hits a crash where we assert len(templates.items) == 1. See visit_tuple_type
-# in mypy/constraints.py.
-#f(var_len_tuple)
+
+# The reason for the error is subtle: actual can be empty, formal cannot.
+reveal_type(f(var_len_tuple))  # N: Revealed type is "Tuple[builtins.str, Unpack[builtins.tuple[builtins.int, ...]]]" \
+                               # E: Argument 1 to "f" has incompatible type "Tuple[int, ...]"; expected "Tuple[int, Unpack[Tuple[int, ...]]]"
 
 g_args: Tuple[str, int]
 reveal_type(g(g_args))  # N: Revealed type is "Tuple[builtins.str, builtins.str]"
@@ -123,13 +124,10 @@ reveal_type(empty)  # N: Revealed type is "__main__.Variadic[Unpack[builtins.tup
 bad: Variadic[Unpack[Tuple[int, ...]], str, Unpack[Tuple[bool, ...]]]  # E: More than one Unpack in a type is not allowed
 reveal_type(bad)  # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[builtins.int, ...]], builtins.str]"
 
-# TODO: This is tricky to fix because we need typeanal to know whether the current
-# location is valid for an Unpack or not.
-# bad2: Unpack[Tuple[int, ...]]
+bad2: Unpack[Tuple[int, ...]]  # E: Unpack is only valid in a variadic position
 
 m1: Mixed1[int, str, bool]
 reveal_type(m1)  # N: Revealed type is "__main__.Mixed1[builtins.int, builtins.str, builtins.bool]"
-
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTupleGenericClassWithFunctions]
@@ -148,7 +146,6 @@ def foo(t: Variadic[int, Unpack[Ts], object]) -> Tuple[int, Unpack[Ts]]:
 
 v: Variadic[int, str, bool, object]
 reveal_type(foo(v))  # N: Revealed type is "Tuple[builtins.int, builtins.str, builtins.bool]"
-
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTupleGenericClassWithMethods]
@@ -168,7 +165,6 @@ class Variadic(Generic[T, Unpack[Ts], S]):
 
 v: Variadic[float, str, bool, object]
 reveal_type(v.foo(0))  # N: Revealed type is "Tuple[builtins.int, builtins.str, builtins.bool]"
-
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTupleIsNotValidAliasTarget]
@@ -211,8 +207,8 @@ shape = (Height(480), Width(640))
 x: Array[Height, Width] = Array(shape)
 reveal_type(abs(x))  # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]"
 reveal_type(x + x)  # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]"
-
 [builtins fixtures/tuple.pyi]
+
 [case testTypeVarTuplePep646ArrayExampleWithDType]
 from typing import Generic, Tuple, TypeVar, Protocol, NewType
 from typing_extensions import TypeVarTuple, Unpack
@@ -247,7 +243,6 @@ shape = (Height(480), Width(640))
 x: Array[float, Height, Width] = Array(shape)
 reveal_type(abs(x))  # N: Revealed type is "__main__.Array[builtins.float, __main__.Height, __main__.Width]"
 reveal_type(x + x)  # N: Revealed type is "__main__.Array[builtins.float, __main__.Height, __main__.Width]"
-
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTuplePep646ArrayExampleInfer]
@@ -293,8 +288,8 @@ c = del_batch_axis(b)
 reveal_type(c)  # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]"
 d = add_batch_channels(a)
 reveal_type(d)  # N: Revealed type is "__main__.Array[__main__.Batch, __main__.Height, __main__.Width, __main__.Channels]"
-
 [builtins fixtures/tuple.pyi]
+
 [case testTypeVarTuplePep646TypeVarConcatenation]
 from typing import Generic, TypeVar, NewType, Tuple
 from typing_extensions import TypeVarTuple, Unpack
@@ -311,6 +306,7 @@ def prefix_tuple(
 z = prefix_tuple(x=0, y=(True, 'a'))
 reveal_type(z)  # N: Revealed type is "Tuple[builtins.int, builtins.bool, builtins.str]"
 [builtins fixtures/tuple.pyi]
+
 [case testTypeVarTuplePep646TypeVarTupleUnpacking]
 from typing import Generic, TypeVar, NewType, Any, Tuple
 from typing_extensions import TypeVarTuple, Unpack
@@ -363,8 +359,6 @@ reveal_type(bad)  # N: Revealed type is "def [Ts, Ts2] (x: Tuple[builtins.int, U
 def bad2(x: Tuple[int, Unpack[Tuple[int, ...]], str, Unpack[Tuple[str, ...]]]) -> None:  # E: More than one Unpack in a type is not allowed
     ...
 reveal_type(bad2)  # N: Revealed type is "def (x: Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]], builtins.str])"
-
-
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTuplePep646TypeVarStarArgsBasic]
@@ -380,8 +374,8 @@ def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]:
     return args
 
 reveal_type(args_to_tuple(1, 'a'))  # N: Revealed type is "Tuple[Literal[1]?, Literal['a']?]"
-
 [builtins fixtures/tuple.pyi]
+
 [case testTypeVarTuplePep646TypeVarStarArgs]
 from typing import Tuple
 from typing_extensions import TypeVarTuple, Unpack
@@ -410,8 +404,6 @@ with_prefix_suffix(*bad_t)  # E: Too few arguments for "with_prefix_suffix"
 
 def foo(*args: Unpack[Ts]) -> None:
     reveal_type(with_prefix_suffix(True, "bar", *args, 5))  # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]"
-
-
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTuplePep646TypeVarStarArgsFixedLengthTuple]
@@ -422,17 +414,23 @@ def foo(*args: Unpack[Tuple[int, str]]) -> None:
     reveal_type(args)  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
 
 foo(0, "foo")
-foo(0, 1)  # E: Argument 2 to "foo" has incompatible type "int"; expected "Unpack[Tuple[int, str]]"
-foo("foo", "bar")  # E: Argument 1 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, str]]"
-foo(0, "foo", 1)  # E: Invalid number of arguments
-foo(0)  # E: Invalid number of arguments
-foo()  # E: Invalid number of arguments
+foo(0, 1)  # E: Argument 2 to "foo" has incompatible type "int"; expected "str"
+foo("foo", "bar")  # E: Argument 1 to "foo" has incompatible type "str"; expected "int"
+foo(0, "foo", 1)  # E: Too many arguments for "foo"
+foo(0)  # E: Too few arguments for "foo"
+foo()  # E: Too few arguments for "foo"
 foo(*(0, "foo"))
 
-# TODO: fix this case to do something sensible.
-#def foo2(*args: Unpack[Tuple[bool, Unpack[Tuple[int, str]], bool]]) -> None:
-#    reveal_type(args)
+def foo2(*args: Unpack[Tuple[bool, Unpack[Tuple[int, str]], bool]]) -> None:
+    reveal_type(args)  # N: Revealed type is "Tuple[builtins.bool, builtins.int, builtins.str, builtins.bool]"
+
+# It is hard to normalize callable types in a definition, because there is a deep relation between `FuncDef.type`
+# and `FuncDef.arguments`; therefore various typeops need to be sure to normalize Callable types before using them.
+reveal_type(foo2)  # N: Revealed type is "def (*args: Unpack[Tuple[builtins.bool, builtins.int, builtins.str, builtins.bool]])"
 
+class C:
+    def foo2(self, *args: Unpack[Tuple[bool, Unpack[Tuple[int, str]], bool]]) -> None: ...
+reveal_type(C().foo2)  # N: Revealed type is "def (*args: Unpack[Tuple[builtins.bool, builtins.int, builtins.str, builtins.bool]])"
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTuplePep646TypeVarStarArgsVariableLengthTuple]
@@ -443,8 +441,7 @@ def foo(*args: Unpack[Tuple[int, ...]]) -> None:
     reveal_type(args)  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
 
 foo(0, 1, 2)
-# TODO: this should say 'expected "int"' rather than the unpack
-foo(0, 1, "bar")  # E: Argument 3 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, ...]]"
+foo(0, 1, "bar")  # E: Argument 3 to "foo" has incompatible type "str"; expected "int"
 
 
 def foo2(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]) -> None:
@@ -453,9 +450,9 @@ def foo2(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]) -> None
     # reveal_type(args[1])
 
 foo2("bar", 1, 2, 3, False, True)
-foo2(0, 1, 2, 3, False, True)  # E: Argument 1 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]"
-foo2("bar", "bar", 2, 3, False, True)  # E: Argument 2 to "foo2" has incompatible type "str"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]"
-foo2("bar", 1, 2, 3, 4, True)  # E: Argument 5 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]"
+foo2(0, 1, 2, 3, False, True)  # E: Argument 1 to "foo2" has incompatible type "int"; expected "str"
+foo2("bar", "bar", 2, 3, False, True)  # E: Argument 2 to "foo2" has incompatible type "str"; expected "Unpack[Tuple[Unpack[Tuple[int, ...]], bool, bool]]"
+foo2("bar", 1, 2, 3, 4, True)  # E: Argument 5 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[Unpack[Tuple[int, ...]], bool, bool]]"
 foo2(*("bar", 1, 2, 3, False, True))
 [builtins fixtures/tuple.pyi]
 
@@ -550,8 +547,7 @@ def call(
     *args: Unpack[Ts],
 ) -> None:
     ...
-    # TODO: exposes unhandled case in checkexpr
-    # target(*args)
+    target(*args)
 
 class A:
     def func(self, arg1: int, arg2: str) -> None: ...
@@ -569,7 +565,6 @@ call(A().func, 0, 1)  # E: Argument 1 to "call" has incompatible type "Callable[
 call(A().func2, 0, 0)
 call(A().func3, 0, 1, 2)
 call(A().func3)
-
 [builtins fixtures/tuple.pyi]
 
 [case testVariadicAliasBasicTuple]
@@ -805,3 +800,125 @@ reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple
 y: A[Unpack[Tuple[bool, ...]]]
 reveal_type(y)  # N: Revealed type is "Tuple[builtins.bool, Unpack[builtins.tuple[builtins.bool, ...]], builtins.bool, builtins.bool]"
 [builtins fixtures/tuple.pyi]
+
+[case testBanPathologicalRecursiveTuples]
+from typing import Tuple
+from typing_extensions import Unpack
+A = Tuple[int, Unpack[A]]  # E: Invalid recursive alias: a tuple item of itself
+B = Tuple[int, Unpack[C]]  # E: Invalid recursive alias: a tuple item of itself \
+                           # E: Name "C" is used before definition
+C = Tuple[int, Unpack[B]]
+x: A
+y: B
+z: C
+reveal_type(x)  # N: Revealed type is "Any"
+reveal_type(y)  # N: Revealed type is "Any"
+reveal_type(z)  # N: Revealed type is "Tuple[builtins.int, Unpack[Any]]"
+[builtins fixtures/tuple.pyi]
+
+[case testInferenceAgainstGenericVariadicWithBadType]
+# flags: --new-type-inference
+from typing import TypeVar, Callable, Generic
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+Ts = TypeVarTuple("Ts")
+Us = TypeVarTuple("Us")
+
+class Foo(Generic[Unpack[Ts]]): ...
+
+def dec(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], T]: ...
+def f(*args: Unpack[Us]) -> Foo[Us]: ...  # E: TypeVarTuple "Us" is only valid with an unpack
+dec(f)  # No crash
+[builtins fixtures/tuple.pyi]
+
+[case testHomogeneousGenericTupleUnpackInferenceNoCrash1]
+from typing import Any, TypeVar, Tuple, Type, Optional
+from typing_extensions import Unpack
+
+T = TypeVar("T")
+def convert(obj: Any, *to_classes: Unpack[Tuple[Type[T], ...]]) -> Optional[T]:
+    ...
+
+x = convert(1, int, float)
+reveal_type(x)  # N: Revealed type is "Union[builtins.float, None]"
+[builtins fixtures/tuple.pyi]
+
+[case testHomogeneousGenericTupleUnpackInferenceNoCrash2]
+from typing import TypeVar, Tuple, Callable, Iterable
+from typing_extensions import Unpack
+
+T = TypeVar("T")
+def combine(x: T, y: T) -> T: ...
+def reduce(fn: Callable[[T, T], T], xs: Iterable[T]) -> T: ...
+
+def pipeline(*xs: Unpack[Tuple[int, Unpack[Tuple[str, ...]], bool]]) -> None:
+    reduce(combine, xs)
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicStarArgsCallNoCrash]
+from typing import TypeVar, Callable, Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+X = TypeVar("X")
+Y = TypeVar("Y")
+Xs = TypeVarTuple("Xs")
+Ys = TypeVarTuple("Ys")
+
+def nil() -> Tuple[()]:
+    return ()
+
+def cons(
+    f: Callable[[X], Y],
+    g: Callable[[Unpack[Xs]], Tuple[Unpack[Ys]]],
+) -> Callable[[X, Unpack[Xs]], Tuple[Y, Unpack[Ys]]]:
+    def wrapped(x: X, *xs: Unpack[Xs]) -> Tuple[Y, Unpack[Ys]]:
+        y, ys = f(x), g(*xs)
+        return y, *ys
+    return wrapped
+
+def star(f: Callable[[X], Y]) -> Callable[[Unpack[Tuple[X, ...]]], Tuple[Y, ...]]:
+    def wrapped(*xs: X):
+        if not xs:
+            return nil()
+        return cons(f, star(f))(*xs)
+    return wrapped
+[builtins fixtures/tuple.pyi]
+
+[case testInvalidTypeVarTupleUseNoCrash]
+from typing_extensions import TypeVarTuple
+
+Ts = TypeVarTuple("Ts")
+
+def f(x: Ts) -> Ts:  # E: TypeVarTuple "Ts" is only valid with an unpack
+    return x
+
+v = f(1, 2, "A")  # E: Too many arguments for "f"
+reveal_type(v)  # N: Revealed type is "Any"
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleSimpleDecoratorWorks]
+from typing import TypeVar, Callable
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+T = TypeVar("T")
+
+def decorator(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], T]:
+    def wrapper(*args: Unpack[Ts]) -> T:
+        return f(*args)
+    return wrapper
+
+@decorator
+def f(a: int, b: int) -> int: ...
+reveal_type(f)  # N: Revealed type is "def (builtins.int, builtins.int) -> builtins.int"
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithUnpackIterator]
+from typing import Tuple
+from typing_extensions import Unpack
+
+def pipeline(*xs: Unpack[Tuple[int, Unpack[Tuple[float, ...]], bool]]) -> None:
+    for x in xs:
+        reveal_type(x)  # N: Revealed type is "builtins.float"
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
index 6e118597551f..fe09fb43c97c 100644
--- a/test-data/unit/check-varargs.test
+++ b/test-data/unit/check-varargs.test
@@ -775,7 +775,7 @@ class Person(TypedDict):
     name: str
     age: int
 
-def foo(x: Unpack[Person]) -> None:  # E: "Person" cannot be unpacked (must be tuple or TypeVarTuple)
+def foo(x: Unpack[Person]) -> None:  # E: Unpack is only valid in a variadic position
     ...
 def bar(x: int, *args: Unpack[Person]) -> None:  # E: "Person" cannot be unpacked (must be tuple or TypeVarTuple)
     ...
diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test
index 09d4da54bff3..f21ba5253437 100644
--- a/test-data/unit/semanal-errors.test
+++ b/test-data/unit/semanal-errors.test
@@ -1457,7 +1457,7 @@ homogenous_tuple: Tuple[Unpack[Tuple[int, ...]]]
 bad: Tuple[Unpack[int]]  # E: "int" cannot be unpacked (must be tuple or TypeVarTuple)
 [builtins fixtures/tuple.pyi]
 
-[case testTypeVarTuple]
+[case testTypeVarTupleErrors]
 from typing import Generic
 from typing_extensions import TypeVarTuple, Unpack
 
@@ -1471,15 +1471,14 @@ TP5 = TypeVarTuple(t='TP5')  # E: TypeVarTuple() expects a string literal as fir
 TP6 = TypeVarTuple('TP6', bound=int)  # E: Unexpected keyword argument "bound" for "TypeVarTuple"
 
 x: TVariadic  # E: TypeVarTuple "TVariadic" is unbound
-y: Unpack[TVariadic]  # E: TypeVarTuple "TVariadic" is unbound
+y: Unpack[TVariadic]  # E: Unpack is only valid in a variadic position
 
 
 class Variadic(Generic[Unpack[TVariadic], Unpack[TVariadic2]]):  # E: Can only use one type var tuple in a class def
     pass
 
-# TODO: this should generate an error
-#def bad_args(*args: TVariadic):
-#    pass
+def bad_args(*args: TVariadic):  # E: TypeVarTuple "TVariadic" is only valid with an unpack
+    pass
 
 def bad_kwargs(**kwargs: Unpack[TVariadic]):  # E: Unpack item in ** argument must be a TypedDict
     pass

From 0b303b53479897e24d57affef6a8cdfffbd08e3d Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Thu, 24 Aug 2023 00:47:22 +0100
Subject: [PATCH 071/288] stubtest: error if typeshed is missing modules from
 the stdlib (#15729)

We currently flag modules missing from third-party stubs in stubtest,
but don't do similarly for typeshed's stdlib stubs. This PR adds that
functionality for typeshed's stdlib stubs as well.
---
 mypy/stubtest.py | 83 ++++++++++++++++++++++++++++++++++++++++++------
 1 file changed, 74 insertions(+), 9 deletions(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index b2506e6dcc02..d8a613034b3a 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -11,6 +11,7 @@
 import copy
 import enum
 import importlib
+import importlib.machinery
 import inspect
 import os
 import pkgutil
@@ -25,7 +26,7 @@
 from contextlib import redirect_stderr, redirect_stdout
 from functools import singledispatch
 from pathlib import Path
-from typing import Any, Generic, Iterator, TypeVar, Union
+from typing import AbstractSet, Any, Generic, Iterator, TypeVar, Union
 from typing_extensions import get_origin, is_typeddict
 
 import mypy.build
@@ -1639,7 +1640,7 @@ def get_stub(module: str) -> nodes.MypyFile | None:
 
 def get_typeshed_stdlib_modules(
     custom_typeshed_dir: str | None, version_info: tuple[int, int] | None = None
-) -> list[str]:
+) -> set[str]:
     """Returns a list of stdlib modules in typeshed (for current Python version)."""
     stdlib_py_versions = mypy.modulefinder.load_stdlib_py_versions(custom_typeshed_dir)
     if version_info is None:
@@ -1661,14 +1662,75 @@ def exists_in_version(module: str) -> bool:
         typeshed_dir = Path(mypy.build.default_data_dir()) / "typeshed"
     stdlib_dir = typeshed_dir / "stdlib"
 
-    modules = []
+    modules: set[str] = set()
     for path in stdlib_dir.rglob("*.pyi"):
         if path.stem == "__init__":
             path = path.parent
         module = ".".join(path.relative_to(stdlib_dir).parts[:-1] + (path.stem,))
         if exists_in_version(module):
-            modules.append(module)
-    return sorted(modules)
+            modules.add(module)
+    return modules
+
+
+def get_importable_stdlib_modules() -> set[str]:
+    """Return all importable stdlib modules at runtime."""
+    all_stdlib_modules: AbstractSet[str]
+    if sys.version_info >= (3, 10):
+        all_stdlib_modules = sys.stdlib_module_names
+    else:
+        all_stdlib_modules = set(sys.builtin_module_names)
+        python_exe_dir = Path(sys.executable).parent
+        for m in pkgutil.iter_modules():
+            finder = m.module_finder
+            if isinstance(finder, importlib.machinery.FileFinder):
+                finder_path = Path(finder.path)
+                if (
+                    python_exe_dir in finder_path.parents
+                    and "site-packages" not in finder_path.parts
+                ):
+                    all_stdlib_modules.add(m.name)
+
+    importable_stdlib_modules: set[str] = set()
+    for module_name in all_stdlib_modules:
+        if module_name in ANNOYING_STDLIB_MODULES:
+            continue
+
+        try:
+            runtime = silent_import_module(module_name)
+        except ImportError:
+            continue
+        else:
+            importable_stdlib_modules.add(module_name)
+
+        try:
+            # some stdlib modules (e.g. `nt`) don't have __path__ set...
+            runtime_path = runtime.__path__
+            runtime_name = runtime.__name__
+        except AttributeError:
+            continue
+
+        for submodule in pkgutil.walk_packages(runtime_path, runtime_name + "."):
+            submodule_name = submodule.name
+
+            # There are many annoying *.__main__ stdlib modules,
+            # and including stubs for them isn't really that useful anyway:
+            # tkinter.__main__ opens a tkinter windows; unittest.__main__ raises SystemExit; etc.
+            #
+            # The idlelib.* submodules are similarly annoying in opening random tkinter windows,
+            # and we're unlikely to ever add stubs for idlelib in typeshed
+            # (see discussion in https://github.com/python/typeshed/pull/9193)
+            if submodule_name.endswith(".__main__") or submodule_name.startswith("idlelib."):
+                continue
+
+            try:
+                silent_import_module(submodule_name)
+            # importing multiprocessing.popen_forkserver on Windows raises AttributeError...
+            except Exception:
+                continue
+            else:
+                importable_stdlib_modules.add(submodule_name)
+
+    return importable_stdlib_modules
 
 
 def get_allowlist_entries(allowlist_file: str) -> Iterator[str]:
@@ -1699,6 +1761,10 @@ class _Arguments:
     version: str
 
 
+# typeshed added a stub for __main__, but that causes stubtest to check itself
+ANNOYING_STDLIB_MODULES: typing_extensions.Final = frozenset({"antigravity", "this", "__main__"})
+
+
 def test_stubs(args: _Arguments, use_builtins_fixtures: bool = False) -> int:
     """This is stubtest! It's time to test the stubs!"""
     # Load the allowlist. This is a series of strings corresponding to Error.object_desc
@@ -1721,10 +1787,9 @@ def test_stubs(args: _Arguments, use_builtins_fixtures: bool = False) -> int:
                 "cannot pass both --check-typeshed and a list of modules",
             )
             return 1
-        modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir)
-        # typeshed added a stub for __main__, but that causes stubtest to check itself
-        annoying_modules = {"antigravity", "this", "__main__"}
-        modules = [m for m in modules if m not in annoying_modules]
+        typeshed_modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir)
+        runtime_modules = get_importable_stdlib_modules()
+        modules = sorted((typeshed_modules | runtime_modules) - ANNOYING_STDLIB_MODULES)
 
     if not modules:
         print(_style("error:", color="red", bold=True), "no modules to check")

From 4077dc6c4b87b273bfd4552d75faaafa6c016c25 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Thu, 24 Aug 2023 08:29:00 +0100
Subject: [PATCH 072/288] stubtest: fix edge case for bytes enum subclasses
 (#15943)

---
 mypy/stubtest.py          |  6 +++---
 mypy/test/teststubtest.py | 20 ++++++++++++++++++++
 2 files changed, 23 insertions(+), 3 deletions(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index d8a613034b3a..34bb985b702e 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -1552,10 +1552,10 @@ def anytype() -> mypy.types.AnyType:
     fallback = mypy.types.Instance(type_info, [anytype() for _ in type_info.type_vars])
 
     value: bool | int | str
-    if isinstance(runtime, bytes):
-        value = bytes_to_human_readable_repr(runtime)
-    elif isinstance(runtime, enum.Enum) and isinstance(runtime.name, str):
+    if isinstance(runtime, enum.Enum) and isinstance(runtime.name, str):
         value = runtime.name
+    elif isinstance(runtime, bytes):
+        value = bytes_to_human_readable_repr(runtime)
     elif isinstance(runtime, (bool, int, str)):
         value = runtime
     else:
diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py
index a6733a9e8bd0..a52d9ef5de31 100644
--- a/mypy/test/teststubtest.py
+++ b/mypy/test/teststubtest.py
@@ -1068,6 +1068,26 @@ def spam(x=Flags4(0)): pass
             """,
             error="spam",
         )
+        yield Case(
+            stub="""
+            from typing_extensions import Final, Literal
+            class BytesEnum(bytes, enum.Enum):
+                a: bytes
+            FOO: Literal[BytesEnum.a]
+            BAR: Final = BytesEnum.a
+            BAZ: BytesEnum
+            EGGS: bytes
+            """,
+            runtime="""
+            class BytesEnum(bytes, enum.Enum):
+                a = b'foo'
+            FOO = BytesEnum.a
+            BAR = BytesEnum.a
+            BAZ = BytesEnum.a
+            EGGS = BytesEnum.a
+            """,
+            error=None,
+        )
 
     @collect_cases
     def test_decorator(self) -> Iterator[Case]:

From dc7344539bd6e40825486dfaaa3d0bc34a64784e Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Thu, 24 Aug 2023 19:56:13 +0300
Subject: [PATCH 073/288] Do not advertise to create your own `assert_never`
 helper (#15947)

---
 docs/source/literal_types.rst | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/docs/source/literal_types.rst b/docs/source/literal_types.rst
index a66d300bd0fd..283bf7f9dba1 100644
--- a/docs/source/literal_types.rst
+++ b/docs/source/literal_types.rst
@@ -329,13 +329,10 @@ perform an exhaustiveness check, you need to update your code to use an
 .. code-block:: python
 
   from typing import Literal, NoReturn
+  from typing_extensions import assert_never
 
   PossibleValues = Literal['one', 'two']
 
-  def assert_never(value: NoReturn) -> NoReturn:
-      # This also works at runtime as well
-      assert False, f'This code should never be reached, got: {value}'
-
   def validate(x: PossibleValues) -> bool:
       if x == 'one':
           return True
@@ -443,10 +440,7 @@ Let's start with a definition:
 
   from enum import Enum
   from typing import NoReturn
-
-  def assert_never(value: NoReturn) -> NoReturn:
-      # This also works in runtime as well:
-      assert False, f'This code should never be reached, got: {value}'
+  from typing_extensions import assert_never
 
   class Direction(Enum):
       up = 'up'

From 351371d20c0b9e014528238761a6eeedf8dfb926 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 24 Aug 2023 20:10:47 +0100
Subject: [PATCH 074/288] Fix type arguments validation for variadic instances
 (#15944)

Fixes https://github.com/python/mypy/issues/15410
Fixes https://github.com/python/mypy/issues/15411
---
 mypy/expandtype.py                      |  8 +--
 mypy/semanal_typeargs.py                | 23 +++++++-
 mypy/test/testtypes.py                  |  2 +-
 mypy/typeanal.py                        |  8 ++-
 mypy/types.py                           |  1 +
 test-data/unit/check-typevar-tuple.test | 78 +++++++++++++++++++++++++
 6 files changed, 112 insertions(+), 8 deletions(-)

diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index e71f6429d9c0..dc3dae670c1f 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -409,10 +409,10 @@ def visit_tuple_type(self, t: TupleType) -> Type:
                 # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...]
                 item = items[0]
                 if isinstance(item, UnpackType):
-                    assert isinstance(item.type, ProperType)
-                    if isinstance(item.type, Instance):
-                        assert item.type.type.fullname == "builtins.tuple"
-                        return item.type
+                    unpacked = get_proper_type(item.type)
+                    if isinstance(unpacked, Instance):
+                        assert unpacked.type.fullname == "builtins.tuple"
+                        return unpacked
             fallback = t.partial_fallback.accept(self)
             assert isinstance(fallback, ProperType) and isinstance(fallback, Instance)
             return t.copy_modified(items=items, fallback=fallback)
diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py
index 8d8ef66b5c69..1a37ac57be30 100644
--- a/mypy/semanal_typeargs.py
+++ b/mypy/semanal_typeargs.py
@@ -18,7 +18,7 @@
 from mypy.options import Options
 from mypy.scope import Scope
 from mypy.subtypes import is_same_type, is_subtype
-from mypy.typeanal import set_any_tvars
+from mypy.typeanal import fix_type_var_tuple_argument, set_any_tvars
 from mypy.types import (
     AnyType,
     CallableType,
@@ -143,7 +143,26 @@ def visit_instance(self, t: Instance) -> None:
         if isinstance(info, FakeInfo):
             return  # https://github.com/python/mypy/issues/11079
         t.args = tuple(flatten_nested_tuples(t.args))
-        # TODO: fix #15410 and #15411.
+        if t.type.has_type_var_tuple_type:
+            # Regular Instances are already validated in typeanal.py.
+            # TODO: do something with partial overlap (probably just reject).
+            # also in other places where split_with_prefix_and_suffix() is used.
+            correct = len(t.args) >= len(t.type.type_vars) - 1
+            if any(
+                isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance)
+                for a in t.args
+            ):
+                correct = True
+            if not correct:
+                exp_len = f"at least {len(t.type.type_vars) - 1}"
+                self.fail(
+                    f"Bad number of arguments, expected: {exp_len}, given: {len(t.args)}",
+                    t,
+                    code=codes.TYPE_ARG,
+                )
+                any_type = AnyType(TypeOfAny.from_error)
+                t.args = (any_type,) * len(t.type.type_vars)
+                fix_type_var_tuple_argument(any_type, t)
         self.validate_args(info.name, t.args, info.defn.type_vars, t)
         super().visit_instance(t)
 
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
index 56ac86058ce4..12e7b207b00a 100644
--- a/mypy/test/testtypes.py
+++ b/mypy/test/testtypes.py
@@ -1464,7 +1464,7 @@ def make_call(*items: tuple[str, str | None]) -> CallExpr:
 class TestExpandTypeLimitGetProperType(TestCase):
     # WARNING: do not increase this number unless absolutely necessary,
     # and you understand what you are doing.
-    ALLOWED_GET_PROPER_TYPES = 6
+    ALLOWED_GET_PROPER_TYPES = 7
 
     @skipUnless(mypy.expandtype.__file__.endswith(".py"), "Skip for compiled mypy")
     def test_count_get_proper_type(self) -> None:
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 14b37539afea..806b9967039e 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -1795,6 +1795,13 @@ def fix_instance(
         fix_type_var_tuple_argument(any_type, t)
 
         return
+
+    if t.type.has_type_var_tuple_type:
+        # This can be only correctly analyzed when all arguments are fully
+        # analyzed, because there may be a variadic item among them, so we
+        # do this in semanal_typeargs.py.
+        return
+
     # Invalid number of type parameters.
     fail(
         wrong_type_arg_count(len(t.type.type_vars), str(len(t.args)), t.type.name),
@@ -1805,7 +1812,6 @@ def fix_instance(
     # otherwise the type checker may crash as it expects
     # things to be right.
     t.args = tuple(AnyType(TypeOfAny.from_error) for _ in t.type.type_vars)
-    fix_type_var_tuple_argument(AnyType(TypeOfAny.from_error), t)
     t.invalid = True
 
 
diff --git a/mypy/types.py b/mypy/types.py
index c71412f4ea58..214978eab774 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -322,6 +322,7 @@ def _expand_once(self) -> Type:
             assert isinstance(self.alias.target, Instance)  # type: ignore[misc]
             return self.alias.target.copy_modified(args=self.args)
 
+        # TODO: this logic duplicates the one in expand_type_by_instance().
         if self.alias.tvar_tuple_index is None:
             mapping = {v.id: s for (v, s) in zip(self.alias.alias_tvars, self.args)}
         else:
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 58fc1265ae99..ee81597edadf 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -922,3 +922,81 @@ def pipeline(*xs: Unpack[Tuple[int, Unpack[Tuple[float, ...]], bool]]) -> None:
     for x in xs:
         reveal_type(x)  # N: Revealed type is "builtins.float"
 [builtins fixtures/tuple.pyi]
+
+[case testFixedUnpackItemInInstanceArguments]
+from typing import TypeVar, Callable, Tuple, Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+
+class C(Generic[T, Unpack[Ts], S]):
+    prefix: T
+    suffix: S
+    middle: Tuple[Unpack[Ts]]
+
+Ints = Tuple[int, int]
+c: C[Unpack[Ints]]
+reveal_type(c.prefix)  # N: Revealed type is "builtins.int"
+reveal_type(c.suffix)  # N: Revealed type is "builtins.int"
+reveal_type(c.middle)  # N: Revealed type is "Tuple[()]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicUnpackItemInInstanceArguments]
+from typing import TypeVar, Callable, Tuple, Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+
+class Other(Generic[Unpack[Ts]]): ...
+class C(Generic[T, Unpack[Ts], S]):
+    prefix: T
+    suffix: S
+    x: Tuple[Unpack[Ts]]
+    y: Callable[[Unpack[Ts]], None]
+    z: Other[Unpack[Ts]]
+
+Ints = Tuple[int, ...]
+c: C[Unpack[Ints]]
+reveal_type(c.prefix)  # N: Revealed type is "builtins.int"
+reveal_type(c.suffix)  # N: Revealed type is "builtins.int"
+reveal_type(c.x)  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+reveal_type(c.y)  # N: Revealed type is "def (*builtins.int)"
+reveal_type(c.z)  # N: Revealed type is "__main__.Other[Unpack[builtins.tuple[builtins.int, ...]]]"
+[builtins fixtures/tuple.pyi]
+
+[case testTooFewItemsInInstanceArguments]
+from typing import Generic, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+class C(Generic[T, Unpack[Ts], S]): ...
+
+c: C[int]  # E: Bad number of arguments, expected: at least 2, given: 1
+reveal_type(c)  # N: Revealed type is "__main__.C[Any, Unpack[builtins.tuple[Any, ...]], Any]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicClassUpperBoundCheck]
+from typing import Tuple, TypeVar, Generic
+from typing_extensions import Unpack, TypeVarTuple
+
+class A: ...
+class B: ...
+class C: ...
+class D: ...
+
+T = TypeVar("T", bound=int)
+S = TypeVar("S", bound=str)
+Ts = TypeVarTuple("Ts")
+
+class G(Generic[T, Unpack[Ts], S]): ...
+First = Tuple[A, B]
+Second = Tuple[C, D]
+x: G[Unpack[First], Unpack[Second]]  # E: Type argument "A" of "G" must be a subtype of "int" \
+                                     # E: Type argument "D" of "G" must be a subtype of "str"
+[builtins fixtures/tuple.pyi]

From 9e1f4df133e155f213cf3714bf796bb9e8698907 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 24 Aug 2023 20:11:24 +0100
Subject: [PATCH 075/288] Use TypeVar refresh uniformly for class object access
 (#15945)

Fixes https://github.com/python/mypy/issues/15934

I think this is the right thing to do; it may even fix some other rare
accidental `TypeVar` clashes not involving self-types. This causes a bit
of churn in tests, but not too much.
---
 mypy/checkmember.py                           |  4 +--
 test-data/unit/check-classes.test             |  2 +-
 test-data/unit/check-incremental.test         | 26 +++++++++---------
 .../unit/check-parameter-specification.test   |  8 +++---
 test-data/unit/check-plugin-attrs.test        | 16 +++++------
 test-data/unit/check-selftype.test            | 27 ++++++++++++++++---
 6 files changed, 52 insertions(+), 31 deletions(-)

diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 2b0717f181a9..1bdc00a6eb59 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -1198,12 +1198,12 @@ class B(A[str]): pass
     # (i.e. appear in the return type of the class object on which the method was accessed).
     if isinstance(t, CallableType):
         tvars = original_vars if original_vars is not None else []
+        t = freshen_all_functions_type_vars(t)
         if is_classmethod:
-            t = freshen_all_functions_type_vars(t)
             t = bind_self(t, original_type, is_classmethod=True)
             assert isuper is not None
             t = expand_type_by_instance(t, isuper)
-            freeze_all_type_vars(t)
+        freeze_all_type_vars(t)
         return t.copy_modified(variables=list(tvars) + list(t.variables))
     elif isinstance(t, Overloaded):
         return Overloaded(
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
index b9e65ef4ad20..04b51bb603c5 100644
--- a/test-data/unit/check-classes.test
+++ b/test-data/unit/check-classes.test
@@ -1162,7 +1162,7 @@ def test() -> None:
             reveal_type(x)  # N: Revealed type is "T`-1"
             reveal_type(x.returns_int())  # N: Revealed type is "builtins.int"
             return foo
-    reveal_type(Foo.bar)  # N: Revealed type is "def [T <: __main__.Foo@5] (self: __main__.Foo@5, foo: T`-1) -> T`-1"
+    reveal_type(Foo.bar)  # N: Revealed type is "def [T <: __main__.Foo@5] (self: __main__.Foo@5, foo: T`1) -> T`1"
 
 [case testGenericClassWithInvalidTypevarUseWithinFunction]
 from typing import TypeVar
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index 80f5e4e7d12d..fcab0545b982 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -3035,10 +3035,10 @@ main:15: error: Unsupported left operand type for >= ("NoCmp")
 [case testAttrsIncrementalDunder]
 from a import A
 reveal_type(A)  # N: Revealed type is "def (a: builtins.int) -> a.A"
-reveal_type(A.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-reveal_type(A.__le__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-reveal_type(A.__gt__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-reveal_type(A.__ge__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+reveal_type(A.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`3, other: _AT`3) -> builtins.bool"
+reveal_type(A.__le__)  # N: Revealed type is "def [_AT] (self: _AT`4, other: _AT`4) -> builtins.bool"
+reveal_type(A.__gt__)  # N: Revealed type is "def [_AT] (self: _AT`5, other: _AT`5) -> builtins.bool"
+reveal_type(A.__ge__)  # N: Revealed type is "def [_AT] (self: _AT`6, other: _AT`6) -> builtins.bool"
 
 A(1) < A(2)
 A(1) <= A(2)
@@ -3072,10 +3072,10 @@ class A:
 [stale]
 [out2]
 main:2: note: Revealed type is "def (a: builtins.int) -> a.A"
-main:3: note: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-main:4: note: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-main:5: note: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-main:6: note: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+main:3: note: Revealed type is "def [_AT] (self: _AT`1, other: _AT`1) -> builtins.bool"
+main:4: note: Revealed type is "def [_AT] (self: _AT`2, other: _AT`2) -> builtins.bool"
+main:5: note: Revealed type is "def [_AT] (self: _AT`3, other: _AT`3) -> builtins.bool"
+main:6: note: Revealed type is "def [_AT] (self: _AT`4, other: _AT`4) -> builtins.bool"
 main:15: error: Unsupported operand types for < ("A" and "int")
 main:16: error: Unsupported operand types for <= ("A" and "int")
 main:17: error: Unsupported operand types for > ("A" and "int")
@@ -3963,10 +3963,10 @@ class A:
 tmp/b.py:3: note: Revealed type is "def (a: builtins.int) -> a.A"
 tmp/b.py:4: note: Revealed type is "def (builtins.object, builtins.object) -> builtins.bool"
 tmp/b.py:5: note: Revealed type is "def (builtins.object, builtins.object) -> builtins.bool"
-tmp/b.py:6: note: Revealed type is "def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool"
-tmp/b.py:7: note: Revealed type is "def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool"
-tmp/b.py:8: note: Revealed type is "def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool"
-tmp/b.py:9: note: Revealed type is "def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool"
+tmp/b.py:6: note: Revealed type is "def [_DT] (self: _DT`1, other: _DT`1) -> builtins.bool"
+tmp/b.py:7: note: Revealed type is "def [_DT] (self: _DT`2, other: _DT`2) -> builtins.bool"
+tmp/b.py:8: note: Revealed type is "def [_DT] (self: _DT`3, other: _DT`3) -> builtins.bool"
+tmp/b.py:9: note: Revealed type is "def [_DT] (self: _DT`4, other: _DT`4) -> builtins.bool"
 tmp/b.py:18: error: Unsupported operand types for < ("A" and "int")
 tmp/b.py:19: error: Unsupported operand types for <= ("A" and "int")
 tmp/b.py:20: error: Unsupported operand types for > ("A" and "int")
@@ -6325,7 +6325,7 @@ reveal_type(D.meth)
 reveal_type(D().meth)
 [out]
 [out2]
-tmp/m.py:4: note: Revealed type is "def [Self <: lib.C] (self: Self`0, other: Self`0) -> Self`0"
+tmp/m.py:4: note: Revealed type is "def [Self <: lib.C] (self: Self`1, other: Self`1) -> Self`1"
 tmp/m.py:5: note: Revealed type is "def (other: m.D) -> m.D"
 
 [case testIncrementalNestedGenericCallableCrash]
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index dee8a971f925..dba73974aef6 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -901,8 +901,8 @@ class A:
     def func(self, action: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R:
         ...
 
-reveal_type(A.func)  # N: Revealed type is "def [_P, _R] (self: __main__.A, action: def (*_P.args, **_P.kwargs) -> _R`-2, *_P.args, **_P.kwargs) -> _R`-2"
-reveal_type(A().func)  # N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`5, *_P.args, **_P.kwargs) -> _R`5"
+reveal_type(A.func)  # N: Revealed type is "def [_P, _R] (self: __main__.A, action: def (*_P.args, **_P.kwargs) -> _R`3, *_P.args, **_P.kwargs) -> _R`3"
+reveal_type(A().func)  # N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`7, *_P.args, **_P.kwargs) -> _R`7"
 
 def f(x: int) -> int:
     ...
@@ -934,8 +934,8 @@ class A:
     def func(self, action: Job[_P, None]) -> Job[_P, None]:
         ...
 
-reveal_type(A.func)  # N: Revealed type is "def [_P] (self: __main__.A, action: __main__.Job[_P`-1, None]) -> __main__.Job[_P`-1, None]"
-reveal_type(A().func)  # N: Revealed type is "def [_P] (action: __main__.Job[_P`3, None]) -> __main__.Job[_P`3, None]"
+reveal_type(A.func)  # N: Revealed type is "def [_P] (self: __main__.A, action: __main__.Job[_P`2, None]) -> __main__.Job[_P`2, None]"
+reveal_type(A().func)  # N: Revealed type is "def [_P] (action: __main__.Job[_P`4, None]) -> __main__.Job[_P`4, None]"
 reveal_type(A().func(Job(lambda x: x)))  # N: Revealed type is "__main__.Job[[x: Any], None]"
 
 def f(x: int, y: int) -> None: ...
diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test
index 3534d206c060..7580531bebc9 100644
--- a/test-data/unit/check-plugin-attrs.test
+++ b/test-data/unit/check-plugin-attrs.test
@@ -185,10 +185,10 @@ from attr import attrib, attrs
 class A:
     a: int
 reveal_type(A)  # N: Revealed type is "def (a: builtins.int) -> __main__.A"
-reveal_type(A.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-reveal_type(A.__le__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-reveal_type(A.__gt__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-reveal_type(A.__ge__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+reveal_type(A.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`3, other: _AT`3) -> builtins.bool"
+reveal_type(A.__le__)  # N: Revealed type is "def [_AT] (self: _AT`4, other: _AT`4) -> builtins.bool"
+reveal_type(A.__gt__)  # N: Revealed type is "def [_AT] (self: _AT`5, other: _AT`5) -> builtins.bool"
+reveal_type(A.__ge__)  # N: Revealed type is "def [_AT] (self: _AT`6, other: _AT`6) -> builtins.bool"
 
 A(1) < A(2)
 A(1) <= A(2)
@@ -989,10 +989,10 @@ class C(A, B): pass
 @attr.s
 class D(A): pass
 
-reveal_type(A.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-reveal_type(B.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-reveal_type(C.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
-reveal_type(D.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool"
+reveal_type(A.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`5, other: _AT`5) -> builtins.bool"
+reveal_type(B.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`6, other: _AT`6) -> builtins.bool"
+reveal_type(C.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`7, other: _AT`7) -> builtins.bool"
+reveal_type(D.__lt__)  # N: Revealed type is "def [_AT] (self: _AT`8, other: _AT`8) -> builtins.bool"
 
 A() < A()
 B() < B()
diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test
index 77d2d519214a..d5024412ca97 100644
--- a/test-data/unit/check-selftype.test
+++ b/test-data/unit/check-selftype.test
@@ -1484,7 +1484,7 @@ class C:
             return self
 class D(C): ...
 
-reveal_type(C.meth)  # N: Revealed type is "def [Self <: __main__.C] (self: Self`0) -> builtins.list[Self`0]"
+reveal_type(C.meth)  # N: Revealed type is "def [Self <: __main__.C] (self: Self`1) -> builtins.list[Self`1]"
 C.attr  # E: Access to generic instance variables via class is ambiguous
 reveal_type(D().meth())  # N: Revealed type is "builtins.list[__main__.D]"
 reveal_type(D().attr)  # N: Revealed type is "builtins.list[__main__.D]"
@@ -1793,7 +1793,7 @@ class C:
     def bar(self) -> Self: ...
     def foo(self, x: S) -> Tuple[Self, S]: ...
 
-reveal_type(C.foo)  # N: Revealed type is "def [Self <: __main__.C, S] (self: Self`0, x: S`-1) -> Tuple[Self`0, S`-1]"
+reveal_type(C.foo)  # N: Revealed type is "def [Self <: __main__.C, S] (self: Self`1, x: S`2) -> Tuple[Self`1, S`2]"
 reveal_type(C().foo(42))  # N: Revealed type is "Tuple[__main__.C, builtins.int]"
 [builtins fixtures/tuple.pyi]
 
@@ -1903,7 +1903,7 @@ class C:
 
 class D(C): ...
 
-reveal_type(D.f)  # N: Revealed type is "def [T] (T`-1) -> T`-1"
+reveal_type(D.f)  # N: Revealed type is "def [T] (T`1) -> T`1"
 reveal_type(D().f)  # N: Revealed type is "def () -> __main__.D"
 
 [case testTypingSelfOnSuperTypeVarValues]
@@ -2015,3 +2015,24 @@ class Add(Async):
 reveal_type(Add.async_func())  # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> builtins.int"
 reveal_type(Add().async_func())  # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> builtins.int"
 [builtins fixtures/classmethod.pyi]
+
+[case testSelfTypeMethodOnClassObject]
+from typing import Self
+
+class Object:  # Needed to mimic object in typeshed
+    ref: Self
+
+class Foo:
+    def foo(self) -> Self:
+        return self
+
+class Ben(Object):
+    MY_MAP = {
+        "foo": Foo.foo,
+    }
+    @classmethod
+    def doit(cls) -> Foo:
+        reveal_type(cls.MY_MAP)  # N: Revealed type is "builtins.dict[builtins.str, def [Self <: __main__.Foo] (self: Self`4) -> Self`4]"
+        foo_method = cls.MY_MAP["foo"]
+        return foo_method(Foo())
+[builtins fixtures/isinstancelist.pyi]

From f9b1db6519cd88a081e8b8597240e166eb513245 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 25 Aug 2023 21:41:25 +0100
Subject: [PATCH 076/288] Fix crash on invalid type variable with ParamSpec
 (#15953)

Fixes https://github.com/python/mypy/issues/15948

The fix is straightforward: an invalid type variable resulted in applying
type argument packing/simplification when we shouldn't. Making the
latter more strict fixes the issue.

---------

Co-authored-by: Jelle Zijlstra <jelle.zijlstra@gmail.com>
---
 mypy/typeanal.py                              | 37 +++++++++++++++----
 .../unit/check-parameter-specification.test   | 23 ++++++++++++
 2 files changed, 53 insertions(+), 7 deletions(-)

diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 806b9967039e..e29cca09be63 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -458,11 +458,30 @@ def pack_paramspec_args(self, an_args: Sequence[Type]) -> list[Type]:
         # These do not support mypy_extensions VarArgs, etc. as they were already analyzed
         # TODO: should these be re-analyzed to get rid of this inconsistency?
         count = len(an_args)
-        if count > 0:
-            first_arg = get_proper_type(an_args[0])
-            if not (count == 1 and isinstance(first_arg, (Parameters, ParamSpecType, AnyType))):
-                return [Parameters(an_args, [ARG_POS] * count, [None] * count)]
-        return list(an_args)
+        if count == 0:
+            return []
+        if count == 1 and isinstance(get_proper_type(an_args[0]), AnyType):
+            # Single Any is interpreted as ..., rather that a single argument with Any type.
+            # I didn't find this in the PEP, but it sounds reasonable.
+            return list(an_args)
+        if any(isinstance(a, (Parameters, ParamSpecType)) for a in an_args):
+            if len(an_args) > 1:
+                first_wrong = next(
+                    arg for arg in an_args if isinstance(arg, (Parameters, ParamSpecType))
+                )
+                self.fail(
+                    "Nested parameter specifications are not allowed",
+                    first_wrong,
+                    code=codes.VALID_TYPE,
+                )
+                return [AnyType(TypeOfAny.from_error)]
+            return list(an_args)
+        first = an_args[0]
+        return [
+            Parameters(
+                an_args, [ARG_POS] * count, [None] * count, line=first.line, column=first.column
+            )
+        ]
 
     def cannot_resolve_type(self, t: UnboundType) -> None:
         # TODO: Move error message generation to messages.py. We'd first
@@ -503,7 +522,11 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type:
         names: list[str | None] = [None] * len(args)
 
         pre = Parameters(
-            args + pre.arg_types, [ARG_POS] * len(args) + pre.arg_kinds, names + pre.arg_names
+            args + pre.arg_types,
+            [ARG_POS] * len(args) + pre.arg_kinds,
+            names + pre.arg_names,
+            line=t.line,
+            column=t.column,
         )
         return ps.copy_modified(prefix=pre) if isinstance(ps, ParamSpecType) else pre
 
@@ -913,7 +936,7 @@ def visit_type_list(self, t: TypeList) -> Type:
             if params:
                 ts, kinds, names = params
                 # bind these types
-                return Parameters(self.anal_array(ts), kinds, names)
+                return Parameters(self.anal_array(ts), kinds, names, line=t.line, column=t.column)
             else:
                 return AnyType(TypeOfAny.from_error)
         else:
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index dba73974aef6..257fb9241373 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1741,3 +1741,26 @@ def bar(x): ...
 
 reveal_type(bar)  # N: Revealed type is "Overload(def (x: builtins.int) -> builtins.float, def (x: builtins.str) -> builtins.str)"
 [builtins fixtures/paramspec.pyi]
+
+[case testParamSpecDecoratorOverloadNoCrashOnInvalidTypeVar]
+from typing import Any, Callable, List
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+T = 1
+
+Alias = Callable[P, List[T]]  # type: ignore
+def dec(fn: Callable[P, T]) -> Alias[P, T]: ...  # type: ignore
+f: Any
+dec(f)  # No crash
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecErrorNestedParams]
+from typing import Generic
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+class C(Generic[P]): ...
+c: C[int, [int, str], str]  # E: Nested parameter specifications are not allowed
+reveal_type(c)  # N: Revealed type is "__main__.C[Any]"
+[builtins fixtures/paramspec.pyi]

From 7f65cc7570eaa4206ae086680e1c1d0489897efa Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 25 Aug 2023 22:30:39 +0100
Subject: [PATCH 077/288] Infer ParamSpec constraint from arguments (#15896)

Fixes https://github.com/python/mypy/issues/12278
Fixes https://github.com/python/mypy/issues/13191 (more tricky nested
use cases with optional/keyword args still don't work, but they are
quite tricky to fix and may be selectively fixed later)

This unfortunately requires some special-casing, here is its summary:
* If actual argument for `Callable[P, T]` is non-generic and non-lambda,
do not put it into inference second pass.
* If we are able to infer constraints for `P` without using arguments
mapped to `*args: P.args` etc., do not add the constraint for `P` vs
those arguments (this applies to both top-level callable constraints,
and for nested callable constraints against callables that are known to
have imprecise argument kinds).

(Btw TODO I added is not related to this PR, I just noticed something
obviously wrong)
---
 mypy/checkexpr.py                             |  41 +++++-
 mypy/constraints.py                           | 136 +++++++++++++-----
 mypy/expandtype.py                            |   2 +
 mypy/infer.py                                 |   3 +-
 mypy/types.py                                 |  22 +++
 .../unit/check-parameter-specification.test   |  82 +++++++++--
 test-data/unit/fixtures/paramspec.pyi         |   3 +-
 test-data/unit/typexport-basic.test           |  24 ++--
 8 files changed, 244 insertions(+), 69 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 6de317f587cb..4430d0773cfa 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -1987,7 +1987,7 @@ def infer_function_type_arguments(
                 )
 
             arg_pass_nums = self.get_arg_infer_passes(
-                callee_type.arg_types, formal_to_actual, len(args)
+                callee_type, args, arg_types, formal_to_actual, len(args)
             )
 
             pass1_args: list[Type | None] = []
@@ -2001,6 +2001,7 @@ def infer_function_type_arguments(
                 callee_type,
                 pass1_args,
                 arg_kinds,
+                arg_names,
                 formal_to_actual,
                 context=self.argument_infer_context(),
                 strict=self.chk.in_checked_function(),
@@ -2061,6 +2062,7 @@ def infer_function_type_arguments(
                     callee_type,
                     arg_types,
                     arg_kinds,
+                    arg_names,
                     formal_to_actual,
                     context=self.argument_infer_context(),
                     strict=self.chk.in_checked_function(),
@@ -2140,6 +2142,7 @@ def infer_function_type_arguments_pass2(
             callee_type,
             arg_types,
             arg_kinds,
+            arg_names,
             formal_to_actual,
             context=self.argument_infer_context(),
         )
@@ -2152,7 +2155,12 @@ def argument_infer_context(self) -> ArgumentInferContext:
         )
 
     def get_arg_infer_passes(
-        self, arg_types: list[Type], formal_to_actual: list[list[int]], num_actuals: int
+        self,
+        callee: CallableType,
+        args: list[Expression],
+        arg_types: list[Type],
+        formal_to_actual: list[list[int]],
+        num_actuals: int,
     ) -> list[int]:
         """Return pass numbers for args for two-pass argument type inference.
 
@@ -2163,8 +2171,28 @@ def get_arg_infer_passes(
         lambdas more effectively.
         """
         res = [1] * num_actuals
-        for i, arg in enumerate(arg_types):
-            if arg.accept(ArgInferSecondPassQuery()):
+        for i, arg in enumerate(callee.arg_types):
+            skip_param_spec = False
+            p_formal = get_proper_type(callee.arg_types[i])
+            if isinstance(p_formal, CallableType) and p_formal.param_spec():
+                for j in formal_to_actual[i]:
+                    p_actual = get_proper_type(arg_types[j])
+                    # This is an exception from the usual logic where we put generic Callable
+                    # arguments in the second pass. If we have a non-generic actual, it is
+                    # likely to infer good constraints, for example if we have:
+                    #   def run(Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ...
+                    #   def test(x: int, y: int) -> int: ...
+                    #   run(test, 1, 2)
+                    # we will use `test` for inference, since it will allow to infer also
+                    # argument *names* for P <: [x: int, y: int].
+                    if (
+                        isinstance(p_actual, CallableType)
+                        and not p_actual.variables
+                        and not isinstance(args[j], LambdaExpr)
+                    ):
+                        skip_param_spec = True
+                        break
+            if not skip_param_spec and arg.accept(ArgInferSecondPassQuery()):
                 for j in formal_to_actual[i]:
                     res[j] = 2
         return res
@@ -4903,7 +4931,9 @@ def infer_lambda_type_using_context(
             self.chk.fail(message_registry.CANNOT_INFER_LAMBDA_TYPE, e)
             return None, None
 
-        return callable_ctx, callable_ctx
+        # Type of lambda must have correct argument names, to prevent false
+        # negatives when lambdas appear in `ParamSpec` context.
+        return callable_ctx.copy_modified(arg_names=e.arg_names), callable_ctx
 
     def visit_super_expr(self, e: SuperExpr) -> Type:
         """Type check a super expression (non-lvalue)."""
@@ -5921,6 +5951,7 @@ def __init__(self) -> None:
         super().__init__(types.ANY_STRATEGY)
 
     def visit_callable_type(self, t: CallableType) -> bool:
+        # TODO: we need to check only for type variables of original callable.
         return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery())
 
 
diff --git a/mypy/constraints.py b/mypy/constraints.py
index edce11e778ab..0e59b5459fd4 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -108,6 +108,7 @@ def infer_constraints_for_callable(
     callee: CallableType,
     arg_types: Sequence[Type | None],
     arg_kinds: list[ArgKind],
+    arg_names: Sequence[str | None] | None,
     formal_to_actual: list[list[int]],
     context: ArgumentInferContext,
 ) -> list[Constraint]:
@@ -118,6 +119,20 @@ def infer_constraints_for_callable(
     constraints: list[Constraint] = []
     mapper = ArgTypeExpander(context)
 
+    param_spec = callee.param_spec()
+    param_spec_arg_types = []
+    param_spec_arg_names = []
+    param_spec_arg_kinds = []
+
+    incomplete_star_mapping = False
+    for i, actuals in enumerate(formal_to_actual):
+        for actual in actuals:
+            if actual is None and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2):
+                # We can't use arguments to infer ParamSpec constraint, if only some
+                # are present in the current inference pass.
+                incomplete_star_mapping = True
+                break
+
     for i, actuals in enumerate(formal_to_actual):
         if isinstance(callee.arg_types[i], UnpackType):
             unpack_type = callee.arg_types[i]
@@ -194,11 +209,47 @@ def infer_constraints_for_callable(
                 actual_type = mapper.expand_actual_type(
                     actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i]
                 )
-                # TODO: if callee has ParamSpec, we need to collect all actuals that map to star
-                # args and create single constraint between P and resulting Parameters instead.
-                c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF)
-                constraints.extend(c)
-
+                if (
+                    param_spec
+                    and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2)
+                    and not incomplete_star_mapping
+                ):
+                    # If actual arguments are mapped to ParamSpec type, we can't infer individual
+                    # constraints, instead store them and infer single constraint at the end.
+                    # It is impossible to map actual kind to formal kind, so use some heuristic.
+                    # This inference is used as a fallback, so relying on heuristic should be OK.
+                    param_spec_arg_types.append(
+                        mapper.expand_actual_type(
+                            actual_arg_type, arg_kinds[actual], None, arg_kinds[actual]
+                        )
+                    )
+                    actual_kind = arg_kinds[actual]
+                    param_spec_arg_kinds.append(
+                        ARG_POS if actual_kind not in (ARG_STAR, ARG_STAR2) else actual_kind
+                    )
+                    param_spec_arg_names.append(arg_names[actual] if arg_names else None)
+                else:
+                    c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF)
+                    constraints.extend(c)
+    if (
+        param_spec
+        and not any(c.type_var == param_spec.id for c in constraints)
+        and not incomplete_star_mapping
+    ):
+        # Use ParamSpec constraint from arguments only if there are no other constraints,
+        # since as explained above it is quite ad-hoc.
+        constraints.append(
+            Constraint(
+                param_spec,
+                SUPERTYPE_OF,
+                Parameters(
+                    arg_types=param_spec_arg_types,
+                    arg_kinds=param_spec_arg_kinds,
+                    arg_names=param_spec_arg_names,
+                    imprecise_arg_kinds=True,
+                ),
+            )
+        )
     return constraints
 
 
@@ -949,6 +1000,14 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
             res: list[Constraint] = []
             cactual = self.actual.with_unpacked_kwargs()
             param_spec = template.param_spec()
+
+            template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type
+            if template.type_guard is not None:
+                template_ret_type = template.type_guard
+            if cactual.type_guard is not None:
+                cactual_ret_type = cactual.type_guard
+            res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction))
+
             if param_spec is None:
                 # TODO: Erase template variables if it is generic?
                 if (
@@ -1008,51 +1067,50 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                     )
                     extra_tvars = True
 
+                # Compare prefixes as well
+                cactual_prefix = cactual.copy_modified(
+                    arg_types=cactual.arg_types[:prefix_len],
+                    arg_kinds=cactual.arg_kinds[:prefix_len],
+                    arg_names=cactual.arg_names[:prefix_len],
+                )
+                res.extend(
+                    infer_callable_arguments_constraints(prefix, cactual_prefix, self.direction)
+                )
+
+                param_spec_target: Type | None = None
+                skip_imprecise = (
+                    any(c.type_var == param_spec.id for c in res) and cactual.imprecise_arg_kinds
+                )
                 if not cactual_ps:
                     max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)])
                     prefix_len = min(prefix_len, max_prefix_len)
-                    res.append(
-                        Constraint(
-                            param_spec,
-                            neg_op(self.direction),
-                            Parameters(
-                                arg_types=cactual.arg_types[prefix_len:],
-                                arg_kinds=cactual.arg_kinds[prefix_len:],
-                                arg_names=cactual.arg_names[prefix_len:],
-                                variables=cactual.variables
-                                if not type_state.infer_polymorphic
-                                else [],
-                            ),
+                    # This logic matches top-level callable constraint exception, if we managed
+                    # to get other constraints for ParamSpec, don't infer one with imprecise kinds
+                    if not skip_imprecise:
+                        param_spec_target = Parameters(
+                            arg_types=cactual.arg_types[prefix_len:],
+                            arg_kinds=cactual.arg_kinds[prefix_len:],
+                            arg_names=cactual.arg_names[prefix_len:],
+                            variables=cactual.variables
+                            if not type_state.infer_polymorphic
+                            else [],
+                            imprecise_arg_kinds=cactual.imprecise_arg_kinds,
                         )
-                    )
                 else:
-                    if len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types):
-                        cactual_ps = cactual_ps.copy_modified(
+                    if (
+                        len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types)
+                        and not skip_imprecise
+                    ):
+                        param_spec_target = cactual_ps.copy_modified(
                             prefix=Parameters(
                                 arg_types=cactual_ps.prefix.arg_types[prefix_len:],
                                 arg_kinds=cactual_ps.prefix.arg_kinds[prefix_len:],
                                 arg_names=cactual_ps.prefix.arg_names[prefix_len:],
+                                imprecise_arg_kinds=cactual_ps.prefix.imprecise_arg_kinds,
                             )
                         )
-                        res.append(Constraint(param_spec, neg_op(self.direction), cactual_ps))
-
-                # Compare prefixes as well
-                cactual_prefix = cactual.copy_modified(
-                    arg_types=cactual.arg_types[:prefix_len],
-                    arg_kinds=cactual.arg_kinds[:prefix_len],
-                    arg_names=cactual.arg_names[:prefix_len],
-                )
-                res.extend(
-                    infer_callable_arguments_constraints(prefix, cactual_prefix, self.direction)
-                )
-
-            template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type
-            if template.type_guard is not None:
-                template_ret_type = template.type_guard
-            if cactual.type_guard is not None:
-                cactual_ret_type = cactual.type_guard
-
-            res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction))
+                if param_spec_target is not None:
+                    res.append(Constraint(param_spec, neg_op(self.direction), param_spec_target))
             if extra_tvars:
                 for c in res:
                     c.extra_tvars += cactual.variables
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index dc3dae670c1f..7168d7c30b0d 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -336,6 +336,7 @@ def visit_callable_type(self, t: CallableType) -> CallableType:
                     arg_types=self.expand_types(t.arg_types),
                     ret_type=t.ret_type.accept(self),
                     type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None),
+                    imprecise_arg_kinds=(t.imprecise_arg_kinds or repl.imprecise_arg_kinds),
                 )
             elif isinstance(repl, ParamSpecType):
                 # We're substituting one ParamSpec for another; this can mean that the prefix
@@ -352,6 +353,7 @@ def visit_callable_type(self, t: CallableType) -> CallableType:
                     arg_names=t.arg_names[:-2] + prefix.arg_names + t.arg_names[-2:],
                     ret_type=t.ret_type.accept(self),
                     from_concatenate=t.from_concatenate or bool(repl.prefix.arg_types),
+                    imprecise_arg_kinds=(t.imprecise_arg_kinds or prefix.imprecise_arg_kinds),
                 )
 
         var_arg = t.var_arg()
diff --git a/mypy/infer.py b/mypy/infer.py
index f34087910e4b..ba4a1d2bc9b1 100644
--- a/mypy/infer.py
+++ b/mypy/infer.py
@@ -33,6 +33,7 @@ def infer_function_type_arguments(
     callee_type: CallableType,
     arg_types: Sequence[Type | None],
     arg_kinds: list[ArgKind],
+    arg_names: Sequence[str | None] | None,
     formal_to_actual: list[list[int]],
     context: ArgumentInferContext,
     strict: bool = True,
@@ -53,7 +54,7 @@ def infer_function_type_arguments(
     """
     # Infer constraints.
     constraints = infer_constraints_for_callable(
-        callee_type, arg_types, arg_kinds, formal_to_actual, context
+        callee_type, arg_types, arg_kinds, arg_names, formal_to_actual, context
     )
 
     # Solve constraints.
diff --git a/mypy/types.py b/mypy/types.py
index 214978eab774..cf2c343655dd 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1560,6 +1560,7 @@ class Parameters(ProperType):
         # TODO: variables don't really belong here, but they are used to allow hacky support
         # for forall . Foo[[x: T], T] by capturing generic callable with ParamSpec, see #15909
         "variables",
+        "imprecise_arg_kinds",
     )
 
     def __init__(
@@ -1570,6 +1571,7 @@ def __init__(
         *,
         variables: Sequence[TypeVarLikeType] | None = None,
         is_ellipsis_args: bool = False,
+        imprecise_arg_kinds: bool = False,
         line: int = -1,
         column: int = -1,
     ) -> None:
@@ -1582,6 +1584,7 @@ def __init__(
         self.min_args = arg_kinds.count(ARG_POS)
         self.is_ellipsis_args = is_ellipsis_args
         self.variables = variables or []
+        self.imprecise_arg_kinds = imprecise_arg_kinds
 
     def copy_modified(
         self,
@@ -1591,6 +1594,7 @@ def copy_modified(
         *,
         variables: Bogus[Sequence[TypeVarLikeType]] = _dummy,
         is_ellipsis_args: Bogus[bool] = _dummy,
+        imprecise_arg_kinds: Bogus[bool] = _dummy,
     ) -> Parameters:
         return Parameters(
             arg_types=arg_types if arg_types is not _dummy else self.arg_types,
@@ -1600,6 +1604,11 @@ def copy_modified(
                 is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args
             ),
             variables=variables if variables is not _dummy else self.variables,
+            imprecise_arg_kinds=(
+                imprecise_arg_kinds
+                if imprecise_arg_kinds is not _dummy
+                else self.imprecise_arg_kinds
+            ),
         )
 
     # TODO: here is a lot of code duplication with Callable type, fix this.
@@ -1696,6 +1705,7 @@ def serialize(self) -> JsonDict:
             "arg_kinds": [int(x.value) for x in self.arg_kinds],
             "arg_names": self.arg_names,
             "variables": [tv.serialize() for tv in self.variables],
+            "imprecise_arg_kinds": self.imprecise_arg_kinds,
         }
 
     @classmethod
@@ -1706,6 +1716,7 @@ def deserialize(cls, data: JsonDict) -> Parameters:
             [ArgKind(x) for x in data["arg_kinds"]],
             data["arg_names"],
             variables=[cast(TypeVarLikeType, deserialize_type(v)) for v in data["variables"]],
+            imprecise_arg_kinds=data["imprecise_arg_kinds"],
         )
 
     def __hash__(self) -> int:
@@ -1762,6 +1773,7 @@ class CallableType(FunctionLike):
         "type_guard",  # T, if -> TypeGuard[T] (ret_type is bool in this case).
         "from_concatenate",  # whether this callable is from a concatenate object
         # (this is used for error messages)
+        "imprecise_arg_kinds",
         "unpack_kwargs",  # Was an Unpack[...] with **kwargs used to define this callable?
     )
 
@@ -1786,6 +1798,7 @@ def __init__(
         def_extras: dict[str, Any] | None = None,
         type_guard: Type | None = None,
         from_concatenate: bool = False,
+        imprecise_arg_kinds: bool = False,
         unpack_kwargs: bool = False,
     ) -> None:
         super().__init__(line, column)
@@ -1812,6 +1825,7 @@ def __init__(
         self.special_sig = special_sig
         self.from_type_type = from_type_type
         self.from_concatenate = from_concatenate
+        self.imprecise_arg_kinds = imprecise_arg_kinds
         if not bound_args:
             bound_args = ()
         self.bound_args = bound_args
@@ -1854,6 +1868,7 @@ def copy_modified(
         def_extras: Bogus[dict[str, Any]] = _dummy,
         type_guard: Bogus[Type | None] = _dummy,
         from_concatenate: Bogus[bool] = _dummy,
+        imprecise_arg_kinds: Bogus[bool] = _dummy,
         unpack_kwargs: Bogus[bool] = _dummy,
     ) -> CT:
         modified = CallableType(
@@ -1879,6 +1894,11 @@ def copy_modified(
             from_concatenate=(
                 from_concatenate if from_concatenate is not _dummy else self.from_concatenate
             ),
+            imprecise_arg_kinds=(
+                imprecise_arg_kinds
+                if imprecise_arg_kinds is not _dummy
+                else self.imprecise_arg_kinds
+            ),
             unpack_kwargs=unpack_kwargs if unpack_kwargs is not _dummy else self.unpack_kwargs,
         )
         # Optimization: Only NewTypes are supported as subtypes since
@@ -2191,6 +2211,7 @@ def serialize(self) -> JsonDict:
             "def_extras": dict(self.def_extras),
             "type_guard": self.type_guard.serialize() if self.type_guard is not None else None,
             "from_concatenate": self.from_concatenate,
+            "imprecise_arg_kinds": self.imprecise_arg_kinds,
             "unpack_kwargs": self.unpack_kwargs,
         }
 
@@ -2214,6 +2235,7 @@ def deserialize(cls, data: JsonDict) -> CallableType:
                 deserialize_type(data["type_guard"]) if data["type_guard"] is not None else None
             ),
             from_concatenate=data["from_concatenate"],
+            imprecise_arg_kinds=data["imprecise_arg_kinds"],
             unpack_kwargs=data["unpack_kwargs"],
         )
 
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index 257fb9241373..ed1d59b376d2 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -239,7 +239,6 @@ reveal_type(f(g, 1, y='x'))  # N: Revealed type is "None"
 f(g, 'x', y='x')  # E: Argument 2 to "f" has incompatible type "str"; expected "int"
 f(g, 1, y=1)  # E: Argument "y" to "f" has incompatible type "int"; expected "str"
 f(g)  # E: Missing positional arguments "x", "y" in call to "f"
-
 [builtins fixtures/dict.pyi]
 
 [case testParamSpecSpecialCase]
@@ -415,14 +414,19 @@ P = ParamSpec('P')
 T = TypeVar('T')
 
 # Similar to atexit.register
-def register(f: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> Callable[P, T]: ...  # N: "register" defined here
+def register(f: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> Callable[P, T]: ...
 
 def f(x: int) -> None: pass
+def g(x: int, y: str) -> None: pass
 
 reveal_type(register(lambda: f(1)))  # N: Revealed type is "def ()"
-reveal_type(register(lambda x: f(x), x=1))  # N: Revealed type is "def (x: Any)"
-register(lambda x: f(x))  # E: Missing positional argument "x" in call to "register"
-register(lambda x: f(x), y=1)  # E: Unexpected keyword argument "y" for "register"
+reveal_type(register(lambda x: f(x), x=1))  # N: Revealed type is "def (x: Literal[1]?)"
+register(lambda x: f(x))  # E: Cannot infer type of lambda \
+                          # E: Argument 1 to "register" has incompatible type "Callable[[Any], None]"; expected "Callable[[], None]"
+register(lambda x: f(x), y=1)  # E: Argument 1 to "register" has incompatible type "Callable[[Arg(int, 'x')], None]"; expected "Callable[[Arg(int, 'y')], None]"
+reveal_type(register(lambda x: f(x), 1))  # N: Revealed type is "def (Literal[1]?)"
+reveal_type(register(lambda x, y: g(x, y), 1, "a"))  # N: Revealed type is "def (Literal[1]?, Literal['a']?)"
+reveal_type(register(lambda x, y: g(x, y), 1, y="a"))  # N: Revealed type is "def (Literal[1]?, y: Literal['a']?)"
 [builtins fixtures/dict.pyi]
 
 [case testParamSpecInvalidCalls]
@@ -909,8 +913,7 @@ def f(x: int) -> int:
 
 reveal_type(A().func(f, 42))  # N: Revealed type is "builtins.int"
 
-# TODO: this should reveal `int`
-reveal_type(A().func(lambda x: x + x, 42))  # N: Revealed type is "Any"
+reveal_type(A().func(lambda x: x + x, 42))  # N: Revealed type is "builtins.int"
 [builtins fixtures/paramspec.pyi]
 
 [case testParamSpecConstraintOnOtherParamSpec]
@@ -1355,7 +1358,6 @@ P = ParamSpec('P')
 class Some(Generic[P]):
     def call(self, *args: P.args, **kwargs: P.kwargs): ...
 
-# TODO: this probably should be reported.
 def call(*args: P.args, **kwargs: P.kwargs): ...
 [builtins fixtures/paramspec.pyi]
 
@@ -1631,7 +1633,41 @@ dec(test_with_bound)(0)  # E: Value of type variable "T" of function cannot be "
 dec(test_with_bound)(A())  # OK
 [builtins fixtures/paramspec.pyi]
 
+[case testParamSpecArgumentParamInferenceRegular]
+from typing import TypeVar, Generic
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+class Foo(Generic[P]):
+    def call(self, *args: P.args, **kwargs: P.kwargs) -> None: ...
+def test(*args: P.args, **kwargs: P.kwargs) -> Foo[P]: ...
+
+reveal_type(test(1, 2))  # N: Revealed type is "__main__.Foo[[Literal[1]?, Literal[2]?]]"
+reveal_type(test(x=1, y=2))  # N: Revealed type is "__main__.Foo[[x: Literal[1]?, y: Literal[2]?]]"
+ints = [1, 2, 3]
+reveal_type(test(*ints))  # N: Revealed type is "__main__.Foo[[*builtins.int]]"
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecArgumentParamInferenceGeneric]
+# flags: --new-type-inference
+from typing import Callable, TypeVar
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+R = TypeVar("R")
+def call(f: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R:
+    return f(*args, **kwargs)
+
+T = TypeVar("T")
+def identity(x: T) -> T:
+    return x
+
+reveal_type(call(identity, 2))  # N: Revealed type is "builtins.int"
+y: int = call(identity, 2)
+[builtins fixtures/paramspec.pyi]
+
 [case testParamSpecNestedApplyNoCrash]
+# flags: --new-type-inference
 from typing import Callable, TypeVar
 from typing_extensions import ParamSpec
 
@@ -1639,9 +1675,33 @@ P = ParamSpec("P")
 T = TypeVar("T")
 
 def apply(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: ...
-def test() -> None: ...
-# TODO: avoid this error, although it may be non-trivial.
-apply(apply, test)  # E: Argument 2 to "apply" has incompatible type "Callable[[], None]"; expected "Callable[P, T]"
+def test() -> int: ...
+reveal_type(apply(apply, test))  # N: Revealed type is "builtins.int"
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecNestedApplyPosVsNamed]
+from typing import Callable, TypeVar
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+def apply(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> None: ...
+def test(x: int) -> int: ...
+apply(apply, test, x=42)  # OK
+apply(apply, test, 42)  # Also OK (but requires some special casing)
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecApplyPosVsNamedOptional]
+from typing import Callable, TypeVar
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+def apply(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> None: ...
+def test(x: str = ..., y: int = ...) -> int: ...
+apply(test, y=42)  # OK
 [builtins fixtures/paramspec.pyi]
 
 [case testParamSpecPrefixSubtypingGenericInvalid]
diff --git a/test-data/unit/fixtures/paramspec.pyi b/test-data/unit/fixtures/paramspec.pyi
index 5e4b8564e238..9b0089f6a7e9 100644
--- a/test-data/unit/fixtures/paramspec.pyi
+++ b/test-data/unit/fixtures/paramspec.pyi
@@ -30,7 +30,8 @@ class list(Sequence[T], Generic[T]):
     def __iter__(self) -> Iterator[T]: ...
 
 class int:
-    def __neg__(self) -> 'int': ...
+    def __neg__(self) -> int: ...
+    def __add__(self, other: int) -> int: ...
 
 class bool(int): ...
 class float: ...
diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test
index cd2afe2c1c75..c4c3a1d36f83 100644
--- a/test-data/unit/typexport-basic.test
+++ b/test-data/unit/typexport-basic.test
@@ -727,7 +727,7 @@ class A: pass
 class B:
   a = None # type: A
 [out]
-LambdaExpr(2) : def (B) -> A
+LambdaExpr(2) : def (x: B) -> A
 MemberExpr(2) : A
 NameExpr(2) : B
 
@@ -756,7 +756,7 @@ class B:
   a = None # type: A
 [builtins fixtures/list.pyi]
 [out]
-LambdaExpr(2) : def (B) -> builtins.list[A]
+LambdaExpr(2) : def (x: B) -> builtins.list[A]
 ListExpr(2) : builtins.list[A]
 
 [case testLambdaAndHigherOrderFunction]
@@ -775,7 +775,7 @@ map(
 CallExpr(9) : builtins.list[B]
 NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
 CallExpr(10) : B
-LambdaExpr(10) : def (A) -> B
+LambdaExpr(10) : def (x: A) -> B
 NameExpr(10) : def (a: A) -> B
 NameExpr(10) : builtins.list[A]
 NameExpr(10) : A
@@ -795,7 +795,7 @@ map(
 [builtins fixtures/list.pyi]
 [out]
 NameExpr(10) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B]
-LambdaExpr(11) : def (A) -> builtins.list[B]
+LambdaExpr(11) : def (x: A) -> builtins.list[B]
 ListExpr(11) : builtins.list[B]
 NameExpr(11) : def (a: A) -> B
 NameExpr(11) : builtins.list[A]
@@ -817,7 +817,7 @@ map(
 --      context. Perhaps just fail instead?
 CallExpr(7) : builtins.list[Any]
 NameExpr(7) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any]
-LambdaExpr(8) : def (A) -> A
+LambdaExpr(8) : def (x: A) -> A
 ListExpr(8) : builtins.list[def (A) -> Any]
 NameExpr(8) : A
 NameExpr(9) : builtins.list[A]
@@ -838,7 +838,7 @@ map(
 [out]
 CallExpr(9) : builtins.list[B]
 NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
-LambdaExpr(10) : def (A) -> B
+LambdaExpr(10) : def (x: A) -> B
 MemberExpr(10) : B
 NameExpr(10) : A
 NameExpr(11) : builtins.list[A]
@@ -860,7 +860,7 @@ map(
 CallExpr(9) : builtins.list[B]
 NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B]
 NameExpr(10) : builtins.list[A]
-LambdaExpr(11) : def (A) -> B
+LambdaExpr(11) : def (x: A) -> B
 MemberExpr(11) : B
 NameExpr(11) : A
 
@@ -1212,7 +1212,7 @@ f(
 [builtins fixtures/list.pyi]
 [out]
 NameExpr(8) : Overload(def (x: builtins.int, f: def (builtins.int) -> builtins.int), def (x: builtins.str, f: def (builtins.str) -> builtins.str))
-LambdaExpr(9) : def (builtins.int) -> builtins.int
+LambdaExpr(9) : def (x: builtins.int) -> builtins.int
 NameExpr(9) : builtins.int
 
 [case testExportOverloadArgTypeNested]
@@ -1231,10 +1231,10 @@ f(
     lambda x: x)
 [builtins fixtures/list.pyi]
 [out]
-LambdaExpr(9) : def (builtins.int) -> builtins.int
-LambdaExpr(10) : def (builtins.int) -> builtins.int
-LambdaExpr(12) : def (builtins.str) -> builtins.str
-LambdaExpr(13) : def (builtins.str) -> builtins.str
+LambdaExpr(9) : def (y: builtins.int) -> builtins.int
+LambdaExpr(10) : def (x: builtins.int) -> builtins.int
+LambdaExpr(12) : def (y: builtins.str) -> builtins.str
+LambdaExpr(13) : def (x: builtins.str) -> builtins.str
 
 -- TODO
 --

From 29abf398d6a9e88e899df8a1941019105821f9f0 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 26 Aug 2023 21:30:13 +0100
Subject: [PATCH 078/288] Support PEP 646 syntax for Callable (#15951)

Fixes https://github.com/python/mypy/issues/15412

Two new things here as specified by PEP 646:
* Using star for an (explicit) type unpacking in callables, like
`Callable[[str, *tuple[int, ...]], None]`
* Allowing suffix items after a variadic item, like `Callable[[X,
Unpack[Ys], Z], bool]`

Implementation is straightforward. Btw, while working on this I
accidentally fixed a nasty bug: tuple types were often not given any
line/column numbers, so if such a type becomes the location of an error, it
is impossible to ignore.
---
 mypy/exprtotype.py                      | 10 +++-
 mypy/fastparse.py                       | 14 ++++-
 mypy/typeanal.py                        | 73 +++++++++++++++++++------
 test-data/unit/check-typevar-tuple.test | 58 +++++++++++++++-----
 4 files changed, 123 insertions(+), 32 deletions(-)

diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py
index bbc284a5188a..b82d35607ef1 100644
--- a/mypy/exprtotype.py
+++ b/mypy/exprtotype.py
@@ -17,6 +17,7 @@
     NameExpr,
     OpExpr,
     RefExpr,
+    StarExpr,
     StrExpr,
     TupleExpr,
     UnaryExpr,
@@ -35,6 +36,7 @@
     TypeOfAny,
     UnboundType,
     UnionType,
+    UnpackType,
 )
 
 
@@ -56,6 +58,7 @@ def expr_to_unanalyzed_type(
     options: Options | None = None,
     allow_new_syntax: bool = False,
     _parent: Expression | None = None,
+    allow_unpack: bool = False,
 ) -> ProperType:
     """Translate an expression to the corresponding type.
 
@@ -163,7 +166,10 @@ def expr_to_unanalyzed_type(
         return CallableArgument(typ, name, arg_const, expr.line, expr.column)
     elif isinstance(expr, ListExpr):
         return TypeList(
-            [expr_to_unanalyzed_type(t, options, allow_new_syntax, expr) for t in expr.items],
+            [
+                expr_to_unanalyzed_type(t, options, allow_new_syntax, expr, allow_unpack=True)
+                for t in expr.items
+            ],
             line=expr.line,
             column=expr.column,
         )
@@ -189,5 +195,7 @@ def expr_to_unanalyzed_type(
         return RawExpressionType(None, "builtins.complex", line=expr.line, column=expr.column)
     elif isinstance(expr, EllipsisExpr):
         return EllipsisType(expr.line)
+    elif allow_unpack and isinstance(expr, StarExpr):
+        return UnpackType(expr_to_unanalyzed_type(expr.expr, options, allow_new_syntax))
     else:
         raise TypeTranslationError()
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 3a26cfe7d6ff..6aa626afb81e 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -115,6 +115,7 @@
     TypeOfAny,
     UnboundType,
     UnionType,
+    UnpackType,
 )
 from mypy.util import bytes_to_human_readable_repr, unnamed_function
 
@@ -1730,6 +1731,7 @@ def __init__(
         self.override_column = override_column
         self.node_stack: list[AST] = []
         self.is_evaluated = is_evaluated
+        self.allow_unpack = False
 
     def convert_column(self, column: int) -> int:
         """Apply column override if defined; otherwise return column.
@@ -2006,10 +2008,20 @@ def visit_Attribute(self, n: Attribute) -> Type:
         else:
             return self.invalid_type(n)
 
+    # Used for Callable[[X *Ys, Z], R]
+    def visit_Starred(self, n: ast3.Starred) -> Type:
+        return UnpackType(self.visit(n.value))
+
     # List(expr* elts, expr_context ctx)
     def visit_List(self, n: ast3.List) -> Type:
         assert isinstance(n.ctx, ast3.Load)
-        return self.translate_argument_list(n.elts)
+        old_allow_unpack = self.allow_unpack
+        # We specifically only allow starred expressions in a list to avoid
+        # confusing errors for top-level unpacks (e.g. in base classes).
+        self.allow_unpack = True
+        result = self.translate_argument_list(n.elts)
+        self.allow_unpack = old_allow_unpack
+        return result
 
 
 def stringify_name(n: AST) -> str | None:
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index e29cca09be63..1955d2bc3c43 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -568,7 +568,9 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ
                 instance = self.named_type("builtins.tuple", [self.anal_type(t.args[0])])
                 instance.line = t.line
                 return instance
-            return self.tuple_type(self.anal_array(t.args, allow_unpack=True))
+            return self.tuple_type(
+                self.anal_array(t.args, allow_unpack=True), line=t.line, column=t.column
+            )
         elif fullname == "typing.Union":
             items = self.anal_array(t.args)
             return UnionType.make_union(items)
@@ -968,7 +970,10 @@ def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type:
         return t
 
     def visit_unpack_type(self, t: UnpackType) -> Type:
-        raise NotImplementedError
+        if not self.allow_unpack:
+            self.fail(message_registry.INVALID_UNPACK_POSITION, t.type, code=codes.VALID_TYPE)
+            return AnyType(TypeOfAny.from_error)
+        return UnpackType(self.anal_type(t.type))
 
     def visit_parameters(self, t: Parameters) -> Type:
         raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars")
@@ -1364,12 +1369,22 @@ def analyze_callable_type(self, t: UnboundType) -> Type:
         assert isinstance(ret, CallableType)
         return ret.accept(self)
 
+    def refers_to_full_names(self, arg: UnboundType, names: Sequence[str]) -> bool:
+        sym = self.lookup_qualified(arg.name, arg)
+        if sym is not None:
+            if sym.fullname in names:
+                return True
+        return False
+
     def analyze_callable_args(
         self, arglist: TypeList
     ) -> tuple[list[Type], list[ArgKind], list[str | None]] | None:
         args: list[Type] = []
         kinds: list[ArgKind] = []
         names: list[str | None] = []
+        seen_unpack = False
+        unpack_types: list[Type] = []
+        invalid_unpacks = []
         for arg in arglist.items:
             if isinstance(arg, CallableArgument):
                 args.append(arg.typ)
@@ -1390,20 +1405,42 @@ def analyze_callable_args(
                     if arg.name is not None and kind.is_star():
                         self.fail(f"{arg.constructor} arguments should not have names", arg)
                         return None
-            elif isinstance(arg, UnboundType):
-                kind = ARG_POS
-                # Potentially a unpack.
-                sym = self.lookup_qualified(arg.name, arg)
-                if sym is not None:
-                    if sym.fullname in ("typing_extensions.Unpack", "typing.Unpack"):
-                        kind = ARG_STAR
-                args.append(arg)
-                kinds.append(kind)
-                names.append(None)
+            elif (
+                isinstance(arg, UnboundType)
+                and self.refers_to_full_names(arg, ("typing_extensions.Unpack", "typing.Unpack"))
+                or isinstance(arg, UnpackType)
+            ):
+                if seen_unpack:
+                    # Multiple unpacks, preserve them, so we can give an error later.
+                    invalid_unpacks.append(arg)
+                    continue
+                seen_unpack = True
+                unpack_types.append(arg)
+            else:
+                if seen_unpack:
+                    unpack_types.append(arg)
+                else:
+                    args.append(arg)
+                    kinds.append(ARG_POS)
+                    names.append(None)
+        if seen_unpack:
+            if len(unpack_types) == 1:
+                args.append(unpack_types[0])
             else:
-                args.append(arg)
-                kinds.append(ARG_POS)
-                names.append(None)
+                first = unpack_types[0]
+                if isinstance(first, UnpackType):
+                    # UnpackType doesn't have its own line/column numbers,
+                    # so use the unpacked type for error messages.
+                    first = first.type
+                args.append(
+                    UnpackType(self.tuple_type(unpack_types, line=first.line, column=first.column))
+                )
+            kinds.append(ARG_STAR)
+            names.append(None)
+        for arg in invalid_unpacks:
+            args.append(arg)
+            kinds.append(ARG_STAR)
+            names.append(None)
         # Note that arglist below is only used for error context.
         check_arg_names(names, [arglist] * len(args), self.fail, "Callable")
         check_arg_kinds(kinds, [arglist] * len(args), self.fail)
@@ -1713,9 +1750,11 @@ def check_unpacks_in_list(self, items: list[Type]) -> list[Type]:
             self.fail("More than one Unpack in a type is not allowed", final_unpack)
         return new_items
 
-    def tuple_type(self, items: list[Type]) -> TupleType:
+    def tuple_type(self, items: list[Type], line: int, column: int) -> TupleType:
         any_type = AnyType(TypeOfAny.special_form)
-        return TupleType(items, fallback=self.named_type("builtins.tuple", [any_type]))
+        return TupleType(
+            items, fallback=self.named_type("builtins.tuple", [any_type]), line=line, column=column
+        )
 
 
 TypeVarLikeList = List[Tuple[str, TypeVarLikeExpr]]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index ee81597edadf..c7716f3e8346 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -509,6 +509,51 @@ call_prefix(target=func_prefix, args=(0, 'foo'))
 call_prefix(target=func2_prefix, args=(0, 'foo'))  # E: Argument "target" to "call_prefix" has incompatible type "Callable[[str, int, str], None]"; expected "Callable[[bytes, int, str], None]"
 [builtins fixtures/tuple.pyi]
 
+[case testTypeVarTuplePep646CallableSuffixSyntax]
+from typing import Callable, Tuple, TypeVar
+from typing_extensions import Unpack, TypeVarTuple
+
+x: Callable[[str, Unpack[Tuple[int, ...]], bool], None]
+reveal_type(x)  # N: Revealed type is "def (builtins.str, *Unpack[Tuple[Unpack[builtins.tuple[builtins.int, ...]], builtins.bool]])"
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+A = Callable[[T, Unpack[Ts], S], int]
+y: A[int, str, bool]
+reveal_type(y)  # N: Revealed type is "def (builtins.int, builtins.str, builtins.bool) -> builtins.int"
+z: A[Unpack[Tuple[int, ...]]]
+reveal_type(z)  # N: Revealed type is "def (builtins.int, *Unpack[Tuple[Unpack[builtins.tuple[builtins.int, ...]], builtins.int]]) -> builtins.int"
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTuplePep646CallableInvalidSyntax]
+from typing import Callable, Tuple, TypeVar
+from typing_extensions import Unpack, TypeVarTuple
+
+Ts = TypeVarTuple("Ts")
+Us = TypeVarTuple("Us")
+a: Callable[[Unpack[Ts], Unpack[Us]], int]  # E: Var args may not appear after named or var args \
+                                            # E: More than one Unpack in a type is not allowed
+reveal_type(a)  # N: Revealed type is "def [Ts, Us] (*Unpack[Ts`-1]) -> builtins.int"
+b: Callable[[Unpack], int]  # E: Unpack[...] requires exactly one type argument
+reveal_type(b)  # N: Revealed type is "def (*Any) -> builtins.int"
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTuplePep646CallableNewSyntax]
+from typing import Callable, Generic, Tuple
+from typing_extensions import ParamSpec
+
+x: Callable[[str, *Tuple[int, ...]], None]
+reveal_type(x)  # N: Revealed type is "def (builtins.str, *builtins.int)"
+y: Callable[[str, *Tuple[int, ...], bool], None]
+reveal_type(y)  # N: Revealed type is "def (builtins.str, *Unpack[Tuple[Unpack[builtins.tuple[builtins.int, ...]], builtins.bool]])"
+
+P = ParamSpec("P")
+class C(Generic[P]): ...
+bad: C[[int, *Tuple[int, ...], int]]  # E: Unpack is only valid in a variadic position
+reveal_type(bad)  # N: Revealed type is "__main__.C[[builtins.int, *Any]]"
+[builtins fixtures/tuple.pyi]
+
 [case testTypeVarTuplePep646UnspecifiedParameters]
 from typing import Tuple, Generic, TypeVar
 from typing_extensions import Unpack, TypeVarTuple
@@ -635,19 +680,6 @@ x: A[str, str]
 reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.str, builtins.str]"
 [builtins fixtures/tuple.pyi]
 
-[case testVariadicAliasWrongCallable]
-from typing import TypeVar, Callable
-from typing_extensions import Unpack, TypeVarTuple
-
-T = TypeVar("T")
-S = TypeVar("S")
-Ts = TypeVarTuple("Ts")
-
-A = Callable[[T, Unpack[Ts], S], int]  # E: Required positional args may not appear after default, named or var args
-x: A[int, str, int, str]
-reveal_type(x)  # N: Revealed type is "def (builtins.int, builtins.str, builtins.int, builtins.str) -> builtins.int"
-[builtins fixtures/tuple.pyi]
-
 [case testVariadicAliasMultipleUnpacks]
 from typing import Tuple, Generic, Callable
 from typing_extensions import Unpack, TypeVarTuple

From efecd591e4198232f35e1db66bf99e56fc2f068b Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 26 Aug 2023 21:34:38 +0100
Subject: [PATCH 079/288] Support user defined variadic tuple types (#15961)

Fixes https://github.com/python/mypy/issues/15946

Note this actually adds support also for variadic NamedTuples and
variadic TypedDicts. Not that anyone requested this, but since generic
NamedTuples and generic TypedDicts are supported using the same
mechanism (special aliases) as generic tuple types (like `class
A(Tuple[T, S]): ...` in the issue), it looked more risky and arbitrary
to _not_ support them.

Btw the implementation is simple, but while I was working on this, I
accidentally found a problem with my general idea of doing certain type
normalizations in `semanal_typeargs.py`. The problem is that sometimes
we can call `get_proper_type()` during semantic analysis, so all the
code that gets triggered by this (mostly `expand_type()`) can't really
rely on types being normalized. Fortunately, with just few tweaks I
managed to make the code mostly robust to such scenarios (TBH there are
few possible holes left, but this is getting really complex, I think it
is better to release this, and see if people will ever hit such
scenarios, then fix accordingly).
---
 mypy/expandtype.py                      |  7 +-
 mypy/maptype.py                         |  1 -
 mypy/nodes.py                           | 12 +++-
 mypy/semanal.py                         | 10 ++-
 mypy/semanal_typeargs.py                | 14 ++--
 mypy/semanal_typeddict.py               |  1 +
 mypy/typeanal.py                        | 12 ++--
 mypy/types.py                           | 26 ++++++--
 test-data/unit/check-typevar-tuple.test | 87 +++++++++++++++++++++++++
 9 files changed, 149 insertions(+), 21 deletions(-)

diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 7168d7c30b0d..ef8ebe1a9128 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -269,7 +269,8 @@ def visit_unpack_type(self, t: UnpackType) -> Type:
         # instead.
         # However, if the item is a variadic tuple, we can simply carry it over.
         # In particular, if we expand A[*tuple[T, ...]] with substitutions {T: str},
-        # it is hard to assert this without getting proper type.
+        # it is hard to assert this without getting proper type. Another important
+        # example is non-normalized types when called from semanal.py.
         return UnpackType(t.type.accept(self))
 
     def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType:
@@ -414,6 +415,10 @@ def visit_tuple_type(self, t: TupleType) -> Type:
                     unpacked = get_proper_type(item.type)
                     if isinstance(unpacked, Instance):
                         assert unpacked.type.fullname == "builtins.tuple"
+                        if t.partial_fallback.type.fullname != "builtins.tuple":
+                            # If it is a subtype (like named tuple) we need to preserve it,
+                            # this essentially mimics the logic in tuple_fallback().
+                            return t.partial_fallback.accept(self)
                         return unpacked
             fallback = t.partial_fallback.accept(self)
             assert isinstance(fallback, ProperType) and isinstance(fallback, Instance)
diff --git a/mypy/maptype.py b/mypy/maptype.py
index cae904469fed..4951306573c2 100644
--- a/mypy/maptype.py
+++ b/mypy/maptype.py
@@ -113,6 +113,5 @@ def instance_to_type_environment(instance: Instance) -> dict[TypeVarId, Type]:
     required number of type arguments.  So this environment consists
     of the class's type variables mapped to the Instance's actual
     arguments.  The type variables are mapped by their `id`.
-
     """
     return {binder.id: arg for binder, arg in zip(instance.type.defn.type_vars, instance.args)}
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 7efb01c1b18e..9b4ba5e76667 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -3546,7 +3546,12 @@ def from_tuple_type(cls, info: TypeInfo) -> TypeAlias:
         assert info.tuple_type
         # TODO: is it possible to refactor this to set the correct type vars here?
         return TypeAlias(
-            info.tuple_type.copy_modified(fallback=mypy.types.Instance(info, info.defn.type_vars)),
+            info.tuple_type.copy_modified(
+                # Create an Instance similar to fill_typevars().
+                fallback=mypy.types.Instance(
+                    info, mypy.types.type_vars_as_args(info.defn.type_vars)
+                )
+            ),
             info.fullname,
             info.line,
             info.column,
@@ -3563,7 +3568,10 @@ def from_typeddict_type(cls, info: TypeInfo) -> TypeAlias:
         # TODO: is it possible to refactor this to set the correct type vars here?
         return TypeAlias(
             info.typeddict_type.copy_modified(
-                fallback=mypy.types.Instance(info, info.defn.type_vars)
+                # Create an Instance similar to fill_typevars().
+                fallback=mypy.types.Instance(
+                    info, mypy.types.type_vars_as_args(info.defn.type_vars)
+                )
             ),
             info.fullname,
             info.line,
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 55d4e6a3f506..be7e733a0816 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -277,6 +277,7 @@
     get_proper_types,
     is_named_instance,
     remove_dups,
+    type_vars_as_args,
 )
 from mypy.types_utils import is_invalid_recursive_alias, store_argument_type
 from mypy.typevars import fill_typevars
@@ -1702,12 +1703,17 @@ def setup_type_vars(self, defn: ClassDef, tvar_defs: list[TypeVarLikeType]) -> N
     def setup_alias_type_vars(self, defn: ClassDef) -> None:
         assert defn.info.special_alias is not None
         defn.info.special_alias.alias_tvars = list(defn.type_vars)
+        # It is a bit unfortunate that we need to inline some logic from TypeAlias constructor,
+        # but it is required, since type variables may change during semantic analyzer passes.
+        for i, t in enumerate(defn.type_vars):
+            if isinstance(t, TypeVarTupleType):
+                defn.info.special_alias.tvar_tuple_index = i
         target = defn.info.special_alias.target
         assert isinstance(target, ProperType)
         if isinstance(target, TypedDictType):
-            target.fallback.args = tuple(defn.type_vars)
+            target.fallback.args = type_vars_as_args(defn.type_vars)
         elif isinstance(target, TupleType):
-            target.partial_fallback.args = tuple(defn.type_vars)
+            target.partial_fallback.args = type_vars_as_args(defn.type_vars)
         else:
             assert False, f"Unexpected special alias type: {type(target)}"
 
diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py
index 1a37ac57be30..1ae6fada8f38 100644
--- a/mypy/semanal_typeargs.py
+++ b/mypy/semanal_typeargs.py
@@ -86,31 +86,31 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None:
         # correct aliases. Also, variadic aliases are better to check when fully analyzed,
         # so we do this here.
         assert t.alias is not None, f"Unfixed type alias {t.type_ref}"
-        args = flatten_nested_tuples(t.args)
+        # TODO: consider moving this validation to typeanal.py, expanding invalid aliases
+        # during semantic analysis may cause crashes.
         if t.alias.tvar_tuple_index is not None:
-            correct = len(args) >= len(t.alias.alias_tvars) - 1
+            correct = len(t.args) >= len(t.alias.alias_tvars) - 1
             if any(
                 isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance)
-                for a in args
+                for a in t.args
             ):
                 correct = True
         else:
-            correct = len(args) == len(t.alias.alias_tvars)
+            correct = len(t.args) == len(t.alias.alias_tvars)
         if not correct:
             if t.alias.tvar_tuple_index is not None:
                 exp_len = f"at least {len(t.alias.alias_tvars) - 1}"
             else:
                 exp_len = f"{len(t.alias.alias_tvars)}"
             self.fail(
-                f"Bad number of arguments for type alias, expected: {exp_len}, given: {len(args)}",
+                "Bad number of arguments for type alias,"
+                f" expected: {exp_len}, given: {len(t.args)}",
                 t,
                 code=codes.TYPE_ARG,
             )
             t.args = set_any_tvars(
                 t.alias, t.line, t.column, self.options, from_error=True, fail=self.fail
             ).args
-        else:
-            t.args = args
         is_error = self.validate_args(t.alias.name, t.args, t.alias.alias_tvars, t)
         if not is_error:
             # If there was already an error for the alias itself, there is no point in checking
diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py
index aba5bf69b130..fb3fa713e3fb 100644
--- a/mypy/semanal_typeddict.py
+++ b/mypy/semanal_typeddict.py
@@ -252,6 +252,7 @@ def map_items_to_base(
             if not tvars:
                 mapped_items[key] = type_in_base
                 continue
+            # TODO: simple zip can't be used for variadic types.
             mapped_items[key] = expand_type(
                 type_in_base, {t.id: a for (t, a) in zip(tvars, base_args)}
             )
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 1955d2bc3c43..ed1a8073887b 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -82,6 +82,7 @@
     UnionType,
     UnpackType,
     callable_with_ellipsis,
+    flatten_nested_tuples,
     flatten_nested_unions,
     get_proper_type,
     has_type_vars,
@@ -763,8 +764,8 @@ def analyze_type_with_type_info(
             if info.special_alias:
                 return instantiate_type_alias(
                     info.special_alias,
-                    # TODO: should we allow NamedTuples generic in ParamSpec and TypeVarTuple?
-                    self.anal_array(args),
+                    # TODO: should we allow NamedTuples generic in ParamSpec?
+                    self.anal_array(args, allow_unpack=True),
                     self.fail,
                     False,
                     ctx,
@@ -782,7 +783,7 @@ def analyze_type_with_type_info(
                 return instantiate_type_alias(
                     info.special_alias,
                     # TODO: should we allow TypedDicts generic in ParamSpec?
-                    self.anal_array(args),
+                    self.anal_array(args, allow_unpack=True),
                     self.fail,
                     False,
                     ctx,
@@ -1948,7 +1949,10 @@ def instantiate_type_alias(
     # TODO: we need to check args validity w.r.t alias.alias_tvars.
     # Otherwise invalid instantiations will be allowed in runtime context.
     # Note: in type context, these will be still caught by semanal_typeargs.
-    typ = TypeAliasType(node, args, ctx.line, ctx.column)
+    # Type aliases are special, since they can be expanded during semantic analysis,
+    # so we need to normalize them as soon as possible.
+    # TODO: can this cause an infinite recursion?
+    typ = TypeAliasType(node, flatten_nested_tuples(args), ctx.line, ctx.column)
     assert typ.alias is not None
     # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here.
     if (
diff --git a/mypy/types.py b/mypy/types.py
index cf2c343655dd..fb360fb892f1 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1045,9 +1045,12 @@ class UnpackType(ProperType):
     """Type operator Unpack from PEP646. Can be either with Unpack[]
     or unpacking * syntax.
 
-    The inner type should be either a TypeVarTuple, a constant size
-    tuple, or a variable length tuple. Type aliases to these are not allowed,
-    except during semantic analysis.
+    The inner type should be either a TypeVarTuple, or a variable length tuple.
+    In an exceptional case of callable star argument it can be a fixed length tuple.
+
+    Note: the above restrictions are only guaranteed by normalizations after semantic
+    analysis, if your code needs to handle UnpackType *during* semantic analysis, it is
+    wild west, technically anything can be present in the wrapped type.
     """
 
     __slots__ = ["type"]
@@ -2143,7 +2146,11 @@ def with_normalized_var_args(self) -> Self:
                     assert nested_unpacked.type.fullname == "builtins.tuple"
                     new_unpack = nested_unpacked.args[0]
                 else:
-                    assert isinstance(nested_unpacked, TypeVarTupleType)
+                    if not isinstance(nested_unpacked, TypeVarTupleType):
+                        # We found a non-nomralized tuple type, this means this method
+                        # is called during semantic analysis (e.g. from get_proper_type())
+                        # there is no point in normalizing callables at this stage.
+                        return self
                     new_unpack = nested_unpack
             else:
                 new_unpack = UnpackType(
@@ -3587,6 +3594,17 @@ def remove_dups(types: list[T]) -> list[T]:
     return new_types
 
 
+def type_vars_as_args(type_vars: Sequence[TypeVarLikeType]) -> tuple[Type, ...]:
+    """Represent type variables as they would appear in a type argument list."""
+    args: list[Type] = []
+    for tv in type_vars:
+        if isinstance(tv, TypeVarTupleType):
+            args.append(UnpackType(tv))
+        else:
+            args.append(tv)
+    return tuple(args)
+
+
 # This cyclic import is unfortunate, but to avoid it we would need to move away all uses
 # of get_proper_type() from types.py. Majority of them have been removed, but few remaining
 # are quite tricky to get rid of, but ultimately we want to do it at some point.
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index c7716f3e8346..a36c4d4d6741 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -1032,3 +1032,90 @@ Second = Tuple[C, D]
 x: G[Unpack[First], Unpack[Second]]  # E: Type argument "A" of "G" must be a subtype of "int" \
                                      # E: Type argument "D" of "G" must be a subtype of "str"
 [builtins fixtures/tuple.pyi]
+
+[case testVariadicTupleType]
+from typing import Tuple, Callable
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class A(Tuple[Unpack[Ts]]):
+    fn: Callable[[Unpack[Ts]], None]
+
+x: A[int]
+reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, fallback=__main__.A[builtins.int]]"
+reveal_type(x[0])  # N: Revealed type is "builtins.int"
+reveal_type(x.fn)  # N: Revealed type is "def (builtins.int)"
+
+y: A[int, str]
+reveal_type(y)  # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.A[builtins.int, builtins.str]]"
+reveal_type(y[0])  # N: Revealed type is "builtins.int"
+reveal_type(y.fn)  # N: Revealed type is "def (builtins.int, builtins.str)"
+
+z: A[Unpack[Tuple[int, ...]]]
+reveal_type(z)  # N: Revealed type is "__main__.A[Unpack[builtins.tuple[builtins.int, ...]]]"
+# TODO: this requires fixing map_instance_to_supertype().
+# reveal_type(z[0])
+reveal_type(z.fn)  # N: Revealed type is "def (*builtins.int)"
+
+t: A[int, Unpack[Tuple[int, str]], str]
+reveal_type(t)  # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.str, builtins.str, fallback=__main__.A[builtins.int, builtins.int, builtins.str, builtins.str]]"
+reveal_type(t[0])  # N: Revealed type is "builtins.int"
+reveal_type(t.fn)  # N: Revealed type is "def (builtins.int, builtins.int, builtins.str, builtins.str)"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicNamedTuple]
+from typing import Tuple, Callable, NamedTuple, Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class A(NamedTuple, Generic[Unpack[Ts], T]):
+    fn: Callable[[Unpack[Ts]], None]
+    val: T
+
+y: A[int, str]
+reveal_type(y)  # N: Revealed type is "Tuple[def (builtins.int), builtins.str, fallback=__main__.A[builtins.int, builtins.str]]"
+reveal_type(y[0])  # N: Revealed type is "def (builtins.int)"
+reveal_type(y.fn)  # N: Revealed type is "def (builtins.int)"
+
+z: A[Unpack[Tuple[int, ...]]]
+reveal_type(z)  # N: Revealed type is "Tuple[def (*builtins.int), builtins.int, fallback=__main__.A[Unpack[builtins.tuple[builtins.int, ...]], builtins.int]]"
+reveal_type(z.fn)  # N: Revealed type is "def (*builtins.int)"
+
+t: A[int, Unpack[Tuple[int, str]], str]
+reveal_type(t)  # N: Revealed type is "Tuple[def (builtins.int, builtins.int, builtins.str), builtins.str, fallback=__main__.A[builtins.int, builtins.int, builtins.str, builtins.str]]"
+
+def test(x: int, y: str) -> None: ...
+nt = A(fn=test, val=42)
+reveal_type(nt)  # N: Revealed type is "Tuple[def (builtins.int, builtins.str), builtins.int, fallback=__main__.A[builtins.int, builtins.str, builtins.int]]"
+
+def bad() -> int: ...
+nt2 = A(fn=bad, val=42)  # E: Argument "fn" to "A" has incompatible type "Callable[[], int]"; expected "Callable[[], None]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicTypedDict]
+from typing import Tuple, Callable, Generic
+from typing_extensions import TypeVarTuple, Unpack, TypedDict
+
+Ts = TypeVarTuple("Ts")
+class A(TypedDict, Generic[Unpack[Ts], T]):
+    fn: Callable[[Unpack[Ts]], None]
+    val: T
+
+y: A[int, str]
+reveal_type(y)  # N: Revealed type is "TypedDict('__main__.A', {'fn': def (builtins.int), 'val': builtins.str})"
+reveal_type(y["fn"])  # N: Revealed type is "def (builtins.int)"
+
+z: A[Unpack[Tuple[int, ...]]]
+reveal_type(z)  # N: Revealed type is "TypedDict('__main__.A', {'fn': def (*builtins.int), 'val': builtins.int})"
+reveal_type(z["fn"])  # N: Revealed type is "def (*builtins.int)"
+
+t: A[int, Unpack[Tuple[int, str]], str]
+reveal_type(t)  # N: Revealed type is "TypedDict('__main__.A', {'fn': def (builtins.int, builtins.int, builtins.str), 'val': builtins.str})"
+
+def test(x: int, y: str) -> None: ...
+td = A({"fn": test, "val": 42})
+reveal_type(td)  # N: Revealed type is "TypedDict('__main__.A', {'fn': def (builtins.int, builtins.str), 'val': builtins.int})"
+
+def bad() -> int: ...
+td2 = A({"fn": bad, "val": 42})  # E: Incompatible types (expression has type "Callable[[], int]", TypedDict item "fn" has type "Callable[[], None]")
+[builtins fixtures/tuple.pyi]

From d7b24514d7301f86031b7d1e2215cf8c2476bec0 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Sun, 27 Aug 2023 23:20:13 +0100
Subject: [PATCH 080/288] Fixes to stubtest's new check for missing stdlib
 modules (#15960)

- It's not easy to predict where stdlib modules are going to be located.
(It varies between platforms, and between venvs and conda envs; on some
platforms it's in a completely different directory to the Python
executable.)
- Some modules appear to raise `SystemExit` when stubtest tries to
import them in CI, leading stubtest to instantly exit without logging a
message to the terminal.
- Importing some `test.*` submodules leads to unraisable exceptions
being printed to the terminal at the end of the stubtest run, which is
somewhat annoying.
---
 mypy/stubtest.py | 41 ++++++++++++++++++++++++++++++-----------
 1 file changed, 30 insertions(+), 11 deletions(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index 34bb985b702e..a804835a632b 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -23,6 +23,7 @@
 import typing
 import typing_extensions
 import warnings
+from collections import defaultdict
 from contextlib import redirect_stderr, redirect_stdout
 from functools import singledispatch
 from pathlib import Path
@@ -1679,16 +1680,22 @@ def get_importable_stdlib_modules() -> set[str]:
         all_stdlib_modules = sys.stdlib_module_names
     else:
         all_stdlib_modules = set(sys.builtin_module_names)
-        python_exe_dir = Path(sys.executable).parent
+        modules_by_finder: defaultdict[importlib.machinery.FileFinder, set[str]] = defaultdict(set)
         for m in pkgutil.iter_modules():
-            finder = m.module_finder
-            if isinstance(finder, importlib.machinery.FileFinder):
-                finder_path = Path(finder.path)
-                if (
-                    python_exe_dir in finder_path.parents
-                    and "site-packages" not in finder_path.parts
-                ):
-                    all_stdlib_modules.add(m.name)
+            if isinstance(m.module_finder, importlib.machinery.FileFinder):
+                modules_by_finder[m.module_finder].add(m.name)
+        for finder, module_group in modules_by_finder.items():
+            if (
+                "site-packages" not in Path(finder.path).parents
+                # if "_queue" is present, it's most likely the module finder
+                # for stdlib extension modules;
+                # if "queue" is present, it's most likely the module finder
+                # for pure-Python stdlib modules.
+                # In either case, we'll want to add all the modules that the finder has to offer us.
+                # This is a bit hacky, but seems to work well in a cross-platform way.
+                and {"_queue", "queue"} & module_group
+            ):
+                all_stdlib_modules.update(module_group)
 
     importable_stdlib_modules: set[str] = set()
     for module_name in all_stdlib_modules:
@@ -1719,13 +1726,25 @@ def get_importable_stdlib_modules() -> set[str]:
             # The idlelib.* submodules are similarly annoying in opening random tkinter windows,
             # and we're unlikely to ever add stubs for idlelib in typeshed
             # (see discussion in https://github.com/python/typeshed/pull/9193)
-            if submodule_name.endswith(".__main__") or submodule_name.startswith("idlelib."):
+            #
+            # test.* modules do weird things like raising exceptions in __del__ methods,
+            # leading to unraisable exceptions being logged to the terminal
+            # as a warning at the end of the stubtest run
+            if (
+                submodule_name.endswith(".__main__")
+                or submodule_name.startswith("idlelib.")
+                or submodule_name.startswith("test.")
+            ):
                 continue
 
             try:
                 silent_import_module(submodule_name)
+            except KeyboardInterrupt:
+                raise
             # importing multiprocessing.popen_forkserver on Windows raises AttributeError...
-            except Exception:
+            # some submodules also appear to raise SystemExit on some Python versions
+            # (not sure exactly which)
+            except BaseException:
                 continue
             else:
                 importable_stdlib_modules.add(submodule_name)

From 010da0b2f48dc92be2f79495fd4551c92351868f Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Mon, 28 Aug 2023 04:03:50 -0400
Subject: [PATCH 081/288] attrs, dataclasses: don't enforce slots when base
 doesn't (#15976)

Doing the same thing we do for regular classes.

Fixes #15975
---
 mypy/plugins/attrs.py                  |  5 +++++
 mypy/plugins/dataclasses.py            |  6 ++++++
 test-data/unit/check-dataclasses.test  | 16 ++++++++++++++++
 test-data/unit/check-plugin-attrs.test | 15 +++++++++++++++
 4 files changed, 42 insertions(+)

diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py
index d444c18852dd..3d326a5f4e80 100644
--- a/mypy/plugins/attrs.py
+++ b/mypy/plugins/attrs.py
@@ -893,6 +893,11 @@ def _add_attrs_magic_attribute(
 
 
 def _add_slots(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) -> None:
+    if any(p.slots is None for p in ctx.cls.info.mro[1:-1]):
+        # At least one type in mro (excluding `self` and `object`)
+        # does not have concrete `__slots__` defined. Ignoring.
+        return
+
     # Unlike `@dataclasses.dataclass`, `__slots__` is rewritten here.
     ctx.cls.info.slots = {attr.name for attr in attributes}
 
diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py
index d782acf50af5..39b597491e9e 100644
--- a/mypy/plugins/dataclasses.py
+++ b/mypy/plugins/dataclasses.py
@@ -443,6 +443,12 @@ def add_slots(
                 self._cls,
             )
             return
+
+        if any(p.slots is None for p in info.mro[1:-1]):
+            # At least one type in mro (excluding `self` and `object`)
+            # does not have concrete `__slots__` defined. Ignoring.
+            return
+
         info.slots = generated_slots
 
         # Now, insert `.__slots__` attribute to class namespace:
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index 7881dfbcf1bb..91c409807497 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -1519,6 +1519,22 @@ class Some:
         self.y = 1  # E: Trying to assign name "y" that is not in "__slots__" of type "__main__.Some"
 [builtins fixtures/dataclasses.pyi]
 
+[case testDataclassWithSlotsDerivedFromNonSlot]
+# flags: --python-version 3.10
+from dataclasses import dataclass
+
+class A:
+    pass
+
+@dataclass(slots=True)
+class B(A):
+    x: int
+
+    def __post_init__(self) -> None:
+        self.y = 42
+
+[builtins fixtures/dataclasses.pyi]
+
 [case testDataclassWithSlotsConflict]
 # flags: --python-version 3.10
 from dataclasses import dataclass
diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test
index 7580531bebc9..e8598132c50e 100644
--- a/test-data/unit/check-plugin-attrs.test
+++ b/test-data/unit/check-plugin-attrs.test
@@ -1677,6 +1677,21 @@ class C:
         self.c = 2  # E: Trying to assign name "c" that is not in "__slots__" of type "__main__.C"
 [builtins fixtures/plugin_attrs.pyi]
 
+[case testAttrsClassWithSlotsDerivedFromNonSlots]
+import attrs
+
+class A:
+    pass
+
+@attrs.define(slots=True)
+class B(A):
+    x: int
+
+    def __attrs_post_init__(self) -> None:
+        self.y = 42
+
+[builtins fixtures/plugin_attrs.pyi]
+
 [case testRuntimeSlotsAttr]
 from attr import dataclass
 

From 171402834faece2e20760f0d02e96aa3714324c2 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Tue, 29 Aug 2023 10:17:52 +0100
Subject: [PATCH 082/288] Optimize Unpack for failures (#15967)

This is a small but possibly important PR. Wherever possible we should
represent user error and/or failed type inference as `*tuple[Any,
...]`/`*tuple[<nothing>, ...]`, rather than
`Unpack[Any]`/`Unpack[<nothing>]` or plain `Any`/`<nothing>`. This way
we will not need any special casing for failure conditions in various
places without risking a crash instead of a graceful failure (error
message).
---
 mypy/expandtype.py                      | 23 ++++++-----------------
 mypy/semanal_main.py                    |  2 ++
 mypy/semanal_typeargs.py                | 21 ++++++++++++++-------
 test-data/unit/check-typevar-tuple.test |  5 ++---
 4 files changed, 24 insertions(+), 27 deletions(-)

diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index ef8ebe1a9128..26353c043cb7 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -273,7 +273,7 @@ def visit_unpack_type(self, t: UnpackType) -> Type:
         # example is non-normalized types when called from semanal.py.
         return UnpackType(t.type.accept(self))
 
-    def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType:
+    def expand_unpack(self, t: UnpackType) -> list[Type]:
         assert isinstance(t.type, TypeVarTupleType)
         repl = get_proper_type(self.variables.get(t.type.id, t.type))
         if isinstance(repl, TupleType):
@@ -285,9 +285,9 @@ def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType
         ):
             return [UnpackType(typ=repl)]
         elif isinstance(repl, (AnyType, UninhabitedType)):
-            # tuple[Any, ...] for Any would be better, but we don't have
-            # the type info to construct that type here.
-            return repl
+            # Replace *Ts = Any with *Ts = *tuple[Any, ...] and same for <nothing>.
+            # These types may appear here as a result of user error or failed inference.
+            return [UnpackType(t.type.tuple_fallback.copy_modified(args=[repl]))]
         else:
             raise RuntimeError(f"Invalid type replacement to expand: {repl}")
 
@@ -310,12 +310,7 @@ def interpolate_args_for_unpack(self, t: CallableType, var_arg: UnpackType) -> l
             # We have plain Unpack[Ts]
             assert isinstance(var_arg_type, TypeVarTupleType)
             fallback = var_arg_type.tuple_fallback
-            expanded_items_res = self.expand_unpack(var_arg)
-            if isinstance(expanded_items_res, list):
-                expanded_items = expanded_items_res
-            else:
-                # We got Any or <nothing>
-                return prefix + [expanded_items_res] + suffix
+            expanded_items = self.expand_unpack(var_arg)
         new_unpack = UnpackType(TupleType(expanded_items, fallback))
         return prefix + [new_unpack] + suffix
 
@@ -394,14 +389,8 @@ def expand_types_with_unpack(
         items: list[Type] = []
         for item in typs:
             if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType):
-                unpacked_items = self.expand_unpack(item)
-                if isinstance(unpacked_items, (AnyType, UninhabitedType)):
-                    # TODO: better error for <nothing>, something like tuple of unknown?
-                    return unpacked_items
-                else:
-                    items.extend(unpacked_items)
+                items.extend(self.expand_unpack(item))
             else:
-                # Must preserve original aliases when possible.
                 items.append(item.accept(self))
         return items
 
diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py
index 51a7014fac1a..ec09deb0952f 100644
--- a/mypy/semanal_main.py
+++ b/mypy/semanal_main.py
@@ -381,6 +381,7 @@ def check_type_arguments(graph: Graph, scc: list[str], errors: Errors) -> None:
             errors,
             state.options,
             is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""),
+            state.manager.semantic_analyzer.named_type,
         )
         with state.wrap_context():
             with mypy.state.state.strict_optional_set(state.options.strict_optional):
@@ -399,6 +400,7 @@ def check_type_arguments_in_targets(
         errors,
         state.options,
         is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""),
+        state.manager.semantic_analyzer.named_type,
     )
     with state.wrap_context():
         with mypy.state.state.strict_optional_set(state.options.strict_optional):
diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py
index 1ae6fada8f38..749b02391e06 100644
--- a/mypy/semanal_typeargs.py
+++ b/mypy/semanal_typeargs.py
@@ -7,7 +7,7 @@
 
 from __future__ import annotations
 
-from typing import Sequence
+from typing import Callable, Sequence
 
 from mypy import errorcodes as codes, message_registry
 from mypy.errorcodes import ErrorCode
@@ -42,11 +42,18 @@
 
 
 class TypeArgumentAnalyzer(MixedTraverserVisitor):
-    def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None:
+    def __init__(
+        self,
+        errors: Errors,
+        options: Options,
+        is_typeshed_file: bool,
+        named_type: Callable[[str, list[Type]], Instance],
+    ) -> None:
         super().__init__()
         self.errors = errors
         self.options = options
         self.is_typeshed_file = is_typeshed_file
+        self.named_type = named_type
         self.scope = Scope()
         # Should we also analyze function definitions, or only module top-levels?
         self.recurse_into_functions = True
@@ -243,16 +250,16 @@ def visit_unpack_type(self, typ: UnpackType) -> None:
             return
         if isinstance(proper_type, TypeVarTupleType):
             return
+        # TODO: this should probably be .has_base("builtins.tuple"), also elsewhere.
         if isinstance(proper_type, Instance) and proper_type.type.fullname == "builtins.tuple":
             return
-        if isinstance(proper_type, AnyType) and proper_type.type_of_any == TypeOfAny.from_error:
-            return
-        if not isinstance(proper_type, UnboundType):
-            # Avoid extra errors if there were some errors already.
+        if not isinstance(proper_type, (UnboundType, AnyType)):
+            # Avoid extra errors if there were some errors already. Also interpret plain Any
+            # as tuple[Any, ...] (this is better for the code in type checker).
             self.fail(
                 message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ
             )
-        typ.type = AnyType(TypeOfAny.from_error)
+        typ.type = self.named_type("builtins.tuple", [AnyType(TypeOfAny.from_error)])
 
     def check_type_var_values(
         self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index a36c4d4d6741..c8b33ec96b06 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -17,8 +17,7 @@ reveal_type(f(args))  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
 
 reveal_type(f(varargs))  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
 
-if object():
-    f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected <nothing>
+f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[<nothing>, ...]"
 
 def g(a: Tuple[Unpack[Ts]], b: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]:
     return a
@@ -26,7 +25,7 @@ def g(a: Tuple[Unpack[Ts]], b: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]:
 reveal_type(g(args, args))  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
 reveal_type(g(args, args2))  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
 reveal_type(g(args, args3))  # N: Revealed type is "builtins.tuple[builtins.object, ...]"
-reveal_type(g(any, any))  # N: Revealed type is "Any"
+reveal_type(g(any, any))  # N: Revealed type is "builtins.tuple[Any, ...]"
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTupleMixed]

From 6c16143c3a68c99f6e4c99974c44cf3abf867103 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Tue, 29 Aug 2023 23:12:02 +0300
Subject: [PATCH 083/288] Improve GitHub Actions specs (#15965)

Two main changes:
1. Always use secure permissions: when a workflow does not need to write
anything, it has to be `contents: read` only
2. Be more consistent with canceling workflows
---
 .github/workflows/build_wheels.yml  | 3 +++
 .github/workflows/docs.yml          | 7 +++++++
 .github/workflows/mypy_primer.yml   | 3 +++
 .github/workflows/test.yml          | 3 +++
 .github/workflows/test_stubgenc.yml | 7 +++++++
 5 files changed, 23 insertions(+)

diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml
index e728d741d90d..3f4ea5e42f9b 100644
--- a/.github/workflows/build_wheels.yml
+++ b/.github/workflows/build_wheels.yml
@@ -5,6 +5,9 @@ on:
     branches: [main, master, 'release*']
     tags: ['*']
 
+permissions:
+  contents: write
+
 jobs:
   build-wheels:
     if: github.repository == 'python/mypy'
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 5dc86a1159f4..9f3a6121ae16 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -14,6 +14,13 @@ on:
     - CREDITS
     - LICENSE
 
+permissions:
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   docs:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml
index e7e4af1f07b7..2958b8fc325b 100644
--- a/.github/workflows/mypy_primer.yml
+++ b/.github/workflows/mypy_primer.yml
@@ -15,6 +15,9 @@ on:
     - 'mypy/test/**'
     - 'test-data/**'
 
+permissions:
+  contents: read
+
 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
   cancel-in-progress: true
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index f594353ed05a..0e335a59d1d0 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -15,6 +15,9 @@ on:
     - CREDITS
     - LICENSE
 
+permissions:
+  contents: read
+
 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
   cancel-in-progress: true
diff --git a/.github/workflows/test_stubgenc.yml b/.github/workflows/test_stubgenc.yml
index db9bf413faa3..33466b9870ff 100644
--- a/.github/workflows/test_stubgenc.yml
+++ b/.github/workflows/test_stubgenc.yml
@@ -12,6 +12,13 @@ on:
     - 'mypy/stubdoc.py'
     - 'test-data/stubgen/**'
 
+permissions:
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   stubgenc:
     # Check stub file generation for a small pybind11 project

From d6df8e883e927920bbe50aab779e7591e31533c6 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Tue, 29 Aug 2023 19:29:19 -0400
Subject: [PATCH 084/288] dataclasses.replace: fall through to typeshed sig
 (#15962)

If the dataclasses plugin cannot determine a signature for
`dataclasses.replace`, it should not report an error. The underlying
typeshed signature will get a shot at verifying the type and reporting
an error, and it would enable the following pattern (without typing
`replace`'s kwargs, though)
---
 mypy/plugins/dataclasses.py                   | 25 +----------
 test-data/unit/check-dataclass-transform.test |  2 +-
 test-data/unit/check-dataclasses.test         | 44 +++++++++++++++----
 test-data/unit/lib-stub/dataclasses.pyi       | 20 ++++++++-
 4 files changed, 55 insertions(+), 36 deletions(-)

diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py
index 39b597491e9e..8b34c28b6832 100644
--- a/mypy/plugins/dataclasses.py
+++ b/mypy/plugins/dataclasses.py
@@ -972,25 +972,6 @@ def _has_direct_dataclass_transform_metaclass(info: TypeInfo) -> bool:
     )
 
 
-def _fail_not_dataclass(ctx: FunctionSigContext, t: Type, parent_t: Type) -> None:
-    t_name = format_type_bare(t, ctx.api.options)
-    if parent_t is t:
-        msg = (
-            f'Argument 1 to "replace" has a variable type "{t_name}" not bound to a dataclass'
-            if isinstance(t, TypeVarType)
-            else f'Argument 1 to "replace" has incompatible type "{t_name}"; expected a dataclass'
-        )
-    else:
-        pt_name = format_type_bare(parent_t, ctx.api.options)
-        msg = (
-            f'Argument 1 to "replace" has type "{pt_name}" whose item "{t_name}" is not bound to a dataclass'
-            if isinstance(t, TypeVarType)
-            else f'Argument 1 to "replace" has incompatible type "{pt_name}" whose item "{t_name}" is not a dataclass'
-        )
-
-    ctx.api.fail(msg, ctx.context)
-
-
 def _get_expanded_dataclasses_fields(
     ctx: FunctionSigContext, typ: ProperType, display_typ: ProperType, parent_typ: ProperType
 ) -> list[CallableType] | None:
@@ -999,9 +980,7 @@ def _get_expanded_dataclasses_fields(
     For generic classes, the field types are expanded.
     If the type contains Any or a non-dataclass, returns None; in the latter case, also reports an error.
     """
-    if isinstance(typ, AnyType):
-        return None
-    elif isinstance(typ, UnionType):
+    if isinstance(typ, UnionType):
         ret: list[CallableType] | None = []
         for item in typ.relevant_items():
             item = get_proper_type(item)
@@ -1018,14 +997,12 @@ def _get_expanded_dataclasses_fields(
     elif isinstance(typ, Instance):
         replace_sym = typ.type.get_method(_INTERNAL_REPLACE_SYM_NAME)
         if replace_sym is None:
-            _fail_not_dataclass(ctx, display_typ, parent_typ)
             return None
         replace_sig = replace_sym.type
         assert isinstance(replace_sig, ProperType)
         assert isinstance(replace_sig, CallableType)
         return [expand_type_by_instance(replace_sig, typ)]
     else:
-        _fail_not_dataclass(ctx, display_typ, parent_typ)
         return None
 
 
diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test
index 9029582ece82..58cd5e5a90f8 100644
--- a/test-data/unit/check-dataclass-transform.test
+++ b/test-data/unit/check-dataclass-transform.test
@@ -853,7 +853,7 @@ class Person:
     name: str
 
 p = Person('John')
-y = replace(p, name='Bob')  # E: Argument 1 to "replace" has incompatible type "Person"; expected a dataclass
+y = replace(p, name='Bob')
 
 [typing fixtures/typing-full.pyi]
 [builtins fixtures/dataclasses.pyi]
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index 91c409807497..1f5f5635de4e 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -2122,6 +2122,8 @@ a2 = replace(a, x='42', q=42)  # E: Argument "x" to "replace" of "A" has incompa
 a2 = replace(a, q='42')  # E: Argument "q" to "replace" of "A" has incompatible type "str"; expected "int"
 reveal_type(a2)  # N: Revealed type is "__main__.A"
 
+[builtins fixtures/tuple.pyi]
+
 [case testReplaceUnion]
 from typing import Generic, Union, TypeVar
 from dataclasses import dataclass, replace, InitVar
@@ -2151,7 +2153,7 @@ _ = replace(a_or_b, x=42, y=True, z='42', init_var=42)  # E: Argument "z" to "re
 _ = replace(a_or_b, x=42, y=True, w={}, init_var=42)  # E: Argument "w" to "replace" of "Union[A[int], B]" has incompatible type "Dict[<nothing>, <nothing>]"; expected <nothing>
 _ = replace(a_or_b, y=42, init_var=42)  # E: Argument "y" to "replace" of "Union[A[int], B]" has incompatible type "int"; expected "bool"
 
-[builtins fixtures/dataclasses.pyi]
+[builtins fixtures/tuple.pyi]
 
 [case testReplaceUnionOfTypeVar]
 from typing import Generic, Union, TypeVar
@@ -2171,7 +2173,9 @@ TA = TypeVar('TA', bound=A)
 TB = TypeVar('TB', bound=B)
 
 def f(b_or_t: Union[TA, TB, int]) -> None:
-    a2 = replace(b_or_t)   # E: Argument 1 to "replace" has type "Union[TA, TB, int]" whose item "TB" is not bound to a dataclass  # E: Argument 1 to "replace" has incompatible type "Union[TA, TB, int]" whose item "int" is not a dataclass
+    a2 = replace(b_or_t)   # E: Value of type variable "_DataclassT" of "replace" cannot be "Union[TA, TB, int]"
+
+[builtins fixtures/tuple.pyi]
 
 [case testReplaceTypeVarBoundNotDataclass]
 from dataclasses import dataclass, replace
@@ -2183,16 +2187,18 @@ TNone = TypeVar('TNone', bound=None)
 TUnion = TypeVar('TUnion', bound=Union[str, int])
 
 def f1(t: TInt) -> None:
-    _ = replace(t, x=42)  # E: Argument 1 to "replace" has a variable type "TInt" not bound to a dataclass
+    _ = replace(t, x=42)  # E: Value of type variable "_DataclassT" of "replace" cannot be "TInt"
 
 def f2(t: TAny) -> TAny:
-    return replace(t, x='spam')  # E: Argument 1 to "replace" has a variable type "TAny" not bound to a dataclass
+    return replace(t, x='spam')  # E: Value of type variable "_DataclassT" of "replace" cannot be "TAny"
 
 def f3(t: TNone) -> TNone:
-    return replace(t, x='spam')  # E: Argument 1 to "replace" has a variable type "TNone" not bound to a dataclass
+    return replace(t, x='spam')  # E: Value of type variable "_DataclassT" of "replace" cannot be "TNone"
 
 def f4(t: TUnion) -> TUnion:
-    return replace(t, x='spam')  # E: Argument 1 to "replace" has incompatible type "TUnion" whose item "str" is not a dataclass  # E: Argument 1 to "replace" has incompatible type "TUnion" whose item "int" is not a dataclass
+    return replace(t, x='spam')  # E: Value of type variable "_DataclassT" of "replace" cannot be "TUnion"
+
+[builtins fixtures/tuple.pyi]
 
 [case testReplaceTypeVarBound]
 from dataclasses import dataclass, replace
@@ -2217,6 +2223,8 @@ def f(t: TA) -> TA:
 f(A(x=42))
 f(B(x=42))
 
+[builtins fixtures/tuple.pyi]
+
 [case testReplaceAny]
 from dataclasses import replace
 from typing import Any
@@ -2225,17 +2233,33 @@ a: Any
 a2 = replace(a)
 reveal_type(a2)  # N: Revealed type is "Any"
 
+[builtins fixtures/tuple.pyi]
+
 [case testReplaceNotDataclass]
 from dataclasses import replace
 
-replace(5)  # E: Argument 1 to "replace" has incompatible type "int"; expected a dataclass
+replace(5)  # E: Value of type variable "_DataclassT" of "replace" cannot be "int"
 
 class C:
     pass
 
-replace(C())  # E: Argument 1 to "replace" has incompatible type "C"; expected a dataclass
+replace(C())  # E: Value of type variable "_DataclassT" of "replace" cannot be "C"
 
-replace(None)  # E: Argument 1 to "replace" has incompatible type "None"; expected a dataclass
+replace(None)  # E: Value of type variable "_DataclassT" of "replace" cannot be "None"
+
+[builtins fixtures/tuple.pyi]
+
+[case testReplaceIsDataclass]
+from dataclasses import is_dataclass, replace
+
+def f(x: object) -> None:
+  _ = replace(x)  # E: Value of type variable "_DataclassT" of "replace" cannot be "object"
+  if is_dataclass(x):
+    _ = replace(x)  # E: Value of type variable "_DataclassT" of "replace" cannot be "Union[DataclassInstance, Type[DataclassInstance]]"
+    if not isinstance(x, type):
+      _ = replace(x)
+
+[builtins fixtures/tuple.pyi]
 
 [case testReplaceGeneric]
 from dataclasses import dataclass, replace, InitVar
@@ -2254,6 +2278,8 @@ reveal_type(a2)  # N: Revealed type is "__main__.A[builtins.int]"
 a2 = replace(a, x='42')  # E: Argument "x" to "replace" of "A[int]" has incompatible type "str"; expected "int"
 reveal_type(a2)  # N: Revealed type is "__main__.A[builtins.int]"
 
+[builtins fixtures/tuple.pyi]
+
 [case testPostInitCorrectSignature]
 from typing import Any, Generic, TypeVar, Callable, Self
 from dataclasses import dataclass, InitVar
diff --git a/test-data/unit/lib-stub/dataclasses.pyi b/test-data/unit/lib-stub/dataclasses.pyi
index b2b48c2ae486..cf43747757bd 100644
--- a/test-data/unit/lib-stub/dataclasses.pyi
+++ b/test-data/unit/lib-stub/dataclasses.pyi
@@ -1,6 +1,14 @@
-from typing import Any, Callable, Generic, Mapping, Optional, TypeVar, overload, Type
+from typing import Any, Callable, Generic, Literal, Mapping, Optional, TypeVar, overload, Type, \
+    Protocol, ClassVar
+from typing_extensions import TypeGuard
+
+# DataclassInstance is in _typeshed.pyi normally, but alas we can't do the same for lib-stub
+# due to test-data/unit/lib-stub/builtins.pyi not having 'tuple'.
+class DataclassInstance(Protocol):
+    __dataclass_fields__: ClassVar[dict[str, Field[Any]]]
 
 _T = TypeVar('_T')
+_DataclassT = TypeVar("_DataclassT", bound=DataclassInstance)
 
 class InitVar(Generic[_T]):
     ...
@@ -33,4 +41,12 @@ def field(*,
 
 class Field(Generic[_T]): pass
 
-def replace(__obj: _T, **changes: Any) -> _T: ...
+@overload
+def is_dataclass(obj: DataclassInstance) -> Literal[True]: ...
+@overload
+def is_dataclass(obj: type) -> TypeGuard[type[DataclassInstance]]: ...
+@overload
+def is_dataclass(obj: object) -> TypeGuard[DataclassInstance | type[DataclassInstance]]: ...
+
+
+def replace(__obj: _DataclassT, **changes: Any) -> _DataclassT: ...

From 379b52f2cfff4955589df714cb2dd904be482e76 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Tue, 29 Aug 2023 17:46:46 -0700
Subject: [PATCH 085/288] Try upgrading tox (#15992)

Fixes #15990
---
 .github/workflows/docs.yml | 2 +-
 .github/workflows/test.yml | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 9f3a6121ae16..037738d4b3aa 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -34,7 +34,7 @@ jobs:
         with:
           python-version: '3.8'
       - name: Install tox
-        run: pip install --upgrade 'setuptools!=50' tox==4.4.4
+        run: pip install --upgrade 'setuptools!=50' tox==4.11.0
       - name: Setup tox environment
         run: tox run -e ${{ env.TOXENV }} --notest
       - name: Test
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 0e335a59d1d0..d2e7e7258500 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -133,7 +133,7 @@ jobs:
         ./misc/build-debug-python.sh $PYTHONVERSION $PYTHONDIR $VENV
         source $VENV/bin/activate
     - name: Install tox
-      run: pip install --upgrade 'setuptools!=50' tox==4.4.4
+      run: pip install --upgrade 'setuptools!=50' tox==4.11.0
     - name: Compiled with mypyc
       if: ${{ matrix.test_mypyc }}
       run: |
@@ -185,7 +185,7 @@ jobs:
           default: 3.11.1
           command: python -c "import platform; print(f'{platform.architecture()=} {platform.machine()=}');"
       - name: Install tox
-        run: pip install --upgrade 'setuptools!=50' tox==4.4.4
+        run: pip install --upgrade 'setuptools!=50' tox==4.11.0
       - name: Setup tox environment
         run: tox run -e py --notest
       - name: Test

From 2298829ab3b7339427ec957ec5c21955d3657c6f Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Tue, 29 Aug 2023 20:49:24 -0400
Subject: [PATCH 086/288] attrs: remove fields type check (#15983)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Since https://github.com/python-attrs/attrs/pull/890 (≥ 22.1.0)
`attrs.fields` is typed to accept a protocol.
Since https://github.com/python-attrs/attrs/pull/997 (≥ 22.2.0)
`attrs.has` is a type-guard.

Support both by removing the explicit error reporting and letting it
fall through to the type stub.

Fixes #15980.
---
 mypy/plugins/attrs.py                      |  5 -----
 test-data/unit/check-plugin-attrs.test     | 16 ++++++++++------
 test-data/unit/lib-stub/attrs/__init__.pyi | 13 ++++++++++---
 3 files changed, 20 insertions(+), 14 deletions(-)

diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py
index 3d326a5f4e80..6f5b6f35da07 100644
--- a/mypy/plugins/attrs.py
+++ b/mypy/plugins/attrs.py
@@ -1111,9 +1111,4 @@ def fields_function_sig_callback(ctx: mypy.plugin.FunctionSigContext) -> Callabl
         assert ret_type is not None
         return ctx.default_signature.copy_modified(arg_types=arg_types, ret_type=ret_type)
 
-    ctx.api.fail(
-        f'Argument 1 to "fields" has incompatible type "{format_type_bare(proper_type, ctx.api.options)}"; expected an attrs class',
-        ctx.context,
-    )
-
     return ctx.default_signature
diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test
index e8598132c50e..1465bab2bb7b 100644
--- a/test-data/unit/check-plugin-attrs.test
+++ b/test-data/unit/check-plugin-attrs.test
@@ -1596,16 +1596,18 @@ def f(t: TA) -> None:
 [builtins fixtures/plugin_attrs.pyi]
 
 [case testNonattrsFields]
-# flags: --no-strict-optional
 from typing import Any, cast, Type
-from attrs import fields
+from attrs import fields, has
 
 class A:
     b: int
     c: str
 
-fields(A)  # E: Argument 1 to "fields" has incompatible type "Type[A]"; expected an attrs class
-fields(None)  # E: Argument 1 to "fields" has incompatible type "None"; expected an attrs class
+if has(A):
+    fields(A)
+else:
+    fields(A)  # E: Argument 1 to "fields" has incompatible type "Type[A]"; expected "Type[AttrsInstance]"
+fields(None)  # E: Argument 1 to "fields" has incompatible type "None"; expected "Type[AttrsInstance]"
 fields(cast(Any, 42))
 fields(cast(Type[Any], 43))
 
@@ -2167,7 +2169,8 @@ TA = TypeVar('TA', bound=A)
 TB = TypeVar('TB', bound=B)
 
 def f(b_or_t: TA | TB | int) -> None:
-    a2 = attrs.evolve(b_or_t)   # E: Argument 1 to "evolve" has type "Union[TA, TB, int]" whose item "TB" is not bound to an attrs class  # E: Argument 1 to "evolve" has incompatible type "Union[TA, TB, int]" whose item "int" is not an attrs class
+    a2 = attrs.evolve(b_or_t)  # E: Argument 1 to "evolve" has type "Union[TA, TB, int]" whose item "TB" is not bound to an attrs class \
+                               # E: Argument 1 to "evolve" has incompatible type "Union[TA, TB, int]" whose item "int" is not an attrs class
 
 
 [builtins fixtures/plugin_attrs.pyi]
@@ -2216,7 +2219,8 @@ def h(t: TNone) -> None:
     _ = attrs.evolve(t, x=42)  # E: Argument 1 to "evolve" has a variable type "TNone" not bound to an attrs class
 
 def x(t: TUnion) -> None:
-    _ = attrs.evolve(t, x=42)  # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "str" is not an attrs class  # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "int" is not an attrs class
+    _ = attrs.evolve(t, x=42)  # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "str" is not an attrs class \
+                               # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "int" is not an attrs class
 
 [builtins fixtures/plugin_attrs.pyi]
 
diff --git a/test-data/unit/lib-stub/attrs/__init__.pyi b/test-data/unit/lib-stub/attrs/__init__.pyi
index a575f97da9bc..7a88170d7271 100644
--- a/test-data/unit/lib-stub/attrs/__init__.pyi
+++ b/test-data/unit/lib-stub/attrs/__init__.pyi
@@ -1,7 +1,14 @@
-from typing import TypeVar, overload, Callable, Any, Optional, Union, Sequence, Mapping, Generic
+from typing import TypeVar, overload, Callable, Any, Optional, Union, Sequence, Mapping, \
+    Protocol, ClassVar, Type
+from typing_extensions import TypeGuard
 
 from attr import Attribute as Attribute
 
+
+class AttrsInstance(Protocol):
+    __attrs_attrs__: ClassVar[Any]
+
+
 _T = TypeVar('_T')
 _C = TypeVar('_C', bound=type)
 
@@ -131,5 +138,5 @@ def field(
 
 def evolve(inst: _T, **changes: Any) -> _T: ...
 def assoc(inst: _T, **changes: Any) -> _T: ...
-
-def fields(cls: type) -> Any: ...
+def has(cls: type) -> TypeGuard[Type[AttrsInstance]]: ...
+def fields(cls: Type[AttrsInstance]) -> Any: ...

From 5783af495f22e2abc42b3c153b0bea2faa9b72e7 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Tue, 29 Aug 2023 18:50:20 -0700
Subject: [PATCH 087/288] Fix inference for properties with __call__ (#15926)

Fixes #5858
---
 mypy/checkmember.py                 | 21 +++++++++++++--------
 test-data/unit/check-functions.test | 17 +++++++++++++++++
 2 files changed, 30 insertions(+), 8 deletions(-)

diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 1bdc00a6eb59..f7d002f17eb9 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Callable, Sequence, cast
+from typing import TYPE_CHECKING, Callable, Optional, Sequence, cast
 
 from mypy import meet, message_registry, subtypes
 from mypy.erasetype import erase_typevars
@@ -776,12 +776,17 @@ def analyze_var(
         freeze_all_type_vars(t)
         result: Type = t
         typ = get_proper_type(typ)
-        if (
-            var.is_initialized_in_class
-            and (not is_instance_var(var) or mx.is_operator)
-            and isinstance(typ, FunctionLike)
-            and not typ.is_type_obj()
-        ):
+
+        call_type: Optional[ProperType] = None
+        if var.is_initialized_in_class and (not is_instance_var(var) or mx.is_operator):
+            if isinstance(typ, FunctionLike) and not typ.is_type_obj():
+                call_type = typ
+            elif var.is_property:
+                call_type = get_proper_type(_analyze_member_access("__call__", typ, mx))
+            else:
+                call_type = typ
+
+        if isinstance(call_type, FunctionLike) and not call_type.is_type_obj():
             if mx.is_lvalue:
                 if var.is_property:
                     if not var.is_settable_property:
@@ -792,7 +797,7 @@ def analyze_var(
             if not var.is_staticmethod:
                 # Class-level function objects and classmethods become bound methods:
                 # the former to the instance, the latter to the class.
-                functype = typ
+                functype: FunctionLike = call_type
                 # Use meet to narrow original_type to the dispatched type.
                 # For example, assume
                 # * A.f: Callable[[A1], None] where A1 <: A (maybe A1 == A)
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
index f49541420cc0..4cc523a595d1 100644
--- a/test-data/unit/check-functions.test
+++ b/test-data/unit/check-functions.test
@@ -3158,3 +3158,20 @@ class C(A, B):
 class D(A, B):
     def f(self, z: int) -> str: pass  # E: Method "f" is not using @override but is overriding a method in class "__main__.A"
 [typing fixtures/typing-override.pyi]
+
+[case testCallableProperty]
+from typing import Callable
+
+class something_callable:
+    def __call__(self, fn) -> str: ...
+
+def decorator(fn: Callable[..., int]) -> something_callable: ...
+
+class A:
+    @property
+    @decorator
+    def f(self) -> int: ...
+
+reveal_type(A.f)  # N: Revealed type is "__main__.something_callable"
+reveal_type(A().f)  # N: Revealed type is "builtins.str"
+[builtins fixtures/property.pyi]

From 0ae0c750b7c39c875f5ea536408143fe32d920d8 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 30 Aug 2023 03:08:05 +0100
Subject: [PATCH 088/288] Fix ParamSpec inference for callback protocols
 (#15986)

Fixes https://github.com/python/mypy/issues/15984

Fix is straightforward, `ParamSpec` inference special-casing should put
instances with `__call__` and callable types on same ground.
---
 mypy/checkexpr.py                                 |  4 ++++
 test-data/unit/check-parameter-specification.test | 15 +++++++++++++++
 2 files changed, 19 insertions(+)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 4430d0773cfa..218568007b9e 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -2185,6 +2185,10 @@ def get_arg_infer_passes(
                     #   run(test, 1, 2)
                     # we will use `test` for inference, since it will allow to infer also
                     # argument *names* for P <: [x: int, y: int].
+                    if isinstance(p_actual, Instance):
+                        call_method = find_member("__call__", p_actual, p_actual, is_operator=True)
+                        if call_method is not None:
+                            p_actual = get_proper_type(call_method)
                     if (
                         isinstance(p_actual, CallableType)
                         and not p_actual.variables
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index ed1d59b376d2..a98c92ce14e7 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1824,3 +1824,18 @@ class C(Generic[P]): ...
 c: C[int, [int, str], str]  # E: Nested parameter specifications are not allowed
 reveal_type(c)  # N: Revealed type is "__main__.C[Any]"
 [builtins fixtures/paramspec.pyi]
+
+[case testParamSpecInferenceWithCallbackProtocol]
+from typing import Protocol, Callable, ParamSpec
+
+class CB(Protocol):
+    def __call__(self, x: str, y: int) -> None: ...
+
+P = ParamSpec('P')
+def g(fn: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ...
+
+cb: CB
+g(cb, y=0, x='a')  # OK
+g(cb, y='a', x=0)  # E: Argument "y" to "g" has incompatible type "str"; expected "int" \
+                   # E: Argument "x" to "g" has incompatible type "int"; expected "str"
+[builtins fixtures/paramspec.pyi]

From a7e0f6f8b0ec5de2fe7b804c9ac7160893ae5bf8 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Tue, 29 Aug 2023 22:13:01 -0400
Subject: [PATCH 089/288] Add hint for AsyncIterator incompatible return type
 (#15883)

For issue described in #5070 and documented in #14973, add a contextual
link to the docs.
---
 mypy/messages.py                      | 16 ++++++++++++++++
 test-data/unit/check-async-await.test | 16 ++++++++++++++++
 2 files changed, 32 insertions(+)

diff --git a/mypy/messages.py b/mypy/messages.py
index aab30ee29108..1933b74d27bd 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -1310,6 +1310,22 @@ def return_type_incompatible_with_supertype(
             code=codes.OVERRIDE,
         )
 
+        original = get_proper_type(original)
+        override = get_proper_type(override)
+        if (
+            isinstance(original, Instance)
+            and isinstance(override, Instance)
+            and override.type.fullname == "typing.AsyncIterator"
+            and original.type.fullname == "typing.Coroutine"
+            and len(original.args) == 3
+            and original.args[2] == override
+        ):
+            self.note(f'Consider declaring "{name}" in {target} without "async"', context)
+            self.note(
+                "See https://mypy.readthedocs.io/en/stable/more_types.html#asynchronous-iterators",
+                context,
+            )
+
     def override_target(self, name: str, name_in_super: str, supertype: str) -> str:
         target = f'supertype "{supertype}"'
         if name_in_super != name:
diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test
index 653025a0bb24..7afdbd687135 100644
--- a/test-data/unit/check-async-await.test
+++ b/test-data/unit/check-async-await.test
@@ -1021,3 +1021,19 @@ def coro() -> Generator[int, None, None]:
 reveal_type(coro)  # N: Revealed type is "def () -> typing.AwaitableGenerator[builtins.int, None, None, typing.Generator[builtins.int, None, None]]"
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-async.pyi]
+
+[case asyncIteratorInProtocol]
+from typing import AsyncIterator, Protocol
+
+class P(Protocol):
+    async def launch(self) -> AsyncIterator[int]:
+        raise BaseException
+
+class Launcher(P):
+    def launch(self) -> AsyncIterator[int]:  # E: Return type "AsyncIterator[int]" of "launch" incompatible with return type "Coroutine[Any, Any, AsyncIterator[int]]" in supertype "P" \
+                                             # N: Consider declaring "launch" in supertype "P" without "async" \
+                                             # N: See https://mypy.readthedocs.io/en/stable/more_types.html#asynchronous-iterators
+        raise BaseException
+
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-async.pyi]

From 9a0aca14724eeef580eefcd3b340678313a1930a Mon Sep 17 00:00:00 2001
From: Max Murin <maxmurin@dropbox.com>
Date: Wed, 30 Aug 2023 02:24:06 -0700
Subject: [PATCH 090/288] Update version number to 1.7.0+dev (#15989)

We've cut the release branch for 1.6, so the dev version should now be
1.7.

Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
---
 mypy/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypy/version.py b/mypy/version.py
index 512890ce7d2b..7cfc68d6e553 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -8,7 +8,7 @@
 # - Release versions have the form "1.2.3".
 # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440).
 # - Before 1.0 we had the form "0.NNN".
-__version__ = "1.6.0+dev"
+__version__ = "1.7.0+dev"
 base_version = __version__
 
 mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))

From df4717ee2cbbeb9e47fbd0e60edcaa6f81bbd7bb Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Thu, 31 Aug 2023 05:39:09 -0700
Subject: [PATCH 091/288] Represent bottom type as Never in messages (#15996)

Fixes #15950
---
 mypy/checker.py                               | 12 +++++-----
 mypy/checkexpr.py                             |  4 ++--
 mypy/expandtype.py                            |  2 +-
 mypy/meet.py                                  |  4 ++--
 mypy/messages.py                              |  6 ++---
 mypy/solve.py                                 |  4 ++--
 mypy/typeops.py                               |  2 +-
 mypy/types.py                                 |  2 +-
 test-data/unit/check-classes.test             | 16 ++++++-------
 test-data/unit/check-dataclass-transform.test | 12 +++++-----
 test-data/unit/check-dataclasses.test         |  8 +++----
 test-data/unit/check-generic-subtyping.test   |  2 +-
 test-data/unit/check-generics.test            | 12 +++++-----
 test-data/unit/check-inference-context.test   | 16 ++++++-------
 test-data/unit/check-inference.test           | 24 +++++++++----------
 test-data/unit/check-isinstance.test          |  4 ++--
 test-data/unit/check-literal.test             |  4 ++--
 test-data/unit/check-narrowing.test           |  2 +-
 test-data/unit/check-native-int.test          |  8 +++----
 test-data/unit/check-overloading.test         | 12 +++++-----
 .../unit/check-parameter-specification.test   |  6 ++---
 test-data/unit/check-plugin-attrs.test        | 10 ++++----
 test-data/unit/check-protocols.test           |  2 +-
 test-data/unit/check-python310.test           |  2 +-
 test-data/unit/check-selftype.test            |  4 ++--
 test-data/unit/check-singledispatch.test      |  2 +-
 test-data/unit/check-typeddict.test           | 10 ++++----
 test-data/unit/check-typevar-tuple.test       |  2 +-
 test-data/unit/check-unreachable-code.test    | 10 ++++----
 test-data/unit/check-varargs.test             | 10 ++++----
 test-data/unit/pythoneval-asyncio.test        |  2 +-
 test-data/unit/pythoneval.test                |  6 ++---
 32 files changed, 111 insertions(+), 111 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index a44601b83e21..fffa87c4f634 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -3934,7 +3934,7 @@ def is_valid_defaultdict_partial_value_type(self, t: ProperType) -> bool:
         Examples:
 
           * t is 'int' --> True
-          * t is 'list[<nothing>]' --> True
+          * t is 'list[Never]' --> True
           * t is 'dict[...]' --> False (only generic types with a single type
             argument supported)
         """
@@ -3980,7 +3980,7 @@ def set_inference_error_fallback_type(self, var: Var, lvalue: Lvalue, type: Type
           x = []  # type: ignore
           x.append(1)   # Should be ok!
 
-        We implement this here by giving x a valid type (replacing inferred <nothing> with Any).
+        We implement this here by giving x a valid type (replacing inferred Never with Any).
         """
         fallback = self.inference_error_fallback_type(type)
         self.set_inferred_type(var, lvalue, fallback)
@@ -7403,7 +7403,7 @@ def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool:
 class InvalidInferredTypes(BoolTypeQuery):
     """Find type components that are not valid for an inferred type.
 
-    These include <Erased> type, and any <nothing> types resulting from failed
+    These include <Erased> type, and any uninhabited types resulting from failed
     (ambiguous) type inference.
     """
 
@@ -7424,7 +7424,7 @@ def visit_type_var(self, t: TypeVarType) -> bool:
 
 
 class SetNothingToAny(TypeTranslator):
-    """Replace all ambiguous <nothing> types with Any (to avoid spurious extra errors)."""
+    """Replace all ambiguous Uninhabited types with Any (to avoid spurious extra errors)."""
 
     def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
         if t.ambiguous:
@@ -7432,7 +7432,7 @@ def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
         return t
 
     def visit_type_alias_type(self, t: TypeAliasType) -> Type:
-        # Target of the alias cannot be an ambiguous <nothing>, so we just
+        # Target of the alias cannot be an ambiguous UninhabitedType, so we just
         # replace the arguments.
         return t.copy_modified(args=[a.accept(self) for a in t.args])
 
@@ -7774,7 +7774,7 @@ def is_subtype_no_promote(left: Type, right: Type) -> bool:
 
 
 def is_overlapping_types_no_promote_no_uninhabited_no_none(left: Type, right: Type) -> bool:
-    # For the purpose of unsafe overload checks we consider list[<nothing>] and list[int]
+    # For the purpose of unsafe overload checks we consider list[Never] and list[int]
     # non-overlapping. This is consistent with how we treat list[int] and list[str] as
     # non-overlapping, despite [] belongs to both. Also this will prevent false positives
     # for failed type inference during unification.
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 218568007b9e..22a9852545b7 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -2080,7 +2080,7 @@ def infer_function_type_arguments(
                 ):
                     freeze_all_type_vars(applied)
                     return applied
-                # If it didn't work, erase free variables as <nothing>, to avoid confusing errors.
+                # If it didn't work, erase free variables as uninhabited, to avoid confusing errors.
                 unknown = UninhabitedType()
                 unknown.ambiguous = True
                 inferred_args = [
@@ -2444,7 +2444,7 @@ def check_argument_types(
                         callee_arg_types = [orig_callee_arg_type]
                         callee_arg_kinds = [ARG_STAR]
                     else:
-                        # TODO: Any and <nothing> can appear in Unpack (as a result of user error),
+                        # TODO: Any and Never can appear in Unpack (as a result of user error),
                         # fail gracefully here and elsewhere (and/or normalize them away).
                         assert isinstance(unpacked_type, Instance)
                         assert unpacked_type.type.fullname == "builtins.tuple"
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 26353c043cb7..be8ecb9ccfd9 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -285,7 +285,7 @@ def expand_unpack(self, t: UnpackType) -> list[Type]:
         ):
             return [UnpackType(typ=repl)]
         elif isinstance(repl, (AnyType, UninhabitedType)):
-            # Replace *Ts = Any with *Ts = *tuple[Any, ...] and some for <nothing>.
+            # Replace *Ts = Any with *Ts = *tuple[Any, ...] and some for Never.
             # These types may appear here as a result of user error or failed inference.
             return [UnpackType(t.type.tuple_fallback.copy_modified(args=[repl]))]
         else:
diff --git a/mypy/meet.py b/mypy/meet.py
index e3a22a226575..2efde4ac7588 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -968,11 +968,11 @@ def typed_dict_mapping_overlap(
 
     As usual empty, dictionaries lie in a gray area. In general, List[str] and List[str]
     are considered non-overlapping despite empty list belongs to both. However, List[int]
-    and List[<nothing>] are considered overlapping.
+    and List[Never] are considered overlapping.
 
     So here we follow the same logic: a TypedDict with no required keys is considered
     non-overlapping with Mapping[str, <some type>], but is considered overlapping with
-    Mapping[<nothing>, <nothing>]. This way we avoid false positives for overloads, and also
+    Mapping[Never, Never]. This way we avoid false positives for overloads, and also
     avoid false positives for comparisons like SomeTypedDict == {} under --strict-equality.
     """
     left, right = get_proper_types((left, right))
diff --git a/mypy/messages.py b/mypy/messages.py
index 1933b74d27bd..cda4cda25ee4 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -2068,7 +2068,7 @@ def report_protocol_problems(
         if supertype.type.fullname in exclusions.get(type(subtype), []):
             return
         if any(isinstance(tp, UninhabitedType) for tp in get_proper_types(supertype.args)):
-            # We don't want to add notes for failed inference (e.g. Iterable[<nothing>]).
+            # We don't want to add notes for failed inference (e.g. Iterable[Never]).
             # This will be only confusing a user even more.
             return
 
@@ -2395,7 +2395,7 @@ def quote_type_string(type_string: str) -> str:
     """Quotes a type representation for use in messages."""
     no_quote_regex = r"^<(tuple|union): \d+ items>$"
     if (
-        type_string in ["Module", "overloaded function", "<nothing>", "<deleted>"]
+        type_string in ["Module", "overloaded function", "Never", "<deleted>"]
         or type_string.startswith("Module ")
         or re.match(no_quote_regex, type_string) is not None
         or type_string.endswith("?")
@@ -2597,7 +2597,7 @@ def format_literal_value(typ: LiteralType) -> str:
         if typ.is_noreturn:
             return "NoReturn"
         else:
-            return "<nothing>"
+            return "Never"
     elif isinstance(typ, TypeType):
         type_name = "type" if options.use_lowercase_names() else "Type"
         return f"{type_name}[{format(typ.item)}]"
diff --git a/mypy/solve.py b/mypy/solve.py
index 5945d97ed85a..95377ea9f93e 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -300,8 +300,8 @@ def test(x: U) -> U: ...
     common_upper_bound_p = get_proper_type(common_upper_bound)
     # We include None for when strict-optional is disabled.
     if isinstance(common_upper_bound_p, (UninhabitedType, NoneType)):
-        # This will cause to infer <nothing>, which is better than a free TypeVar
-        # that has an upper bound <nothing>.
+        # This will cause to infer Never, which is better than a free TypeVar
+        # that has an upper bound Never.
         return None
 
     values: list[Type] = []
diff --git a/mypy/typeops.py b/mypy/typeops.py
index 0e0bc348942e..f9c1914cc9a8 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -330,7 +330,7 @@ class B(A): pass
             )
 
         # Update the method signature with the solutions found.
-        # Technically, some constraints might be unsolvable, make them <nothing>.
+        # Technically, some constraints might be unsolvable, make them Never.
         to_apply = [t if t is not None else UninhabitedType() for t in typeargs]
         func = expand_type(func, {tv.id: arg for tv, arg in zip(self_vars, to_apply)})
         variables = [v for v in func.variables if v not in self_vars]
diff --git a/mypy/types.py b/mypy/types.py
index fb360fb892f1..f974157ce84d 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -3103,7 +3103,7 @@ def visit_none_type(self, t: NoneType) -> str:
         return "None"
 
     def visit_uninhabited_type(self, t: UninhabitedType) -> str:
-        return "<nothing>"
+        return "Never"
 
     def visit_erased_type(self, t: ErasedType) -> str:
         return "<Erased>"
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
index 04b51bb603c5..4bc1e50f7be9 100644
--- a/test-data/unit/check-classes.test
+++ b/test-data/unit/check-classes.test
@@ -7718,13 +7718,13 @@ class D:
     def __init__(self) -> NoReturn: ...
 
 if object():
-    reveal_type(A())  # N: Revealed type is "<nothing>"
+    reveal_type(A())  # N: Revealed type is "Never"
 if object():
-    reveal_type(B())  # N: Revealed type is "<nothing>"
+    reveal_type(B())  # N: Revealed type is "Never"
 if object():
-    reveal_type(C())  # N: Revealed type is "<nothing>"
+    reveal_type(C())  # N: Revealed type is "Never"
 if object():
-    reveal_type(D())  # N: Revealed type is "<nothing>"
+    reveal_type(D())  # N: Revealed type is "Never"
 
 [case testOverloadedNewAndInitNoReturn]
 from typing import NoReturn, overload
@@ -7764,19 +7764,19 @@ class D:
     def __init__(self, a: int = ...) -> None: ...
 
 if object():
-    reveal_type(A())  # N: Revealed type is "<nothing>"
+    reveal_type(A())  # N: Revealed type is "Never"
 reveal_type(A(1))  # N: Revealed type is "__main__.A"
 
 if object():
-    reveal_type(B())  # N: Revealed type is "<nothing>"
+    reveal_type(B())  # N: Revealed type is "Never"
 reveal_type(B(1))  # N: Revealed type is "__main__.B"
 
 if object():
-    reveal_type(C())  # N: Revealed type is "<nothing>"
+    reveal_type(C())  # N: Revealed type is "Never"
 reveal_type(C(1))  # N: Revealed type is "__main__.C"
 
 if object():
-    reveal_type(D())  # N: Revealed type is "<nothing>"
+    reveal_type(D())  # N: Revealed type is "Never"
 reveal_type(D(1))  # N: Revealed type is "__main__.D"
 
 [case testClassScopeImportWithWrapperAndError]
diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test
index 58cd5e5a90f8..743c7fef8aa9 100644
--- a/test-data/unit/check-dataclass-transform.test
+++ b/test-data/unit/check-dataclass-transform.test
@@ -506,7 +506,7 @@ class FunctionModel:
         integer_: tuple
 
 FunctionModel(string_="abc", integer_=1)
-FunctionModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[<nothing>, ...]"; expected "int"
+FunctionModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[Never, ...]"; expected "int"
 
 [typing fixtures/typing-full.pyi]
 [builtins fixtures/dataclasses.pyi]
@@ -529,7 +529,7 @@ class FunctionModel:
         integer_: int
 
 FunctionModel(string_="abc", integer_=1)
-FunctionModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[<nothing>, ...]"; expected "int"
+FunctionModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[Never, ...]"; expected "int"
 
 [typing fixtures/typing-full.pyi]
 [builtins fixtures/dataclasses.pyi]
@@ -552,7 +552,7 @@ class BaseClassModel(ModelBase):
         integer_: tuple
 
 BaseClassModel(string_="abc", integer_=1)
-BaseClassModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[<nothing>, ...]"; expected "int"
+BaseClassModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[Never, ...]"; expected "int"
 
 [typing fixtures/typing-full.pyi]
 [builtins fixtures/dataclasses.pyi]
@@ -574,7 +574,7 @@ class BaseClassModel(ModelBase):
         integer_: int
 
 BaseClassModel(string_="abc", integer_=1)
-BaseClassModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[<nothing>, ...]"; expected "int"
+BaseClassModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[Never, ...]"; expected "int"
 
 [typing fixtures/typing-full.pyi]
 [builtins fixtures/dataclasses.pyi]
@@ -599,7 +599,7 @@ class MetaClassModel(ModelBaseWithMeta):
         integer_: tuple
 
 MetaClassModel(string_="abc", integer_=1)
-MetaClassModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[<nothing>, ...]"; expected "int"
+MetaClassModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[Never, ...]"; expected "int"
 
 [typing fixtures/typing-full.pyi]
 [builtins fixtures/dataclasses.pyi]
@@ -624,7 +624,7 @@ class MetaClassModel(ModelBaseWithMeta):
         integer_: int
 
 MetaClassModel(string_="abc", integer_=1)
-MetaClassModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[<nothing>, ...]"; expected "int"
+MetaClassModel(string_="abc", integer_=tuple())  # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[Never, ...]"; expected "int"
 
 [typing fixtures/typing-full.pyi]
 [builtins fixtures/dataclasses.pyi]
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index 1f5f5635de4e..8a50e7124d05 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -2134,8 +2134,8 @@ T = TypeVar('T')
 class A(Generic[T]):
     x: T  # exercises meet(T=int, int) = int
     y: bool  # exercises meet(bool, int) = bool
-    z: str  # exercises meet(str, bytes) = <nothing>
-    w: dict  # exercises meet(dict, <nothing>) = <nothing>
+    z: str  # exercises meet(str, bytes) = Never
+    w: dict  # exercises meet(dict, Never) = Never
     init_var: InitVar[int]  # exercises (non-optional, optional) = non-optional
 
 @dataclass
@@ -2149,8 +2149,8 @@ class B:
 a_or_b: Union[A[int], B]
 _ = replace(a_or_b, x=42, y=True, init_var=42)
 _ = replace(a_or_b, x=42, y=True)  # E: Missing named argument "init_var" for "replace" of "Union[A[int], B]"
-_ = replace(a_or_b, x=42, y=True, z='42', init_var=42)  # E: Argument "z" to "replace" of "Union[A[int], B]" has incompatible type "str"; expected <nothing>
-_ = replace(a_or_b, x=42, y=True, w={}, init_var=42)  # E: Argument "w" to "replace" of "Union[A[int], B]" has incompatible type "Dict[<nothing>, <nothing>]"; expected <nothing>
+_ = replace(a_or_b, x=42, y=True, z='42', init_var=42)  # E: Argument "z" to "replace" of "Union[A[int], B]" has incompatible type "str"; expected Never
+_ = replace(a_or_b, x=42, y=True, w={}, init_var=42)  # E: Argument "w" to "replace" of "Union[A[int], B]" has incompatible type "Dict[Never, Never]"; expected Never
 _ = replace(a_or_b, y=42, init_var=42)  # E: Argument "y" to "replace" of "Union[A[int], B]" has incompatible type "int"; expected "bool"
 
 [builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test
index 11c92d07021a..fd40f128ff4a 100644
--- a/test-data/unit/check-generic-subtyping.test
+++ b/test-data/unit/check-generic-subtyping.test
@@ -434,7 +434,7 @@ B(1)
 C(1)
 C('a')  # E: Argument 1 to "C" has incompatible type "str"; expected "int"
 D(A(1))
-D(1)  # E: Argument 1 to "D" has incompatible type "int"; expected "A[<nothing>]"
+D(1)  # E: Argument 1 to "D" has incompatible type "int"; expected "A[Never]"
 
 
 [case testInheritedConstructor2]
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 93674c0c2d5c..0781451e07ce 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -573,7 +573,7 @@ def func(x: IntNode[T]) -> IntNode[T]:
     return x
 reveal_type(func) # N: Revealed type is "def [T] (x: __main__.Node[builtins.int, T`-1]) -> __main__.Node[builtins.int, T`-1]"
 
-func(1) # E: Argument 1 to "func" has incompatible type "int"; expected "Node[int, <nothing>]"
+func(1) # E: Argument 1 to "func" has incompatible type "int"; expected "Node[int, Never]"
 func(Node('x', 1)) # E: Argument 1 to "Node" has incompatible type "str"; expected "int"
 reveal_type(func(Node(1, 'x'))) # N: Revealed type is "__main__.Node[builtins.int, builtins.str]"
 
@@ -834,7 +834,7 @@ reveal_type(x) # N: Revealed type is "builtins.int"
 def f2(x: IntTP[T]) -> IntTP[T]:
     return x
 
-f2((1, 2, 3)) # E: Argument 1 to "f2" has incompatible type "Tuple[int, int, int]"; expected "Tuple[int, <nothing>]"
+f2((1, 2, 3)) # E: Argument 1 to "f2" has incompatible type "Tuple[int, int, int]"; expected "Tuple[int, Never]"
 reveal_type(f2((1, 'x'))) # N: Revealed type is "Tuple[builtins.int, builtins.str]"
 
 [builtins fixtures/for.pyi]
@@ -904,7 +904,7 @@ n.y = 'x' # E: Incompatible types in assignment (expression has type "str", vari
 def f(x: Node[T, T]) -> TupledNode[T]:
     return Node(x.x, (x.x, x.x))
 
-f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Node[<nothing>, <nothing>]"
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Node[Never, Never]"
 f(Node(1, 'x')) # E: Cannot infer type argument 1 of "f"
 reveal_type(Node('x', 'x')) # N: Revealed type is "a.Node[builtins.str, builtins.str]"
 
@@ -2279,7 +2279,7 @@ class Box(Generic[T]):
 
 class IteratorBox(Box[Iterator[T]]): ...
 
-@IteratorBox.wrap  # E: Argument 1 to "wrap" of "Box" has incompatible type "Callable[[], int]"; expected "Callable[[], Iterator[<nothing>]]"
+@IteratorBox.wrap  # E: Argument 1 to "wrap" of "Box" has incompatible type "Callable[[], int]"; expected "Callable[[], Iterator[Never]]"
 def g() -> int:
     ...
 [builtins fixtures/classmethod.pyi]
@@ -3034,8 +3034,8 @@ def id2(x: V) -> V:
 reveal_type(dec1(id1))  # N: Revealed type is "def [S <: __main__.B] (S`1) -> builtins.list[S`1]"
 reveal_type(dec1(id2))  # N: Revealed type is "def [S in (builtins.int, builtins.str)] (S`3) -> builtins.list[S`3]"
 reveal_type(dec2(id1))  # N: Revealed type is "def [UC <: __main__.C] (UC`5) -> builtins.list[UC`5]"
-reveal_type(dec2(id2))  # N: Revealed type is "def (<nothing>) -> builtins.list[<nothing>]" \
-                        # E: Argument 1 to "dec2" has incompatible type "Callable[[V], V]"; expected "Callable[[<nothing>], <nothing>]"
+reveal_type(dec2(id2))  # N: Revealed type is "def (Never) -> builtins.list[Never]" \
+                        # E: Argument 1 to "dec2" has incompatible type "Callable[[V], V]"; expected "Callable[[Never], Never]"
 
 [case testInferenceAgainstGenericLambdas]
 # flags: --new-type-inference
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
index 5f25b007dd47..169fee65f127 100644
--- a/test-data/unit/check-inference-context.test
+++ b/test-data/unit/check-inference-context.test
@@ -22,7 +22,7 @@ if int():
 if int():
     ab = f()
 if int():
-    b = f() # E: Incompatible types in assignment (expression has type "A[<nothing>]", variable has type "B")
+    b = f() # E: Incompatible types in assignment (expression has type "A[Never]", variable has type "B")
 [case testBasicContextInferenceForConstructor]
 from typing import TypeVar, Generic
 T = TypeVar('T')
@@ -37,7 +37,7 @@ if int():
 if int():
     ab = A()
 if int():
-    b = A() # E: Incompatible types in assignment (expression has type "A[<nothing>]", variable has type "B")
+    b = A() # E: Incompatible types in assignment (expression has type "A[Never]", variable has type "B")
 [case testIncompatibleContextInference]
 from typing import TypeVar, Generic
 T = TypeVar('T')
@@ -372,7 +372,7 @@ ao: List[object]
 a: A
 def f(): a, aa, ao # Prevent redefinition
 
-a = [] # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "A")
+a = [] # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "A")
 
 aa = []
 ao = []
@@ -842,7 +842,7 @@ T = TypeVar('T')
 def f(x: Union[List[T], str]) -> None: pass
 f([1])
 f('')
-f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Union[List[<nothing>], str]"
+f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Union[List[Never], str]"
 [builtins fixtures/isinstancelist.pyi]
 
 [case testIgnoringInferenceContext]
@@ -911,7 +911,7 @@ from typing import TypeVar, Callable, Generic
 T = TypeVar('T')
 class A(Generic[T]):
     pass
-reveal_type(A()) # N: Revealed type is "__main__.A[<nothing>]"
+reveal_type(A()) # N: Revealed type is "__main__.A[Never]"
 b = reveal_type(A())  # type: A[int] # N: Revealed type is "__main__.A[builtins.int]"
 
 [case testUnionWithGenericTypeItemContext]
@@ -1311,7 +1311,7 @@ from typing import List, TypeVar
 T = TypeVar('T', bound=int)
 def f(x: List[T]) -> T: ...
 
-# mypy infers List[<nothing>] here, and <nothing> is a subtype of str
+# mypy infers List[Never] here, and Never is a subtype of str
 y: str = f([])
 [builtins fixtures/list.pyi]
 
@@ -1323,7 +1323,7 @@ def f(x: List[T]) -> List[T]: ...
 
 # TODO: improve error message for such cases, see #3283 and #5706
 y: List[str] = f([]) \
- # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[str]") \
+ # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[str]") \
  # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
  # N: Consider using "Sequence" instead, which is covariant
 [builtins fixtures/list.pyi]
@@ -1343,7 +1343,7 @@ T = TypeVar('T', bound=int)
 def f(x: Optional[T] = None) -> List[T]: ...
 
 y: List[str] = f()  \
-      # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[str]") \
+      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[str]") \
       # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
       # N: Consider using "Sequence" instead, which is covariant
 [builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 56d3fe2b4ce7..36b028977591 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -876,10 +876,10 @@ g('a')() # E: "List[str]" not callable
 # The next line is a case where there are multiple ways to satisfy a constraint
 # involving a Union. Either T = List[str] or T = str would turn out to be valid,
 # but mypy doesn't know how to branch on these two options (and potentially have
-# to backtrack later) and defaults to T = <nothing>. The result is an
+# to backtrack later) and defaults to T = Never. The result is an
 # awkward error message. Either a better error message, or simply accepting the
 # call, would be preferable here.
-g(['a']) # E: Argument 1 to "g" has incompatible type "List[str]"; expected "List[<nothing>]"
+g(['a']) # E: Argument 1 to "g" has incompatible type "List[str]"; expected "List[Never]"
 
 h(g(['a']))
 
@@ -972,7 +972,7 @@ from typing import TypeVar, Union, List
 T = TypeVar('T')
 def f() -> List[T]: pass
 d1 = f() # type: Union[List[int], str]
-d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "Union[int, str]")
+d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "Union[int, str]")
 def g(x: T) -> List[T]: pass
 d3 = g(1) # type: Union[List[int], List[str]]
 [builtins fixtures/list.pyi]
@@ -3126,7 +3126,7 @@ T = TypeVar('T')
 def f() -> Callable[..., NoReturn]: ...
 
 x = f()
-reveal_type(x)  # N: Revealed type is "def (*Any, **Any) -> <nothing>"
+reveal_type(x)  # N: Revealed type is "def (*Any, **Any) -> Never"
 
 [case testDeferralInNestedScopes]
 
@@ -3635,8 +3635,8 @@ class Call(Protocol[T]):
 def f(x: Call[T]) -> Tuple[T, T]: ...
 
 def g(__x: str) -> None: pass
-reveal_type(f(g))  # N: Revealed type is "Tuple[<nothing>, <nothing>]" \
-                   # E: Argument 1 to "f" has incompatible type "Callable[[str], None]"; expected "Call[<nothing>]"
+reveal_type(f(g))  # N: Revealed type is "Tuple[Never, Never]" \
+                   # E: Argument 1 to "f" has incompatible type "Callable[[str], None]"; expected "Call[Never]"
 [builtins fixtures/list.pyi]
 
 [case testCallableInferenceAgainstCallableNamedVsPosOnly]
@@ -3651,8 +3651,8 @@ class Call(Protocol[T]):
 def f(x: Call[T]) -> Tuple[T, T]: ...
 
 def g(*, x: str) -> None: pass
-reveal_type(f(g))  # N: Revealed type is "Tuple[<nothing>, <nothing>]" \
-                   # E: Argument 1 to "f" has incompatible type "Callable[[NamedArg(str, 'x')], None]"; expected "Call[<nothing>]"
+reveal_type(f(g))  # N: Revealed type is "Tuple[Never, Never]" \
+                   # E: Argument 1 to "f" has incompatible type "Callable[[NamedArg(str, 'x')], None]"; expected "Call[Never]"
 [builtins fixtures/list.pyi]
 
 [case testCallableInferenceAgainstCallablePosOnlyVsKwargs]
@@ -3667,8 +3667,8 @@ class Call(Protocol[T]):
 def f(x: Call[T]) -> Tuple[T, T]: ...
 
 def g(**x: str) -> None: pass
-reveal_type(f(g))  # N: Revealed type is "Tuple[<nothing>, <nothing>]" \
-                   # E: Argument 1 to "f" has incompatible type "Callable[[KwArg(str)], None]"; expected "Call[<nothing>]"
+reveal_type(f(g))  # N: Revealed type is "Tuple[Never, Never]" \
+                   # E: Argument 1 to "f" has incompatible type "Callable[[KwArg(str)], None]"; expected "Call[Never]"
 [builtins fixtures/list.pyi]
 
 [case testCallableInferenceAgainstCallableNamedVsArgs]
@@ -3683,6 +3683,6 @@ class Call(Protocol[T]):
 def f(x: Call[T]) -> Tuple[T, T]: ...
 
 def g(*args: str) -> None: pass
-reveal_type(f(g))  # N: Revealed type is "Tuple[<nothing>, <nothing>]" \
-                   # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[<nothing>]"
+reveal_type(f(g))  # N: Revealed type is "Tuple[Never, Never]" \
+                   # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[Never]"
 [builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test
index 361d4db78752..b7ee38b69d00 100644
--- a/test-data/unit/check-isinstance.test
+++ b/test-data/unit/check-isinstance.test
@@ -1812,9 +1812,9 @@ reveal_type(fm)  # N: Revealed type is "__main__.FooMetaclass"
 if issubclass(fm, Foo):
     reveal_type(fm)  # N: Revealed type is "Type[__main__.Foo]"
 if issubclass(fm, Bar):
-    reveal_type(fm)  # N: Revealed type is "<nothing>"
+    reveal_type(fm)  # N: Revealed type is "Never"
 if issubclass(fm, Baz):
-    reveal_type(fm)  # N: Revealed type is "<nothing>"
+    reveal_type(fm)  # N: Revealed type is "Never"
 [builtins fixtures/isinstance.pyi]
 
 [case testIsinstanceAndNarrowTypeVariable]
diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test
index ecd4fc0a1f00..08c709c6b777 100644
--- a/test-data/unit/check-literal.test
+++ b/test-data/unit/check-literal.test
@@ -1794,7 +1794,7 @@ def f6(x: Optional[Literal[1]], y: Optional[Literal[2]]) -> None: pass
 
 reveal_type(unify(f1))  # N: Revealed type is "Literal[1]"
 if object():
-    reveal_type(unify(f2))  # N: Revealed type is "<nothing>"
+    reveal_type(unify(f2))  # N: Revealed type is "Never"
 reveal_type(unify(f3))  # N: Revealed type is "Literal[1]"
 reveal_type(unify(f4))  # N: Revealed type is "Literal[1]"
 reveal_type(unify(f5))  # N: Revealed type is "Literal[1]"
@@ -1819,7 +1819,7 @@ T = TypeVar('T')
 def unify(func: Callable[[T, T], None]) -> T: pass
 def func(x: Literal[1], y: Literal[2]) -> None: pass
 
-reveal_type(unify(func))  # N: Revealed type is "<nothing>"
+reveal_type(unify(func))  # N: Revealed type is "Never"
 [builtins fixtures/list.pyi]
 [out]
 
diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test
index 291f73a45230..c86cffd453df 100644
--- a/test-data/unit/check-narrowing.test
+++ b/test-data/unit/check-narrowing.test
@@ -1189,7 +1189,7 @@ def f(t: Type[T], a: A, b: B) -> None:
         reveal_type(a)  # N: Revealed type is "__main__.A"
 
     if type(b) is t:
-        reveal_type(b)  # N: Revealed type is "<nothing>"
+        reveal_type(b)  # N: Revealed type is "Never"
     else:
         reveal_type(b)  # N: Revealed type is "__main__.B"
 
diff --git a/test-data/unit/check-native-int.test b/test-data/unit/check-native-int.test
index 30314eebcb31..2f852ca522c5 100644
--- a/test-data/unit/check-native-int.test
+++ b/test-data/unit/check-native-int.test
@@ -87,9 +87,9 @@ reveal_type(meet(f, f32))  # N: Revealed type is "mypy_extensions.i32"
 reveal_type(meet(f64, f))  # N: Revealed type is "mypy_extensions.i64"
 reveal_type(meet(f, f64))  # N: Revealed type is "mypy_extensions.i64"
 if object():
-    reveal_type(meet(f32, f64))  # N: Revealed type is "<nothing>"
+    reveal_type(meet(f32, f64))  # N: Revealed type is "Never"
 if object():
-    reveal_type(meet(f64, f32))  # N: Revealed type is "<nothing>"
+    reveal_type(meet(f64, f32))  # N: Revealed type is "Never"
 
 reveal_type(meet(f, fa))    # N: Revealed type is "builtins.int"
 reveal_type(meet(f32, fa))  # N: Revealed type is "mypy_extensions.i32"
@@ -149,9 +149,9 @@ def ff(x: float) -> None: pass
 def fi32(x: i32) -> None: pass
 
 if object():
-    reveal_type(meet(ff, fi32))  # N: Revealed type is "<nothing>"
+    reveal_type(meet(ff, fi32))  # N: Revealed type is "Never"
 if object():
-    reveal_type(meet(fi32, ff))  # N: Revealed type is "<nothing>"
+    reveal_type(meet(fi32, ff))  # N: Revealed type is "Never"
 [builtins fixtures/dict.pyi]
 
 [case testNativeIntForLoopRange]
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index ede4a2e4cf62..4546c7171856 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -3397,11 +3397,11 @@ def wrapper() -> None:
 
     # Note: These should be fine, but mypy has an unrelated bug
     #       that makes them error out?
-    a2_overload: A = SomeType().foo(obj1)  # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[<nothing>]"
-    a2_union: A    = SomeType().bar(obj1)  # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[<nothing>], W2[<nothing>]]"
+    a2_overload: A = SomeType().foo(obj1)  # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[Never]"
+    a2_union: A    = SomeType().bar(obj1)  # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[Never], W2[Never]]"
 
-    SomeType().foo(obj1)  # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[<nothing>]"
-    SomeType().bar(obj1)  # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[<nothing>], W2[<nothing>]]"
+    SomeType().foo(obj1)  # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[Never]"
+    SomeType().bar(obj1)  # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[Never], W2[Never]]"
 
 [case testOverloadingInferUnionReturnWithBadObjectTypevarReturn]
 from typing import overload, Union, TypeVar, Generic
@@ -3425,8 +3425,8 @@ class SomeType(Generic[T]):
 def wrapper(mysterious: T) -> T:
     obj1: Union[W1[A], W2[B]]
 
-    SomeType().foo(obj1)  # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "W1[<nothing>]"
-    SomeType().bar(obj1)  # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "Union[W1[<nothing>], W2[<nothing>]]"
+    SomeType().foo(obj1)  # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "W1[Never]"
+    SomeType().bar(obj1)  # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "Union[W1[Never], W2[Never]]"
 
     SomeType[A]().foo(obj1)  # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "W1[A]"
     SomeType[A]().bar(obj1)  # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "Union[W1[A], W2[A]]"
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index a98c92ce14e7..d80069644194 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1401,9 +1401,9 @@ def wrong_name_constructor(b: bool) -> SomeClass:
     return SomeClass("a")
 
 func(SomeClass, constructor)
-reveal_type(func(SomeClass, wrong_constructor))  # N: Revealed type is "def (a: <nothing>) -> __main__.SomeClass"
-reveal_type(func_regular(SomeClass, wrong_constructor))  # N: Revealed type is "def (<nothing>) -> __main__.SomeClass"
-func(SomeClass, wrong_name_constructor)  # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[<nothing>], SomeClass]"
+reveal_type(func(SomeClass, wrong_constructor))  # N: Revealed type is "def (a: Never) -> __main__.SomeClass"
+reveal_type(func_regular(SomeClass, wrong_constructor))  # N: Revealed type is "def (Never) -> __main__.SomeClass"
+func(SomeClass, wrong_name_constructor)  # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[Never], SomeClass]"
 [builtins fixtures/paramspec.pyi]
 
 [case testParamSpecInTypeAliasBasic]
diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test
index 1465bab2bb7b..fb5f1f9472c2 100644
--- a/test-data/unit/check-plugin-attrs.test
+++ b/test-data/unit/check-plugin-attrs.test
@@ -1181,7 +1181,7 @@ def my_factory() -> int:
     return 7
 @attr.s
 class A:
-    x: int = attr.ib(factory=list)  # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "int")
+    x: int = attr.ib(factory=list)  # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "int")
     y: str = attr.ib(factory=my_factory) # E: Incompatible types in assignment (expression has type "int", variable has type "str")
 [builtins fixtures/list.pyi]
 
@@ -2131,8 +2131,8 @@ T = TypeVar('T')
 class A(Generic[T]):
     x: T  # exercises meet(T=int, int) = int
     y: bool  # exercises meet(bool, int) = bool
-    z: str  # exercises meet(str, bytes) = <nothing>
-    w: dict  # exercises meet(dict, <nothing>) = <nothing>
+    z: str  # exercises meet(str, bytes) = Never
+    w: dict  # exercises meet(dict, Never) = Never
 
 
 @attrs.define
@@ -2144,8 +2144,8 @@ class B:
 
 a_or_b: A[int] | B
 a2 = attrs.evolve(a_or_b, x=42, y=True)
-a2 = attrs.evolve(a_or_b, x=42, y=True, z='42')  # E: Argument "z" to "evolve" of "Union[A[int], B]" has incompatible type "str"; expected <nothing>
-a2 = attrs.evolve(a_or_b, x=42, y=True, w={})  # E: Argument "w" to "evolve" of "Union[A[int], B]" has incompatible type "Dict[<nothing>, <nothing>]"; expected <nothing>
+a2 = attrs.evolve(a_or_b, x=42, y=True, z='42')  # E: Argument "z" to "evolve" of "Union[A[int], B]" has incompatible type "str"; expected Never
+a2 = attrs.evolve(a_or_b, x=42, y=True, w={})  # E: Argument "w" to "evolve" of "Union[A[int], B]" has incompatible type "Dict[Never, Never]"; expected Never
 
 [builtins fixtures/plugin_attrs.pyi]
 
diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test
index dba01be50fee..e73add454a67 100644
--- a/test-data/unit/check-protocols.test
+++ b/test-data/unit/check-protocols.test
@@ -928,7 +928,7 @@ class L:
 
 def last(seq: Linked[T]) -> T:
     pass
-last(L()) # E: Argument 1 to "last" has incompatible type "L"; expected "Linked[<nothing>]"
+last(L()) # E: Argument 1 to "last" has incompatible type "L"; expected "Linked[Never]"
 
 [case testMutuallyRecursiveProtocols]
 from typing import Protocol, Sequence, List
diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test
index 0fe6a3d5a5cc..f81da23d148c 100644
--- a/test-data/unit/check-python310.test
+++ b/test-data/unit/check-python310.test
@@ -1144,7 +1144,7 @@ m: str
 
 match m:
     case a if a := 1:  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
-        reveal_type(a)  # N: Revealed type is "<nothing>"
+        reveal_type(a)  # N: Revealed type is "Never"
 
 [case testMatchAssigningPatternGuard]
 m: str
diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test
index d5024412ca97..bf7a928ff51d 100644
--- a/test-data/unit/check-selftype.test
+++ b/test-data/unit/check-selftype.test
@@ -1520,7 +1520,7 @@ from typing import Self, TypeVar, Tuple
 T = TypeVar("T")
 class C:
     def meth(self: T) -> Tuple[Self, T]: ...  # E: Method cannot have explicit self annotation and Self type
-reveal_type(C().meth())  # N: Revealed type is "Tuple[<nothing>, __main__.C]"
+reveal_type(C().meth())  # N: Revealed type is "Tuple[Never, __main__.C]"
 [builtins fixtures/property.pyi]
 
 [case testTypingSelfProperty]
@@ -1558,7 +1558,7 @@ class C:
 
 class D(C): ...
 reveal_type(D.meth())  # N: Revealed type is "__main__.D"
-reveal_type(D.bad())  # N: Revealed type is "<nothing>"
+reveal_type(D.bad())  # N: Revealed type is "Never"
 [builtins fixtures/classmethod.pyi]
 
 [case testTypingSelfOverload]
diff --git a/test-data/unit/check-singledispatch.test b/test-data/unit/check-singledispatch.test
index 1adec1575b7e..e63d4c073e86 100644
--- a/test-data/unit/check-singledispatch.test
+++ b/test-data/unit/check-singledispatch.test
@@ -300,7 +300,7 @@ h('a', 1) # E: Argument 2 to "h" has incompatible type "int"; expected "str"
 
 [case testDontCrashWhenRegisteringAfterError]
 import functools
-a = functools.singledispatch('a') # E: Need type annotation for "a" # E: Argument 1 to "singledispatch" has incompatible type "str"; expected "Callable[..., <nothing>]"
+a = functools.singledispatch('a') # E: Need type annotation for "a" # E: Argument 1 to "singledispatch" has incompatible type "str"; expected "Callable[..., Never]"
 
 @a.register(int)
 def default(val) -> int:
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index 7de8e6416f35..b8953f05b6a5 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -602,7 +602,7 @@ YbZ = TypedDict('YbZ', {'y': object, 'z': int})
 T = TypeVar('T')
 def f(x: Callable[[T, T], None]) -> T: pass
 def g(x: XYa, y: YbZ) -> None: pass
-reveal_type(f(g))  # N: Revealed type is "<nothing>"
+reveal_type(f(g))  # N: Revealed type is "Never"
 [builtins fixtures/dict.pyi]
 
 [case testMeetOfTypedDictsWithNoCommonKeysHasAllKeysAndNewFallback]
@@ -625,7 +625,7 @@ M = Mapping[str, int]
 T = TypeVar('T')
 def f(x: Callable[[T, T], None]) -> T: pass
 def g(x: X, y: M) -> None: pass
-reveal_type(f(g))  # N: Revealed type is "<nothing>"
+reveal_type(f(g))  # N: Revealed type is "Never"
 [builtins fixtures/dict.pyi]
 
 [case testMeetOfTypedDictWithIncompatibleMappingIsUninhabited]
@@ -636,7 +636,7 @@ M = Mapping[str, str]
 T = TypeVar('T')
 def f(x: Callable[[T, T], None]) -> T: pass
 def g(x: X, y: M) -> None: pass
-reveal_type(f(g))  # N: Revealed type is "<nothing>"
+reveal_type(f(g))  # N: Revealed type is "Never"
 [builtins fixtures/dict.pyi]
 
 [case testMeetOfTypedDictWithCompatibleMappingSuperclassIsUninhabitedForNow]
@@ -680,7 +680,7 @@ YZ = TypedDict('YZ', {'y': int, 'z': int})
 T = TypeVar('T')
 def f(x: Callable[[T, T], None]) -> T: pass
 def g(x: XY, y: YZ) -> None: pass
-reveal_type(f(g)) # N: Revealed type is "<nothing>"
+reveal_type(f(g)) # N: Revealed type is "Never"
 [builtins fixtures/dict.pyi]
 
 
@@ -1856,7 +1856,7 @@ class Config(TypedDict):
     b: str
 
 x: Config
-x == {}  # E: Non-overlapping equality check (left operand type: "Config", right operand type: "Dict[<nothing>, <nothing>]")
+x == {}  # E: Non-overlapping equality check (left operand type: "Config", right operand type: "Dict[Never, Never]")
 [builtins fixtures/dict.pyi]
 [typing fixtures/typing-typeddict.pyi]
 
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index c8b33ec96b06..f7faab4818c9 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -17,7 +17,7 @@ reveal_type(f(args))  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
 
 reveal_type(f(varargs))  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
 
-f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[<nothing>, ...]"
+f(0)  # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[Never, ...]"
 
 def g(a: Tuple[Unpack[Ts]], b: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]:
     return a
diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test
index 20b5dea9fc87..acb5ca6ea609 100644
--- a/test-data/unit/check-unreachable-code.test
+++ b/test-data/unit/check-unreachable-code.test
@@ -900,15 +900,15 @@ from typing_extensions import NoReturn as TENoReturn
 from mypy_extensions import NoReturn as MENoReturn
 
 bottom1: Never
-reveal_type(bottom1)  # N: Revealed type is "<nothing>"
+reveal_type(bottom1)  # N: Revealed type is "Never"
 bottom2: TENever
-reveal_type(bottom2)  # N: Revealed type is "<nothing>"
+reveal_type(bottom2)  # N: Revealed type is "Never"
 bottom3: NoReturn
-reveal_type(bottom3)  # N: Revealed type is "<nothing>"
+reveal_type(bottom3)  # N: Revealed type is "Never"
 bottom4: TENoReturn
-reveal_type(bottom4)  # N: Revealed type is "<nothing>"
+reveal_type(bottom4)  # N: Revealed type is "Never"
 bottom5: MENoReturn
-reveal_type(bottom5)  # N: Revealed type is "<nothing>"
+reveal_type(bottom5)  # N: Revealed type is "Never"
 
 [builtins fixtures/tuple.pyi]
 
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
index fe09fb43c97c..54546f3973b3 100644
--- a/test-data/unit/check-varargs.test
+++ b/test-data/unit/check-varargs.test
@@ -604,27 +604,27 @@ class B: pass
 if int():
     a, aa = G().f(*[a]) \
       # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A") \
-      # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[A]") \
+      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \
       # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
       # N: Consider using "Sequence" instead, which is covariant
 
 if int():
-    aa, a = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "A")
+    aa, a = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "A")
 if int():
     ab, aa = G().f(*[a]) \
-      # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[A]") \
+      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \
       # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
       # N: Consider using "Sequence" instead, which is covariant \
       # E: Argument 1 to "f" of "G" has incompatible type "*List[A]"; expected "B"
 
 if int():
     ao, ao = G().f(*[a]) \
-      # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[object]") \
+      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[object]") \
       # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
       # N: Consider using "Sequence" instead, which is covariant
 if int():
     aa, aa = G().f(*[a]) \
-      # E: Incompatible types in assignment (expression has type "List[<nothing>]", variable has type "List[A]") \
+      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \
       # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
       # N: Consider using "Sequence" instead, which is covariant
 [builtins fixtures/list.pyi]
diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test
index 556414cf3252..069374b9635c 100644
--- a/test-data/unit/pythoneval-asyncio.test
+++ b/test-data/unit/pythoneval-asyncio.test
@@ -472,7 +472,7 @@ async def bad(arg: P) -> T:
     pass
 [out]
 _program.py:8: note: Revealed type is "def [T] (arg: P?) -> typing.Coroutine[Any, Any, T`-1]"
-_program.py:9: error: Value of type "Coroutine[Any, Any, <nothing>]" must be used
+_program.py:9: error: Value of type "Coroutine[Any, Any, Never]" must be used
 _program.py:9: note: Are you missing an await?
 _program.py:11: error: Variable "_testForwardRefToBadAsyncShouldNotCrash_newsemanal.P" is not valid as a type
 _program.py:11: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index 58dfb172cf76..c5be30eac1b7 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -863,8 +863,8 @@ MyDDict(dict)[0]
 _program.py:7: error: Argument 1 to "defaultdict" has incompatible type "Type[List[Any]]"; expected "Optional[Callable[[], str]]"
 _program.py:10: error: Invalid index type "str" for "defaultdict[int, str]"; expected type "int"
 _program.py:10: error: Incompatible types in assignment (expression has type "int", target has type "str")
-_program.py:20: error: Argument 1 to "tst" has incompatible type "defaultdict[str, List[<nothing>]]"; expected "defaultdict[int, List[<nothing>]]"
-_program.py:24: error: Invalid index type "str" for "MyDDict[Dict[<nothing>, <nothing>]]"; expected type "int"
+_program.py:20: error: Argument 1 to "tst" has incompatible type "defaultdict[str, List[Never]]"; expected "defaultdict[int, List[Never]]"
+_program.py:24: error: Invalid index type "str" for "MyDDict[Dict[Never, Never]]"; expected type "int"
 
 [case testNoSubcriptionOfStdlibCollections]
 # flags: --python-version 3.7
@@ -2004,7 +2004,7 @@ Foo().__dict__ = {}
 _testInferenceOfDunderDictOnClassObjects.py:2: note: Revealed type is "types.MappingProxyType[builtins.str, Any]"
 _testInferenceOfDunderDictOnClassObjects.py:3: note: Revealed type is "builtins.dict[builtins.str, Any]"
 _testInferenceOfDunderDictOnClassObjects.py:4: error: Property "__dict__" defined in "type" is read-only
-_testInferenceOfDunderDictOnClassObjects.py:4: error: Incompatible types in assignment (expression has type "Dict[<nothing>, <nothing>]", variable has type "MappingProxyType[str, Any]")
+_testInferenceOfDunderDictOnClassObjects.py:4: error: Incompatible types in assignment (expression has type "Dict[Never, Never]", variable has type "MappingProxyType[str, Any]")
 
 [case testTypeVarTuple]
 # flags: --enable-incomplete-feature=TypeVarTuple --enable-incomplete-feature=Unpack --python-version=3.11

From 2a6d9cbc45eba360934ddee7b43c607b3edb3095 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
 <41898282+github-actions[bot]@users.noreply.github.com>
Date: Fri, 1 Sep 2023 00:37:41 -0700
Subject: [PATCH 092/288] Sync typeshed (#16009)

Source commit:

https://github.com/python/typeshed/commit/f28cb8b8562ccc382d018129ba4886f241c6db9c
---
 mypy/typeshed/stdlib/_ctypes.pyi            |  6 +-
 mypy/typeshed/stdlib/asyncio/taskgroups.pyi |  7 +-
 mypy/typeshed/stdlib/asyncio/tasks.pyi      | 30 +++++---
 mypy/typeshed/stdlib/configparser.pyi       | 81 ++++++++++++++-------
 mypy/typeshed/stdlib/csv.pyi                |  2 +-
 mypy/typeshed/stdlib/enum.pyi               |  6 +-
 mypy/typeshed/stdlib/genericpath.pyi        |  8 +-
 mypy/typeshed/stdlib/gzip.pyi               |  6 +-
 mypy/typeshed/stdlib/ntpath.pyi             |  5 ++
 mypy/typeshed/stdlib/os/__init__.pyi        |  9 ++-
 mypy/typeshed/stdlib/posixpath.pyi          |  9 +++
 mypy/typeshed/stdlib/pydoc.pyi              |  4 +-
 mypy/typeshed/stdlib/ssl.pyi                | 10 ++-
 mypy/typeshed/stdlib/tempfile.pyi           |  2 +-
 mypy/typeshed/stdlib/unittest/__init__.pyi  |  4 +-
 mypy/typeshed/stdlib/unittest/loader.pyi    |  7 +-
 mypy/typeshed/stdlib/unittest/main.pyi      | 54 ++++++++++----
 mypy/typeshed/stdlib/unittest/result.pyi    |  7 ++
 mypy/typeshed/stdlib/unittest/runner.pyi    | 62 ++++++++++++----
 19 files changed, 233 insertions(+), 86 deletions(-)

diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi
index 756ee86d3342..165bb5337784 100644
--- a/mypy/typeshed/stdlib/_ctypes.pyi
+++ b/mypy/typeshed/stdlib/_ctypes.pyi
@@ -69,7 +69,7 @@ class _CData(metaclass=_CDataMeta):
     def __buffer__(self, __flags: int) -> memoryview: ...
     def __release_buffer__(self, __buffer: memoryview) -> None: ...
 
-class _SimpleCData(Generic[_T], _CData):
+class _SimpleCData(_CData, Generic[_T]):
     value: _T
     # The TypeVar can be unsolved here,
     # but we can't use overloads without creating many, many mypy false-positive errors
@@ -78,7 +78,7 @@ class _SimpleCData(Generic[_T], _CData):
 class _CanCastTo(_CData): ...
 class _PointerLike(_CanCastTo): ...
 
-class _Pointer(Generic[_CT], _PointerLike, _CData):
+class _Pointer(_PointerLike, _CData, Generic[_CT]):
     _type_: type[_CT]
     contents: _CT
     @overload
@@ -140,7 +140,7 @@ class _StructUnionBase(_CData, metaclass=_StructUnionMeta):
 class Union(_StructUnionBase): ...
 class Structure(_StructUnionBase): ...
 
-class Array(Generic[_CT], _CData):
+class Array(_CData, Generic[_CT]):
     @property
     @abstractmethod
     def _length_(self) -> int: ...
diff --git a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi
index 47d9bb2f699e..aec3f1127f15 100644
--- a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi
+++ b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi
@@ -1,10 +1,11 @@
 import sys
 from contextvars import Context
 from types import TracebackType
-from typing import TypeVar
+from typing import Any, TypeVar
 from typing_extensions import Self
 
 from . import _CoroutineLike
+from .events import AbstractEventLoop
 from .tasks import Task
 
 if sys.version_info >= (3, 12):
@@ -15,6 +16,10 @@ else:
 _T = TypeVar("_T")
 
 class TaskGroup:
+    _loop: AbstractEventLoop | None
+    _tasks: set[Task[Any]]
+
     async def __aenter__(self) -> Self: ...
     async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ...
     def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ...
+    def _on_task_done(self, task: Task[object]) -> None: ...
diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi
index 5ea30d3791de..3bc65e3703c5 100644
--- a/mypy/typeshed/stdlib/asyncio/tasks.pyi
+++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi
@@ -243,12 +243,6 @@ if sys.version_info >= (3, 10):
     async def sleep(delay: float) -> None: ...
     @overload
     async def sleep(delay: float, result: _T) -> _T: ...
-    @overload
-    async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ...  # type: ignore[misc]
-    @overload
-    async def wait(
-        fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED"
-    ) -> tuple[set[Task[_T]], set[Task[_T]]]: ...
     async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ...
 
 else:
@@ -257,6 +251,25 @@ else:
     async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ...
     @overload
     async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ...
+    async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ...
+
+if sys.version_info >= (3, 11):
+    @overload
+    async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ...  # type: ignore[misc]
+    @overload
+    async def wait(
+        fs: Iterable[Task[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED"
+    ) -> tuple[set[Task[_T]], set[Task[_T]]]: ...
+
+elif sys.version_info >= (3, 10):
+    @overload
+    async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ...  # type: ignore[misc]
+    @overload
+    async def wait(
+        fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED"
+    ) -> tuple[set[Task[_T]], set[Task[_T]]]: ...
+
+else:
     @overload
     async def wait(  # type: ignore[misc]
         fs: Iterable[_FT],
@@ -273,7 +286,6 @@ else:
         timeout: float | None = None,
         return_when: str = "ALL_COMPLETED",
     ) -> tuple[set[Task[_T]], set[Task[_T]]]: ...
-    async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ...
 
 if sys.version_info >= (3, 12):
     _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co]
@@ -291,7 +303,7 @@ class Task(Future[_T_co], Generic[_T_co]):  # type: ignore[type-var]  # pyright:
             coro: _TaskCompatibleCoro[_T_co],
             *,
             loop: AbstractEventLoop = ...,
-            name: str | None,
+            name: str | None = ...,
             context: Context | None = None,
             eager_start: bool = False,
         ) -> None: ...
@@ -301,7 +313,7 @@ class Task(Future[_T_co], Generic[_T_co]):  # type: ignore[type-var]  # pyright:
             coro: _TaskCompatibleCoro[_T_co],
             *,
             loop: AbstractEventLoop = ...,
-            name: str | None,
+            name: str | None = ...,
             context: Context | None = None,
         ) -> None: ...
     elif sys.version_info >= (3, 8):
diff --git a/mypy/typeshed/stdlib/configparser.pyi b/mypy/typeshed/stdlib/configparser.pyi
index 6f9f788310d1..e6fedb0328c2 100644
--- a/mypy/typeshed/stdlib/configparser.pyi
+++ b/mypy/typeshed/stdlib/configparser.pyi
@@ -5,31 +5,53 @@ from re import Pattern
 from typing import Any, ClassVar, TypeVar, overload
 from typing_extensions import Literal, TypeAlias
 
-__all__ = [
-    "NoSectionError",
-    "DuplicateOptionError",
-    "DuplicateSectionError",
-    "NoOptionError",
-    "InterpolationError",
-    "InterpolationDepthError",
-    "InterpolationMissingOptionError",
-    "InterpolationSyntaxError",
-    "ParsingError",
-    "MissingSectionHeaderError",
-    "ConfigParser",
-    "RawConfigParser",
-    "Interpolation",
-    "BasicInterpolation",
-    "ExtendedInterpolation",
-    "LegacyInterpolation",
-    "SectionProxy",
-    "ConverterMapping",
-    "DEFAULTSECT",
-    "MAX_INTERPOLATION_DEPTH",
-]
-
-if sys.version_info < (3, 12):
-    __all__ += ["SafeConfigParser"]
+if sys.version_info >= (3, 12):
+    __all__ = (
+        "NoSectionError",
+        "DuplicateOptionError",
+        "DuplicateSectionError",
+        "NoOptionError",
+        "InterpolationError",
+        "InterpolationDepthError",
+        "InterpolationMissingOptionError",
+        "InterpolationSyntaxError",
+        "ParsingError",
+        "MissingSectionHeaderError",
+        "ConfigParser",
+        "RawConfigParser",
+        "Interpolation",
+        "BasicInterpolation",
+        "ExtendedInterpolation",
+        "LegacyInterpolation",
+        "SectionProxy",
+        "ConverterMapping",
+        "DEFAULTSECT",
+        "MAX_INTERPOLATION_DEPTH",
+    )
+else:
+    __all__ = [
+        "NoSectionError",
+        "DuplicateOptionError",
+        "DuplicateSectionError",
+        "NoOptionError",
+        "InterpolationError",
+        "InterpolationDepthError",
+        "InterpolationMissingOptionError",
+        "InterpolationSyntaxError",
+        "ParsingError",
+        "MissingSectionHeaderError",
+        "ConfigParser",
+        "SafeConfigParser",
+        "RawConfigParser",
+        "Interpolation",
+        "BasicInterpolation",
+        "ExtendedInterpolation",
+        "LegacyInterpolation",
+        "SectionProxy",
+        "ConverterMapping",
+        "DEFAULTSECT",
+        "MAX_INTERPOLATION_DEPTH",
+    ]
 
 _Section: TypeAlias = Mapping[str, str]
 _Parser: TypeAlias = MutableMapping[str, _Section]
@@ -128,7 +150,8 @@ class RawConfigParser(_Parser):
     def read_file(self, f: Iterable[str], source: str | None = None) -> None: ...
     def read_string(self, string: str, source: str = "<string>") -> None: ...
     def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "<dict>") -> None: ...
-    def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ...
+    if sys.version_info < (3, 12):
+        def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ...
     # These get* methods are partially applied (with the same names) in
     # SectionProxy; the stubs should be kept updated together
     @overload
@@ -277,7 +300,11 @@ class InterpolationSyntaxError(InterpolationError): ...
 class ParsingError(Error):
     source: str
     errors: list[tuple[int, str]]
-    def __init__(self, source: str | None = None, filename: str | None = None) -> None: ...
+    if sys.version_info >= (3, 12):
+        def __init__(self, source: str) -> None: ...
+    else:
+        def __init__(self, source: str | None = None, filename: str | None = None) -> None: ...
+
     def append(self, lineno: int, line: str) -> None: ...
 
 class MissingSectionHeaderError(ParsingError):
diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi
index 139ba7af2208..a9c7fe0492c8 100644
--- a/mypy/typeshed/stdlib/csv.pyi
+++ b/mypy/typeshed/stdlib/csv.pyi
@@ -69,7 +69,7 @@ class excel(Dialect): ...
 class excel_tab(excel): ...
 class unix_dialect(Dialect): ...
 
-class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]):
+class DictReader(Iterator[_DictReadMapping[_T | Any, str | Any]], Generic[_T]):
     fieldnames: Sequence[_T] | None
     restkey: str | None
     restval: str | None
diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi
index a8ba7bf157c2..e6eaf6c413dc 100644
--- a/mypy/typeshed/stdlib/enum.pyi
+++ b/mypy/typeshed/stdlib/enum.pyi
@@ -33,7 +33,7 @@ if sys.version_info >= (3, 11):
         "verify",
     ]
 
-if sys.version_info >= (3, 12):
+if sys.version_info >= (3, 11):
     __all__ += ["pickle_by_enum_name", "pickle_by_global_name"]
 
 _EnumMemberT = TypeVar("_EnumMemberT")
@@ -188,7 +188,7 @@ class Enum(metaclass=EnumMeta):
     def __hash__(self) -> int: ...
     def __format__(self, format_spec: str) -> str: ...
     def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ...
-    if sys.version_info >= (3, 12):
+    if sys.version_info >= (3, 11):
         def __copy__(self) -> Self: ...
         def __deepcopy__(self, memo: Any) -> Self: ...
 
@@ -294,6 +294,6 @@ class auto(IntFlag):
     def value(self) -> Any: ...
     def __new__(cls) -> Self: ...
 
-if sys.version_info >= (3, 12):
+if sys.version_info >= (3, 11):
     def pickle_by_global_name(self: Enum, proto: int) -> str: ...
     def pickle_by_enum_name(self: _EnumMemberT, proto: int) -> tuple[Callable[..., Any], tuple[type[_EnumMemberT], str]]: ...
diff --git a/mypy/typeshed/stdlib/genericpath.pyi b/mypy/typeshed/stdlib/genericpath.pyi
index 46426b63c852..be08f7a3cb79 100644
--- a/mypy/typeshed/stdlib/genericpath.pyi
+++ b/mypy/typeshed/stdlib/genericpath.pyi
@@ -1,5 +1,6 @@
 import os
-from _typeshed import BytesPath, FileDescriptorOrPath, StrPath, SupportsRichComparisonT
+import sys
+from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRichComparisonT
 from collections.abc import Sequence
 from typing import overload
 from typing_extensions import Literal, LiteralString
@@ -17,6 +18,8 @@ __all__ = [
     "sameopenfile",
     "samestat",
 ]
+if sys.version_info >= (3, 12):
+    __all__ += ["islink"]
 
 # All overloads can return empty string. Ideally, Literal[""] would be a valid
 # Iterable[T], so that list[T] | Literal[""] could be used as a return
@@ -36,6 +39,9 @@ def getsize(filename: FileDescriptorOrPath) -> int: ...
 def isfile(path: FileDescriptorOrPath) -> bool: ...
 def isdir(s: FileDescriptorOrPath) -> bool: ...
 
+if sys.version_info >= (3, 12):
+    def islink(path: StrOrBytesPath) -> bool: ...
+
 # These return float if os.stat_float_times() == True,
 # but int is a subclass of float.
 def getatime(filename: FileDescriptorOrPath) -> float: ...
diff --git a/mypy/typeshed/stdlib/gzip.pyi b/mypy/typeshed/stdlib/gzip.pyi
index 1ec8b4b8ca7c..d001849e609c 100644
--- a/mypy/typeshed/stdlib/gzip.pyi
+++ b/mypy/typeshed/stdlib/gzip.pyi
@@ -139,8 +139,10 @@ class GzipFile(_compression.BaseStream):
         fileobj: _ReadableFileobj | _WritableFileobj | None = None,
         mtime: float | None = None,
     ) -> None: ...
-    @property
-    def filename(self) -> str: ...
+    if sys.version_info < (3, 12):
+        @property
+        def filename(self) -> str: ...
+
     @property
     def mtime(self) -> int | None: ...
     crc: int
diff --git a/mypy/typeshed/stdlib/ntpath.pyi b/mypy/typeshed/stdlib/ntpath.pyi
index f1fa137c6d88..1a58b52de050 100644
--- a/mypy/typeshed/stdlib/ntpath.pyi
+++ b/mypy/typeshed/stdlib/ntpath.pyi
@@ -42,6 +42,9 @@ from posixpath import (
     splitext as splitext,
     supports_unicode_filenames as supports_unicode_filenames,
 )
+
+if sys.version_info >= (3, 12):
+    from posixpath import isjunction as isjunction, splitroot as splitroot
 from typing import AnyStr, overload
 from typing_extensions import LiteralString
 
@@ -85,6 +88,8 @@ __all__ = [
     "samestat",
     "commonpath",
 ]
+if sys.version_info >= (3, 12):
+    __all__ += ["isjunction", "splitroot"]
 
 altsep: LiteralString
 
diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi
index 994595aae781..961858ce3c19 100644
--- a/mypy/typeshed/stdlib/os/__init__.pyi
+++ b/mypy/typeshed/stdlib/os/__init__.pyi
@@ -388,6 +388,8 @@ class DirEntry(Generic[AnyStr]):
     def __fspath__(self) -> AnyStr: ...
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, item: Any) -> GenericAlias: ...
+    if sys.version_info >= (3, 12):
+        def is_junction(self) -> bool: ...
 
 @final
 class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, int, int, int, int]):
@@ -602,7 +604,12 @@ def isatty(__fd: int) -> bool: ...
 if sys.platform != "win32" and sys.version_info >= (3, 11):
     def login_tty(__fd: int) -> None: ...
 
-def lseek(__fd: int, __position: int, __how: int) -> int: ...
+if sys.version_info >= (3, 11):
+    def lseek(__fd: int, __position: int, __whence: int) -> int: ...
+
+else:
+    def lseek(__fd: int, __position: int, __how: int) -> int: ...
+
 def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: ...
 def pipe() -> tuple[int, int]: ...
 def read(__fd: int, __length: int) -> bytes: ...
diff --git a/mypy/typeshed/stdlib/posixpath.pyi b/mypy/typeshed/stdlib/posixpath.pyi
index 1945190be5f8..45a8ad7ec6a4 100644
--- a/mypy/typeshed/stdlib/posixpath.pyi
+++ b/mypy/typeshed/stdlib/posixpath.pyi
@@ -58,6 +58,8 @@ __all__ = [
     "relpath",
     "commonpath",
 ]
+if sys.version_info >= (3, 12):
+    __all__ += ["isjunction", "splitroot"]
 
 supports_unicode_filenames: bool
 # aliases (also in os)
@@ -150,3 +152,10 @@ def isabs(s: StrOrBytesPath) -> bool: ...
 def islink(path: FileDescriptorOrPath) -> bool: ...
 def ismount(path: FileDescriptorOrPath) -> bool: ...
 def lexists(path: FileDescriptorOrPath) -> bool: ...
+
+if sys.version_info >= (3, 12):
+    def isjunction(path: StrOrBytesPath) -> bool: ...
+    @overload
+    def splitroot(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr, AnyOrLiteralStr]: ...
+    @overload
+    def splitroot(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr, AnyStr]: ...
diff --git a/mypy/typeshed/stdlib/pydoc.pyi b/mypy/typeshed/stdlib/pydoc.pyi
index 7791c977aa8b..1b09bcb059e4 100644
--- a/mypy/typeshed/stdlib/pydoc.pyi
+++ b/mypy/typeshed/stdlib/pydoc.pyi
@@ -198,7 +198,7 @@ def render_doc(
     thing: str | object, title: str = "Python Library Documentation: %s", forceload: bool = ..., renderer: Doc | None = None
 ) -> str: ...
 
-if sys.version_info >= (3, 12):
+if sys.version_info >= (3, 11):
     def doc(
         thing: str | object,
         title: str = "Python Library Documentation: %s",
@@ -230,7 +230,7 @@ class Helper:
     def __call__(self, request: str | Helper | object = ...) -> None: ...
     def interact(self) -> None: ...
     def getline(self, prompt: str) -> str: ...
-    if sys.version_info >= (3, 12):
+    if sys.version_info >= (3, 11):
         def help(self, request: Any, is_cli: bool = False) -> None: ...
     else:
         def help(self, request: Any) -> None: ...
diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi
index 1c49b130e48f..73762cd75e79 100644
--- a/mypy/typeshed/stdlib/ssl.pyi
+++ b/mypy/typeshed/stdlib/ssl.pyi
@@ -201,12 +201,13 @@ class Options(enum.IntFlag):
     OP_NO_RENEGOTIATION: int
     if sys.version_info >= (3, 8):
         OP_ENABLE_MIDDLEBOX_COMPAT: int
-        if sys.platform == "linux":
-            OP_IGNORE_UNEXPECTED_EOF: int
     if sys.version_info >= (3, 12):
         OP_LEGACY_SERVER_CONNECT: int
     if sys.version_info >= (3, 12) and sys.platform != "linux":
         OP_ENABLE_KTLS: int
+    if sys.version_info >= (3, 11):
+        OP_IGNORE_UNEXPECTED_EOF: int
+    elif sys.version_info >= (3, 8) and sys.platform == "linux":
         OP_IGNORE_UNEXPECTED_EOF: int
 
 OP_ALL: Options
@@ -224,12 +225,13 @@ OP_NO_TICKET: Options
 OP_NO_RENEGOTIATION: Options
 if sys.version_info >= (3, 8):
     OP_ENABLE_MIDDLEBOX_COMPAT: Options
-    if sys.platform == "linux":
-        OP_IGNORE_UNEXPECTED_EOF: Options
 if sys.version_info >= (3, 12):
     OP_LEGACY_SERVER_CONNECT: Options
 if sys.version_info >= (3, 12) and sys.platform != "linux":
     OP_ENABLE_KTLS: Options
+if sys.version_info >= (3, 11):
+    OP_IGNORE_UNEXPECTED_EOF: Options
+elif sys.version_info >= (3, 8) and sys.platform == "linux":
     OP_IGNORE_UNEXPECTED_EOF: Options
 
 HAS_NEVER_CHECK_COMMON_NAME: bool
diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi
index ea04303683b5..61bcde24255b 100644
--- a/mypy/typeshed/stdlib/tempfile.pyi
+++ b/mypy/typeshed/stdlib/tempfile.pyi
@@ -321,7 +321,7 @@ else:
             dir: GenericPath[AnyStr] | None = None,
         ) -> IO[Any]: ...
 
-class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]):
+class _TemporaryFileWrapper(IO[AnyStr], Generic[AnyStr]):
     file: IO[AnyStr]  # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter
     name: str
     delete: bool
diff --git a/mypy/typeshed/stdlib/unittest/__init__.pyi b/mypy/typeshed/stdlib/unittest/__init__.pyi
index 33820c793fa5..f96d6fb185c5 100644
--- a/mypy/typeshed/stdlib/unittest/__init__.pyi
+++ b/mypy/typeshed/stdlib/unittest/__init__.pyi
@@ -65,5 +65,7 @@ if sys.version_info >= (3, 8):
 if sys.version_info >= (3, 11):
     __all__ += ["enterModuleContext", "doModuleCleanups"]
 
-def load_tests(loader: TestLoader, tests: TestSuite, pattern: str | None) -> TestSuite: ...
+if sys.version_info < (3, 12):
+    def load_tests(loader: TestLoader, tests: TestSuite, pattern: str | None) -> TestSuite: ...
+
 def __dir__() -> set[str]: ...
diff --git a/mypy/typeshed/stdlib/unittest/loader.pyi b/mypy/typeshed/stdlib/unittest/loader.pyi
index f3850c939d07..202309ac1d93 100644
--- a/mypy/typeshed/stdlib/unittest/loader.pyi
+++ b/mypy/typeshed/stdlib/unittest/loader.pyi
@@ -1,3 +1,4 @@
+import sys
 import unittest.case
 import unittest.suite
 from collections.abc import Callable, Sequence
@@ -18,7 +19,11 @@ class TestLoader:
     testNamePatterns: list[str] | None
     suiteClass: _SuiteClass
     def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ...
-    def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: Any = None) -> unittest.suite.TestSuite: ...
+    if sys.version_info >= (3, 12):
+        def loadTestsFromModule(self, module: ModuleType, *, pattern: str | None = None) -> unittest.suite.TestSuite: ...
+    else:
+        def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: ...
+
     def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: ...
     def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: ...
     def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: ...
diff --git a/mypy/typeshed/stdlib/unittest/main.pyi b/mypy/typeshed/stdlib/unittest/main.pyi
index 6d970c920096..d29e9a2b8da8 100644
--- a/mypy/typeshed/stdlib/unittest/main.pyi
+++ b/mypy/typeshed/stdlib/unittest/main.pyi
@@ -1,3 +1,4 @@
+import sys
 import unittest.case
 import unittest.loader
 import unittest.result
@@ -23,22 +24,43 @@ class TestProgram:
     progName: str | None
     warnings: str | None
     testNamePatterns: list[str] | None
-    def __init__(
-        self,
-        module: None | str | ModuleType = "__main__",
-        defaultTest: str | Iterable[str] | None = None,
-        argv: list[str] | None = None,
-        testRunner: type[_TestRunner] | _TestRunner | None = None,
-        testLoader: unittest.loader.TestLoader = ...,
-        exit: bool = True,
-        verbosity: int = 1,
-        failfast: bool | None = None,
-        catchbreak: bool | None = None,
-        buffer: bool | None = None,
-        warnings: str | None = None,
-        *,
-        tb_locals: bool = False,
-    ) -> None: ...
+    if sys.version_info >= (3, 12):
+        durations: unittest.result._DurationsType | None
+        def __init__(
+            self,
+            module: None | str | ModuleType = "__main__",
+            defaultTest: str | Iterable[str] | None = None,
+            argv: list[str] | None = None,
+            testRunner: type[_TestRunner] | _TestRunner | None = None,
+            testLoader: unittest.loader.TestLoader = ...,
+            exit: bool = True,
+            verbosity: int = 1,
+            failfast: bool | None = None,
+            catchbreak: bool | None = None,
+            buffer: bool | None = None,
+            warnings: str | None = None,
+            *,
+            tb_locals: bool = False,
+            durations: unittest.result._DurationsType | None = None,
+        ) -> None: ...
+    else:
+        def __init__(
+            self,
+            module: None | str | ModuleType = "__main__",
+            defaultTest: str | Iterable[str] | None = None,
+            argv: list[str] | None = None,
+            testRunner: type[_TestRunner] | _TestRunner | None = None,
+            testLoader: unittest.loader.TestLoader = ...,
+            exit: bool = True,
+            verbosity: int = 1,
+            failfast: bool | None = None,
+            catchbreak: bool | None = None,
+            buffer: bool | None = None,
+            warnings: str | None = None,
+            *,
+            tb_locals: bool = False,
+        ) -> None: ...
+
     def usageExit(self, msg: Any = None) -> None: ...
     def parseArgs(self, argv: list[str]) -> None: ...
     def createTests(self, from_discovery: bool = False, Loader: unittest.loader.TestLoader | None = None) -> None: ...
diff --git a/mypy/typeshed/stdlib/unittest/result.pyi b/mypy/typeshed/stdlib/unittest/result.pyi
index 8d78bc0f7dcf..dfc505936f59 100644
--- a/mypy/typeshed/stdlib/unittest/result.pyi
+++ b/mypy/typeshed/stdlib/unittest/result.pyi
@@ -1,9 +1,12 @@
+import sys
 import unittest.case
 from _typeshed import OptExcInfo
 from collections.abc import Callable
 from typing import Any, TextIO, TypeVar
+from typing_extensions import TypeAlias
 
 _F = TypeVar("_F", bound=Callable[..., Any])
+_DurationsType: TypeAlias = list[tuple[str, float]]
 
 STDOUT_LINE: str
 STDERR_LINE: str
@@ -22,6 +25,8 @@ class TestResult:
     buffer: bool
     failfast: bool
     tb_locals: bool
+    if sys.version_info >= (3, 12):
+        collectedDurations: _DurationsType
     def __init__(self, stream: TextIO | None = None, descriptions: bool | None = None, verbosity: int | None = None) -> None: ...
     def printErrors(self) -> None: ...
     def wasSuccessful(self) -> bool: ...
@@ -37,3 +42,5 @@ class TestResult:
     def addExpectedFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ...
     def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: ...
     def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, err: OptExcInfo | None) -> None: ...
+    if sys.version_info >= (3, 12):
+        def addDuration(self, test: unittest.case.TestCase, elapsed: float) -> None: ...
diff --git a/mypy/typeshed/stdlib/unittest/runner.pyi b/mypy/typeshed/stdlib/unittest/runner.pyi
index c0ddcdb49208..0033083ac406 100644
--- a/mypy/typeshed/stdlib/unittest/runner.pyi
+++ b/mypy/typeshed/stdlib/unittest/runner.pyi
@@ -1,6 +1,8 @@
+import sys
 import unittest.case
 import unittest.result
 import unittest.suite
+from _typeshed import Incomplete
 from collections.abc import Callable, Iterable
 from typing import TextIO
 from typing_extensions import TypeAlias
@@ -14,23 +16,57 @@ class TextTestResult(unittest.result.TestResult):
     separator2: str
     showAll: bool  # undocumented
     stream: TextIO  # undocumented
-    def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ...
+    if sys.version_info >= (3, 12):
+        durations: unittest.result._DurationsType | None
+        def __init__(
+            self, stream: TextIO, descriptions: bool, verbosity: int, *, durations: unittest.result._DurationsType | None = None
+        ) -> None: ...
+    else:
+        def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ...
+
     def getDescription(self, test: unittest.case.TestCase) -> str: ...
     def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ...
 
 class TextTestRunner:
     resultclass: _ResultClassType
-    def __init__(
-        self,
-        stream: TextIO | None = None,
-        descriptions: bool = True,
-        verbosity: int = 1,
-        failfast: bool = False,
-        buffer: bool = False,
-        resultclass: _ResultClassType | None = None,
-        warnings: type[Warning] | None = None,
-        *,
-        tb_locals: bool = False,
-    ) -> None: ...
+    # TODO: add `_WritelnDecorator` type
+    # stream: _WritelnDecorator
+    stream: Incomplete
+    descriptions: bool
+    verbosity: int
+    failfast: bool
+    buffer: bool
+    warnings: str | None
+    tb_locals: bool
+
+    if sys.version_info >= (3, 12):
+        durations: unittest.result._DurationsType | None
+        def __init__(
+            self,
+            stream: TextIO | None = None,
+            descriptions: bool = True,
+            verbosity: int = 1,
+            failfast: bool = False,
+            buffer: bool = False,
+            resultclass: _ResultClassType | None = None,
+            warnings: str | None = None,
+            *,
+            tb_locals: bool = False,
+            durations: unittest.result._DurationsType | None = None,
+        ) -> None: ...
+    else:
+        def __init__(
+            self,
+            stream: TextIO | None = None,
+            descriptions: bool = True,
+            verbosity: int = 1,
+            failfast: bool = False,
+            buffer: bool = False,
+            resultclass: _ResultClassType | None = None,
+            warnings: str | None = None,
+            *,
+            tb_locals: bool = False,
+        ) -> None: ...
+
     def _makeResult(self) -> unittest.result.TestResult: ...
     def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ...

From d440490270b643b2be333b5b27b154813f016ab6 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Fri, 1 Sep 2023 03:40:24 -0700
Subject: [PATCH 093/288] Deduplicate iterable logic (#16006)

This e.g. makes sure both code paths receive my fix in #15688
---
 mypy/checker.py | 35 ++++++++++++-----------------------
 1 file changed, 12 insertions(+), 23 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index fffa87c4f634..fa7c645873d0 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -41,7 +41,6 @@
 from mypy.errorcodes import TYPE_VAR, UNUSED_AWAITABLE, UNUSED_COROUTINE, ErrorCode
 from mypy.errors import Errors, ErrorWatcher, report_internal_error
 from mypy.expandtype import expand_self_type, expand_type, expand_type_by_instance
-from mypy.join import join_types
 from mypy.literals import Key, extract_var_from_literal_hash, literal, literal_hash
 from mypy.maptype import map_instance_to_supertype
 from mypy.meet import is_overlapping_erased_types, is_overlapping_types
@@ -4653,42 +4652,32 @@ def analyze_async_iterable_item_type(self, expr: Expression) -> tuple[Type, Type
 
     def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]:
         """Analyse iterable expression and return iterator and iterator item types."""
-        echk = self.expr_checker
-        iterable = get_proper_type(echk.accept(expr))
-        iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], expr)[0]
-
+        iterator, iterable = self.analyze_iterable_item_type_without_expression(
+            self.expr_checker.accept(expr), context=expr
+        )
         int_type = self.analyze_range_native_int_type(expr)
         if int_type:
             return iterator, int_type
-
-        if (
-            isinstance(iterable, TupleType)
-            and iterable.partial_fallback.type.fullname == "builtins.tuple"
-        ):
-            return iterator, tuple_fallback(iterable).args[0]
-        else:
-            # Non-tuple iterable.
-            return iterator, echk.check_method_call_by_name("__next__", iterator, [], [], expr)[0]
+        return iterator, iterable
 
     def analyze_iterable_item_type_without_expression(
         self, type: Type, context: Context
     ) -> tuple[Type, Type]:
         """Analyse iterable type and return iterator and iterator item types."""
         echk = self.expr_checker
+        iterable: Type
         iterable = get_proper_type(type)
         iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], context)[0]
 
-        if isinstance(iterable, TupleType):
-            joined: Type = UninhabitedType()
-            for item in iterable.items:
-                joined = join_types(joined, item)
-            return iterator, joined
+        if (
+            isinstance(iterable, TupleType)
+            and iterable.partial_fallback.type.fullname == "builtins.tuple"
+        ):
+            return iterator, tuple_fallback(iterable).args[0]
         else:
             # Non-tuple iterable.
-            return (
-                iterator,
-                echk.check_method_call_by_name("__next__", iterator, [], [], context)[0],
-            )
+            iterable = echk.check_method_call_by_name("__next__", iterator, [], [], context)[0]
+            return iterator, iterable
 
     def analyze_range_native_int_type(self, expr: Expression) -> Type | None:
         """Try to infer native int item type from arguments to range(...).

From 803f61097b0eba6505c976d72ce2176b8c64d987 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Fri, 1 Sep 2023 15:33:01 -0700
Subject: [PATCH 094/288] Fix crash when parsing error code config with typo
 (#16005)

Fixes #16002
---
 mypy/config_parser.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/mypy/config_parser.py b/mypy/config_parser.py
index a84f3594a0d2..7748c3b25966 100644
--- a/mypy/config_parser.py
+++ b/mypy/config_parser.py
@@ -434,11 +434,26 @@ def parse_section(
     """
     results: dict[str, object] = {}
     report_dirs: dict[str, str] = {}
+
+    # Because these fields exist on Options, without proactive checking, we would accept them
+    # and crash later
+    invalid_options = {
+        "enabled_error_codes": "enable_error_code",
+        "disabled_error_codes": "disable_error_code",
+    }
+
     for key in section:
         invert = False
         options_key = key
         if key in config_types:
             ct = config_types[key]
+        elif key in invalid_options:
+            print(
+                f"{prefix}Unrecognized option: {key} = {section[key]}"
+                f" (did you mean {invalid_options[key]}?)",
+                file=stderr,
+            )
+            continue
         else:
             dv = None
             # We have to keep new_semantic_analyzer in Options

From 0c29507e6ef870eb96da222a734dc8ef8e5fbe24 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Fri, 1 Sep 2023 19:16:11 -0700
Subject: [PATCH 095/288] Make PEP 695 constructs give a reasonable error
 message (#16013)

Mypy does not yet support PEP 695

Fixes #16011, linking #15238
---
 mypy/fastparse.py                   | 31 +++++++++++++++
 mypy/test/helpers.py                |  4 +-
 mypy/test/testcheck.py              |  2 +
 test-data/unit/check-python312.test | 59 +++++++++++++++++++++++++++++
 4 files changed, 95 insertions(+), 1 deletion(-)
 create mode 100644 test-data/unit/check-python312.test

diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 6aa626afb81e..a96e697d40bf 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -144,6 +144,11 @@ def ast3_parse(
 NamedExpr = ast3.NamedExpr
 Constant = ast3.Constant
 
+if sys.version_info >= (3, 12):
+    ast_TypeAlias = ast3.TypeAlias
+else:
+    ast_TypeAlias = Any
+
 if sys.version_info >= (3, 10):
     Match = ast3.Match
     MatchValue = ast3.MatchValue
@@ -936,6 +941,14 @@ def do_func_def(
                 arg_types = [AnyType(TypeOfAny.from_error)] * len(args)
                 return_type = AnyType(TypeOfAny.from_error)
         else:
+            if sys.version_info >= (3, 12) and n.type_params:
+                self.fail(
+                    ErrorMessage("PEP 695 generics are not yet supported", code=codes.VALID_TYPE),
+                    n.type_params[0].lineno,
+                    n.type_params[0].col_offset,
+                    blocker=False,
+                )
+
             arg_types = [a.type_annotation for a in args]
             return_type = TypeConverter(
                 self.errors, line=n.returns.lineno if n.returns else lineno
@@ -1110,6 +1123,14 @@ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef:
         self.class_and_function_stack.append("C")
         keywords = [(kw.arg, self.visit(kw.value)) for kw in n.keywords if kw.arg]
 
+        if sys.version_info >= (3, 12) and n.type_params:
+            self.fail(
+                ErrorMessage("PEP 695 generics are not yet supported", code=codes.VALID_TYPE),
+                n.type_params[0].lineno,
+                n.type_params[0].col_offset,
+                blocker=False,
+            )
+
         cdef = ClassDef(
             n.name,
             self.as_required_block(n.body),
@@ -1717,6 +1738,16 @@ def visit_MatchOr(self, n: MatchOr) -> OrPattern:
         node = OrPattern([self.visit(pattern) for pattern in n.patterns])
         return self.set_line(node, n)
 
+    def visit_TypeAlias(self, n: ast_TypeAlias) -> AssignmentStmt:
+        self.fail(
+            ErrorMessage("PEP 695 type aliases are not yet supported", code=codes.VALID_TYPE),
+            n.lineno,
+            n.col_offset,
+            blocker=False,
+        )
+        node = AssignmentStmt([NameExpr(n.name.id)], self.visit(n.value))
+        return self.set_line(node, n)
+
 
 class TypeConverter:
     def __init__(
diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py
index d1850219e60a..7447391593d5 100644
--- a/mypy/test/helpers.py
+++ b/mypy/test/helpers.py
@@ -241,7 +241,9 @@ def num_skipped_suffix_lines(a1: list[str], a2: list[str]) -> int:
 
 
 def testfile_pyversion(path: str) -> tuple[int, int]:
-    if path.endswith("python311.test"):
+    if path.endswith("python312.test"):
+        return 3, 12
+    elif path.endswith("python311.test"):
         return 3, 11
     elif path.endswith("python310.test"):
         return 3, 10
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
index 7b81deeafe9d..b20e8cc25f3d 100644
--- a/mypy/test/testcheck.py
+++ b/mypy/test/testcheck.py
@@ -43,6 +43,8 @@
     typecheck_files.remove("check-python310.test")
 if sys.version_info < (3, 11):
     typecheck_files.remove("check-python311.test")
+if sys.version_info < (3, 12):
+    typecheck_files.remove("check-python312.test")
 
 # Special tests for platforms with case-insensitive filesystems.
 if sys.platform not in ("darwin", "win32"):
diff --git a/test-data/unit/check-python312.test b/test-data/unit/check-python312.test
new file mode 100644
index 000000000000..91aca7794071
--- /dev/null
+++ b/test-data/unit/check-python312.test
@@ -0,0 +1,59 @@
+[case test695TypeAlias]
+type MyInt = int  # E: PEP 695 type aliases are not yet supported
+
+def f(x: MyInt) -> MyInt:
+    return reveal_type(x)  # N: Revealed type is "builtins.int"
+
+type MyList[T] = list[T]  # E: PEP 695 type aliases are not yet supported \
+                          # E: Name "T" is not defined
+
+def g(x: MyList[int]) -> MyList[int]:  # E: Variable "__main__.MyList" is not valid as a type \
+                                       # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
+    return reveal_type(x)  # N: Revealed type is "MyList?[builtins.int]"
+
+[case test695Class]
+class MyGen[T]:  # E: PEP 695 generics are not yet supported
+    def __init__(self, x: T) -> None:  # E: Name "T" is not defined
+        self.x = x
+
+def f(x: MyGen[int]):  # E: "MyGen" expects no type arguments, but 1 given
+    reveal_type(x.x)  # N: Revealed type is "Any"
+
+[case test695Function]
+def f[T](x: T) -> T:  # E: PEP 695 generics are not yet supported \
+                      # E: Name "T" is not defined
+    return reveal_type(x)  # N: Revealed type is "Any"
+
+reveal_type(f(1))  # N: Revealed type is "Any"
+
+async def g[T](x: T) -> T:  # E: PEP 695 generics are not yet supported \
+                            # E: Name "T" is not defined
+    return reveal_type(x)  # N: Revealed type is "Any"
+
+reveal_type(g(1))  # E: Value of type "Coroutine[Any, Any, Any]" must be used \
+                   # N: Are you missing an await? \
+                   # N: Revealed type is "typing.Coroutine[Any, Any, Any]"
+
+[case test695TypeVar]
+from typing import Callable
+type Alias1[T: int] = list[T]  # E: PEP 695 type aliases are not yet supported
+type Alias2[**P] = Callable[P, int]  # E: PEP 695 type aliases are not yet supported \
+                                     # E: Value of type "int" is not indexable \
+                                     # E: Name "P" is not defined
+type Alias3[*Ts] = tuple[*Ts]  # E: PEP 695 type aliases are not yet supported \
+                               # E: Type expected within [...] \
+                               # E: The type "Type[Tuple[Any, ...]]" is not generic and not indexable \
+                               # E: Name "Ts" is not defined
+
+class Cls1[T: int]: ...  # E: PEP 695 generics are not yet supported
+class Cls2[**P]: ...  # E: PEP 695 generics are not yet supported
+class Cls3[*Ts]: ...  # E: PEP 695 generics are not yet supported
+
+def func1[T: int](x: T) -> T: ...  # E: PEP 695 generics are not yet supported
+def func2[**P](x: Callable[P, int]) -> Callable[P, str]: ...  # E: PEP 695 generics are not yet supported \
+                                                              # E: The first argument to Callable must be a list of types, parameter specification, or "..." \
+                                                              # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas \
+                                                              # E: Name "P" is not defined
+def func3[*Ts](x: tuple[*Ts]) -> tuple[int, *Ts]: ...  # E: PEP 695 generics are not yet supported \
+                                                       # E: Name "Ts" is not defined
+[builtins fixtures/tuple.pyi]

From 17e9e228f2efaeab2ca063cca44411feaa370dd5 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Fri, 1 Sep 2023 22:47:37 -0700
Subject: [PATCH 096/288] Match note error codes to import error codes (#16004)

Fixes #16003. Follow up to #14740
---
 mypy/build.py              | 2 +-
 mypy/errors.py             | 2 +-
 mypy/report.py             | 2 +-
 mypy/test/testcheck.py     | 2 +-
 mypy/test/testcmdline.py   | 2 +-
 mypy/test/testreports.py   | 4 ++--
 test-data/unit/pep561.test | 1 +
 7 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/mypy/build.py b/mypy/build.py
index 525d5f436e7e..39629c2dc455 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -2798,7 +2798,7 @@ def module_not_found(
         for note in notes:
             if "{stub_dist}" in note:
                 note = note.format(stub_dist=stub_distribution_name(module))
-            errors.report(line, 0, note, severity="note", only_once=True, code=codes.IMPORT)
+            errors.report(line, 0, note, severity="note", only_once=True, code=code)
         if reason is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED:
             manager.missing_stub_packages.add(stub_distribution_name(module))
     errors.set_import_context(save_import_context)
diff --git a/mypy/errors.py b/mypy/errors.py
index 680b7f1d31ea..a678b790cb8c 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -469,7 +469,7 @@ def _add_error_info(self, file: str, info: ErrorInfo) -> None:
         self.error_info_map[file].append(info)
         if info.blocker:
             self.has_blockers.add(file)
-        if info.code is IMPORT:
+        if info.code in (IMPORT, IMPORT_UNTYPED, IMPORT_NOT_FOUND):
             self.seen_import_error = True
 
     def _filter_error(self, file: str, info: ErrorInfo) -> bool:
diff --git a/mypy/report.py b/mypy/report.py
index 5d93351aa37d..d5f16464c0fb 100644
--- a/mypy/report.py
+++ b/mypy/report.py
@@ -25,7 +25,7 @@
 from mypy.version import __version__
 
 try:
-    from lxml import etree  # type: ignore[import]
+    from lxml import etree  # type: ignore[import-untyped]
 
     LXML_INSTALLED = True
 except ImportError:
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
index b20e8cc25f3d..85fbe5dc2990 100644
--- a/mypy/test/testcheck.py
+++ b/mypy/test/testcheck.py
@@ -26,7 +26,7 @@
 from mypy.test.update_data import update_testcase_output
 
 try:
-    import lxml  # type: ignore[import]
+    import lxml  # type: ignore[import-untyped]
 except ImportError:
     lxml = None
 
diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py
index 30ecef07a821..9bc02d319964 100644
--- a/mypy/test/testcmdline.py
+++ b/mypy/test/testcmdline.py
@@ -20,7 +20,7 @@
 )
 
 try:
-    import lxml  # type: ignore[import]
+    import lxml  # type: ignore[import-untyped]
 except ImportError:
     lxml = None
 
diff --git a/mypy/test/testreports.py b/mypy/test/testreports.py
index a422b4bb2a7b..5ff315f83ba8 100644
--- a/mypy/test/testreports.py
+++ b/mypy/test/testreports.py
@@ -7,7 +7,7 @@
 from mypy.test.helpers import Suite, assert_equal
 
 try:
-    import lxml  # type: ignore[import]
+    import lxml  # type: ignore[import-untyped]
 except ImportError:
     lxml = None
 
@@ -22,7 +22,7 @@ def test_get_line_rate(self) -> None:
 
     @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. Is it installed?")
     def test_as_xml(self) -> None:
-        import lxml.etree as etree  # type: ignore[import]
+        import lxml.etree as etree  # type: ignore[import-untyped]
 
         cobertura_package = CoberturaPackage("foobar")
         cobertura_package.covered_lines = 21
diff --git a/test-data/unit/pep561.test b/test-data/unit/pep561.test
index e8ebbd03dca7..9969c2894c36 100644
--- a/test-data/unit/pep561.test
+++ b/test-data/unit/pep561.test
@@ -167,6 +167,7 @@ a.bf(False)
 b.bf(False)
 a.bf(1)
 b.bf(1)
+import typedpkg_ns.whatever as c  # type: ignore[import-untyped]
 [out]
 testNamespacePkgWStubs.py:4: error: Skipping analyzing "typedpkg_ns.b.bbb": module is installed, but missing library stubs or py.typed marker
 testNamespacePkgWStubs.py:4: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports

From 6884aa2b27f7ec25ebeffab9e39b35807642a9d2 Mon Sep 17 00:00:00 2001
From: DS/Charlie <82801887+ds-cbo@users.noreply.github.com>
Date: Sat, 2 Sep 2023 08:57:28 +0200
Subject: [PATCH 097/288] Fix case Any() in match statement (#14479)

Fixes #14477
---
 mypy/checkpattern.py                |  6 ++++++
 test-data/unit/check-python310.test | 10 ++++++++++
 2 files changed, 16 insertions(+)

diff --git a/mypy/checkpattern.py b/mypy/checkpattern.py
index e432675b0b5a..3f9a99b21530 100644
--- a/mypy/checkpattern.py
+++ b/mypy/checkpattern.py
@@ -462,6 +462,12 @@ def visit_class_pattern(self, o: ClassPattern) -> PatternType:
             typ: Type = Instance(type_info, [any_type] * len(type_info.defn.type_vars))
         elif isinstance(type_info, TypeAlias):
             typ = type_info.target
+        elif (
+            isinstance(type_info, Var)
+            and type_info.type is not None
+            and isinstance(get_proper_type(type_info.type), AnyType)
+        ):
+            typ = type_info.type
         else:
             if isinstance(type_info, Var) and type_info.type is not None:
                 name = type_info.type.str_with_options(self.options)
diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test
index f81da23d148c..640e64c78d5f 100644
--- a/test-data/unit/check-python310.test
+++ b/test-data/unit/check-python310.test
@@ -896,6 +896,16 @@ match m:
         reveal_type(i)
         reveal_type(j)
 
+[case testMatchClassPatternAny]
+from typing import Any
+
+Foo: Any
+m: object
+
+match m:
+    case Foo():
+        pass
+
 [case testMatchClassPatternNestedGenerics]
 # From cpython test_patma.py
 x = [[{0: 0}]]

From 1655b0ce16af04cc76cf769a693366e9206a03de Mon Sep 17 00:00:00 2001
From: Albert Tugushev <albert@tugushev.ru>
Date: Sat, 2 Sep 2023 09:02:40 +0200
Subject: [PATCH 098/288] Reword the error message related to void functions
 (#15876)

Fixes #3226.

Aims to give clearer guidance to users who are confused because their
void functions technically return None, by spelling that out in the
error message.
Co-authored-by: Ilya Priven <ilya.konstantinov@gmail.com>
Co-authored-by: hauntsaninja <hauntsaninja@gmail.com>
---
 docs/source/error_code_list.rst        |  2 +-
 mypy/messages.py                       | 15 ++----
 test-data/unit/check-errorcodes.test   |  6 +--
 test-data/unit/check-expressions.test  | 66 +++++++++++++-------------
 test-data/unit/check-functions.test    |  2 +-
 test-data/unit/check-inference.test    |  8 ++--
 test-data/unit/check-optional.test     |  6 +--
 test-data/unit/check-tuples.test       |  4 +-
 test-data/unit/check-varargs.test      |  4 +-
 test-data/unit/pythoneval-asyncio.test |  2 +-
 10 files changed, 54 insertions(+), 61 deletions(-)

diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst
index 1f75ac54d525..a865a4dd1532 100644
--- a/docs/source/error_code_list.rst
+++ b/docs/source/error_code_list.rst
@@ -741,7 +741,7 @@ returns ``None``:
    # OK: we don't do anything with the return value
    f()
 
-   # Error: "f" does not return a value  [func-returns-value]
+   # Error: "f" does not return a value (it only ever returns None)  [func-returns-value]
    if f():
         print("not false")
 
diff --git a/mypy/messages.py b/mypy/messages.py
index cda4cda25ee4..4b71bd876dcc 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -1021,18 +1021,11 @@ def duplicate_argument_value(self, callee: CallableType, index: int, context: Co
 
     def does_not_return_value(self, callee_type: Type | None, context: Context) -> None:
         """Report an error about use of an unusable type."""
-        name: str | None = None
         callee_type = get_proper_type(callee_type)
-        if isinstance(callee_type, FunctionLike):
-            name = callable_name(callee_type)
-        if name is not None:
-            self.fail(
-                f"{capitalize(name)} does not return a value",
-                context,
-                code=codes.FUNC_RETURNS_VALUE,
-            )
-        else:
-            self.fail("Function does not return a value", context, code=codes.FUNC_RETURNS_VALUE)
+        callee_name = callable_name(callee_type) if isinstance(callee_type, FunctionLike) else None
+        name = callee_name or "Function"
+        message = f"{name} does not return a value (it only ever returns None)"
+        self.fail(message, context, code=codes.FUNC_RETURNS_VALUE)
 
     def deleted_as_rvalue(self, typ: DeletedType, context: Context) -> None:
         """Report an error about using an deleted type as an rvalue."""
diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test
index 796e1c1ea98e..df14e328ed72 100644
--- a/test-data/unit/check-errorcodes.test
+++ b/test-data/unit/check-errorcodes.test
@@ -553,15 +553,15 @@ from typing import Callable
 
 def f() -> None: pass
 
-x = f()  # E: "f" does not return a value  [func-returns-value]
+x = f()  # E: "f" does not return a value (it only ever returns None)  [func-returns-value]
 
 class A:
     def g(self) -> None: pass
 
-y = A().g()  # E: "g" of "A" does not return a value  [func-returns-value]
+y = A().g()  # E: "g" of "A" does not return a value (it only ever returns None)  [func-returns-value]
 
 c: Callable[[], None]
-z = c()  # E: Function does not return a value  [func-returns-value]
+z = c()  # E: Function does not return a value (it only ever returns None)  [func-returns-value]
 
 [case testErrorCodeInstantiateAbstract]
 from abc import abstractmethod
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
index c213255997f8..a3c1bc8795f2 100644
--- a/test-data/unit/check-expressions.test
+++ b/test-data/unit/check-expressions.test
@@ -1079,15 +1079,15 @@ class A:
 a: A
 o: object
 if int():
-    a = f()         # E: "f" does not return a value
+    a = f()         # E: "f" does not return a value (it only ever returns None)
 if int():
-    o = a()         # E: Function does not return a value
+    o = a()         # E: Function does not return a value (it only ever returns None)
 if int():
-    o = A().g(a)    # E: "g" of "A" does not return a value
+    o = A().g(a)    # E: "g" of "A" does not return a value (it only ever returns None)
 if int():
-    o = A.g(a, a)   # E: "g" of "A" does not return a value
-A().g(f())      # E: "f" does not return a value
-x: A = f()      # E: "f" does not return a value
+    o = A.g(a, a)   # E: "g" of "A" does not return a value (it only ever returns None)
+A().g(f())      # E: "f" does not return a value (it only ever returns None)
+x: A = f()      # E: "f" does not return a value (it only ever returns None)
 f()
 A().g(a)
 [builtins fixtures/tuple.pyi]
@@ -1096,15 +1096,15 @@ A().g(a)
 import typing
 def f() -> None: pass
 
-if f():   # E: "f" does not return a value
+if f():   # E: "f" does not return a value (it only ever returns None)
     pass
-elif f(): # E: "f" does not return a value
+elif f(): # E: "f" does not return a value (it only ever returns None)
     pass
-while f(): # E: "f" does not return a value
+while f(): # E: "f" does not return a value (it only ever returns None)
     pass
 def g() -> object:
-    return f() # E: "f" does not return a value
-raise f() # E: "f" does not return a value
+    return f() # E: "f" does not return a value (it only ever returns None)
+raise f() # E: "f" does not return a value (it only ever returns None)
 [builtins fixtures/exception.pyi]
 
 [case testNoneReturnTypeWithExpressions]
@@ -1115,13 +1115,13 @@ class A:
     def __add__(self, x: 'A') -> 'A': pass
 
 a: A
-[f()]       # E: "f" does not return a value
-f() + a     # E: "f" does not return a value
-a + f()     # E: "f" does not return a value
-f() == a    # E: "f" does not return a value
-a != f()    # E: "f" does not return a value
+[f()]       # E: "f" does not return a value (it only ever returns None)
+f() + a     # E: "f" does not return a value (it only ever returns None)
+a + f()     # E: "f" does not return a value (it only ever returns None)
+f() == a    # E: "f" does not return a value (it only ever returns None)
+a != f()    # E: "f" does not return a value (it only ever returns None)
 cast(A, f())
-f().foo     # E: "f" does not return a value
+f().foo     # E: "f" does not return a value (it only ever returns None)
 [builtins fixtures/list.pyi]
 
 [case testNoneReturnTypeWithExpressions2]
@@ -1134,14 +1134,14 @@ class A:
 
 a: A
 b: bool
-f() in a   # E: "f" does not return a value  # E: Unsupported right operand type for in ("A")
-a < f()    # E: "f" does not return a value
-f() <= a   # E: "f" does not return a value
-a in f()   # E: "f" does not return a value
--f()       # E: "f" does not return a value
-not f()    # E: "f" does not return a value
-f() and b  # E: "f" does not return a value
-b or f()   # E: "f" does not return a value
+f() in a   # E: "f" does not return a value (it only ever returns None)  # E: Unsupported right operand type for in ("A")
+a < f()    # E: "f" does not return a value (it only ever returns None)
+f() <= a   # E: "f" does not return a value (it only ever returns None)
+a in f()   # E: "f" does not return a value (it only ever returns None)
+-f()       # E: "f" does not return a value (it only ever returns None)
+not f()    # E: "f" does not return a value (it only ever returns None)
+f() and b  # E: "f" does not return a value (it only ever returns None)
+b or f()   # E: "f" does not return a value (it only ever returns None)
 [builtins fixtures/bool.pyi]
 
 
@@ -1441,7 +1441,7 @@ if int():
 [case testConditionalExpressionWithEmptyCondition]
 import typing
 def f() -> None: pass
-x = 1 if f() else 2 # E: "f" does not return a value
+x = 1 if f() else 2 # E: "f" does not return a value (it only ever returns None)
 
 [case testConditionalExpressionWithSubtyping]
 import typing
@@ -1504,7 +1504,7 @@ from typing import List, Union
 x = []
 y = ""
 x.append(y) if bool() else x.append(y)
-z = x.append(y) if bool() else x.append(y) # E: "append" of "list" does not return a value
+z = x.append(y) if bool() else x.append(y) # E: "append" of "list" does not return a value (it only ever returns None)
 [builtins fixtures/list.pyi]
 
 -- Special cases
@@ -1604,7 +1604,7 @@ def f(x: int) -> None:
 [builtins fixtures/for.pyi]
 [out]
 main:1: error: The return type of a generator function should be "Generator" or one of its supertypes
-main:2: error: "f" does not return a value
+main:2: error: "f" does not return a value (it only ever returns None)
 main:2: error: Argument 1 to "f" has incompatible type "str"; expected "int"
 
 [case testYieldExpressionWithNone]
@@ -1624,7 +1624,7 @@ from typing import Iterator
 def f() -> Iterator[int]:
     yield 5
 def g() -> Iterator[int]:
-    a = yield from f()  # E: Function does not return a value
+    a = yield from f()  # E: Function does not return a value (it only ever returns None)
 
 [case testYieldFromGeneratorHasValue]
 from typing import Iterator, Generator
@@ -1639,12 +1639,12 @@ def g() -> Iterator[int]:
 [case testYieldFromTupleExpression]
 from typing import Generator
 def g() -> Generator[int, None, None]:
-    x = yield from ()  # E: Function does not return a value
-    x = yield from (0, 1, 2)  # E: Function does not return a value
+    x = yield from ()  # E: Function does not return a value (it only ever returns None)
+    x = yield from (0, 1, 2)  # E: Function does not return a value (it only ever returns None)
     x = yield from (0, "ERROR")  # E: Incompatible types in "yield from" (actual type "object", expected type "int") \
-                                 # E: Function does not return a value
+                                 # E: Function does not return a value (it only ever returns None)
     x = yield from ("ERROR",)  # E: Incompatible types in "yield from" (actual type "str", expected type "int") \
-                               # E: Function does not return a value
+                               # E: Function does not return a value (it only ever returns None)
 [builtins fixtures/tuple.pyi]
 
 -- dict(...)
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
index 4cc523a595d1..cd098a84d4d3 100644
--- a/test-data/unit/check-functions.test
+++ b/test-data/unit/check-functions.test
@@ -250,7 +250,7 @@ if int():
 if int():
     f = o   # E: Incompatible types in assignment (expression has type "object", variable has type "Callable[[], None]")
 if int():
-    f = f() # E: Function does not return a value
+    f = f() # E: Function does not return a value (it only ever returns None)
 
 if int():
     f = f
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 36b028977591..f9a4d58c74af 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -33,8 +33,8 @@ class B: pass
 [case testLvarInitializedToVoid]
 import typing
 def f() -> None:
-    a = g()    # E: "g" does not return a value
-    #b, c = g() # "g" does not return a value TODO
+    a = g()    # E: "g" does not return a value (it only ever returns None)
+    #b, c = g() # "g" does not return a value (it only ever returns None) TODO
 
 def g() -> None: pass
 [out]
@@ -1180,7 +1180,7 @@ for e, f in [[]]:  # E: Need type annotation for "e" \
 [case testForStatementInferenceWithVoid]
 def f() -> None: pass
 
-for x in f(): # E: "f" does not return a value
+for x in f(): # E: "f" does not return a value (it only ever returns None)
     pass
 [builtins fixtures/for.pyi]
 
@@ -2118,7 +2118,7 @@ arr = []
 arr.append(arr.append(1))
 [builtins fixtures/list.pyi]
 [out]
-main:3: error: "append" of "list" does not return a value
+main:3: error: "append" of "list" does not return a value (it only ever returns None)
 
 -- Multipass
 -- ---------
diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test
index ae247b0047f1..70f3c4486e14 100644
--- a/test-data/unit/check-optional.test
+++ b/test-data/unit/check-optional.test
@@ -361,9 +361,9 @@ def f() -> None:
 def g(x: Optional[int]) -> int:
   pass
 
-x = f()  # E: "f" does not return a value
-f() + 1  # E: "f" does not return a value
-g(f())  # E: "f" does not return a value
+x = f()  # E: "f" does not return a value (it only ever returns None)
+f() + 1  # E: "f" does not return a value (it only ever returns None)
+g(f())  # E: "f" does not return a value (it only ever returns None)
 
 [case testEmptyReturn]
 def f() -> None:
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index cff261774663..0e7c81edc498 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -169,8 +169,8 @@ class C(B): pass
 import typing
 def f() -> None: pass
 
-(None, f()) # E: "f" does not return a value
-(f(), None) # E: "f" does not return a value
+(None, f()) # E: "f" does not return a value (it only ever returns None)
+(f(), None) # E: "f" does not return a value (it only ever returns None)
 [builtins fixtures/tuple.pyi]
 
 
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
index 54546f3973b3..ef2c3c57fad5 100644
--- a/test-data/unit/check-varargs.test
+++ b/test-data/unit/check-varargs.test
@@ -52,8 +52,8 @@ c: C
 
 f(c)       # E: Argument 1 to "f" has incompatible type "C"; expected "A"
 f(a, b, c) # E: Argument 3 to "f" has incompatible type "C"; expected "A"
-f(g())     # E: "g" does not return a value
-f(a, g())  # E: "g" does not return a value
+f(g())     # E: "g" does not return a value (it only ever returns None)
+f(a, g())  # E: "g" does not return a value (it only ever returns None)
 f()
 f(a)
 f(b)
diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test
index 069374b9635c..4a185557495b 100644
--- a/test-data/unit/pythoneval-asyncio.test
+++ b/test-data/unit/pythoneval-asyncio.test
@@ -261,7 +261,7 @@ try:
 finally:
     loop.close()
 [out]
-_program.py:11: error: Function does not return a value
+_program.py:11: error: Function does not return a value (it only ever returns None)
 
 [case testErrorReturnIsNotTheSameType]
 from typing import Any

From 5adf934804c512c37a9506aeae426622abf92cb5 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 2 Sep 2023 00:53:35 -0700
Subject: [PATCH 099/288] Fix crash with report generation on namespace
 packages (again) (#16019)

Fixes #15979. The fix is similar to the one in `iterate_python_lines`.
---
 mypy/report.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/mypy/report.py b/mypy/report.py
index d5f16464c0fb..86fcee0521a6 100644
--- a/mypy/report.py
+++ b/mypy/report.py
@@ -171,8 +171,12 @@ def on_file(
     ) -> None:
         # Count physical lines.  This assumes the file's encoding is a
         # superset of ASCII (or at least uses \n in its line endings).
-        with open(tree.path, "rb") as f:
-            physical_lines = len(f.readlines())
+        try:
+            with open(tree.path, "rb") as f:
+                physical_lines = len(f.readlines())
+        except IsADirectoryError:
+            # can happen with namespace packages
+            physical_lines = 0
 
         func_counter = FuncCounterVisitor()
         tree.accept(func_counter)

From fb32db7237ec1847960c93cfb17c8f24182d5d77 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Sat, 2 Sep 2023 17:09:03 -0400
Subject: [PATCH 100/288] docs: document dataclass_transform behavior (#16017)

Document behavior discussed in
https://github.com/python/typing/discussions/1456.

---------

Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
---
 docs/source/additional_features.rst | 45 +++++++++++++++++++++--------
 1 file changed, 33 insertions(+), 12 deletions(-)

diff --git a/docs/source/additional_features.rst b/docs/source/additional_features.rst
index 5dd136476eaa..ae625c157654 100644
--- a/docs/source/additional_features.rst
+++ b/docs/source/additional_features.rst
@@ -71,12 +71,12 @@ and :pep:`557`.
 Caveats/Known Issues
 ====================
 
-Some functions in the :py:mod:`dataclasses` module, such as :py:func:`~dataclasses.replace` and :py:func:`~dataclasses.asdict`,
+Some functions in the :py:mod:`dataclasses` module, such as :py:func:`~dataclasses.asdict`,
 have imprecise (too permissive) types. This will be fixed in future releases.
 
 Mypy does not yet recognize aliases of :py:func:`dataclasses.dataclass <dataclasses.dataclass>`, and will
-probably never recognize dynamically computed decorators. The following examples
-do **not** work:
+probably never recognize dynamically computed decorators. The following example
+does **not** work:
 
 .. code-block:: python
 
@@ -94,16 +94,37 @@ do **not** work:
       """
       attribute: int
 
-    @dataclass_wrapper
-    class DynamicallyDecorated:
-      """
-      Mypy doesn't recognize this as a dataclass because it is decorated by a
-      function returning `dataclass` rather than by `dataclass` itself.
-      """
-      attribute: int
-
     AliasDecorated(attribute=1) # error: Unexpected keyword argument
-    DynamicallyDecorated(attribute=1) # error: Unexpected keyword argument
+
+
+To have Mypy recognize a wrapper of :py:func:`dataclasses.dataclass <dataclasses.dataclass>`
+as a dataclass decorator, consider using the :py:func:`~typing.dataclass_transform` decorator:
+
+.. code-block:: python
+
+    from dataclasses import dataclass, Field
+    from typing import TypeVar, dataclass_transform
+
+    T = TypeVar('T')
+
+    @dataclass_transform(field_specifiers=(Field,))
+    def my_dataclass(cls: type[T]) -> type[T]:
+        ...
+        return dataclass(cls)
+
+
+Data Class Transforms
+*********************
+
+Mypy supports the :py:func:`~typing.dataclass_transform` decorator as described in
+`PEP 681 <https://www.python.org/dev/peps/pep-0681/#the-dataclass-transform-decorator>`_.
+
+.. note::
+
+    Pragmatically, mypy will assume such classes have the internal attribute :code:`__dataclass_fields__`
+    (even though they might lack it in runtime) and will assume functions such as :py:func:`dataclasses.is_dataclass`
+    and :py:func:`dataclasses.fields` treat them as if they were dataclasses
+    (even though they may fail at runtime).
 
 .. _attrs_package:
 

From 6a6d2e8a2d919af7557063de8f1faa580969b011 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Sat, 2 Sep 2023 17:09:34 -0400
Subject: [PATCH 101/288] meta tests: refactor run_pytest (#15481)

Factor `run_pytest` out of mypy/test/meta/test_*.py.
---
 mypy/test/meta/_pytest.py          | 72 ++++++++++++++++++++++++++++++
 mypy/test/meta/test_parse_data.py  | 65 +++++++--------------------
 mypy/test/meta/test_update_data.py | 53 +++++++---------------
 3 files changed, 104 insertions(+), 86 deletions(-)
 create mode 100644 mypy/test/meta/_pytest.py

diff --git a/mypy/test/meta/_pytest.py b/mypy/test/meta/_pytest.py
new file mode 100644
index 000000000000..b8648f033143
--- /dev/null
+++ b/mypy/test/meta/_pytest.py
@@ -0,0 +1,72 @@
+import shlex
+import subprocess
+import sys
+import textwrap
+import uuid
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Iterable
+
+from mypy.test.config import test_data_prefix
+
+
+@dataclass
+class PytestResult:
+    input: str
+    input_updated: str  # any updates made by --update-data
+    stdout: str
+    stderr: str
+
+
+def dedent_docstring(s: str) -> str:
+    return textwrap.dedent(s).lstrip()
+
+
+def run_pytest_data_suite(
+    data_suite: str,
+    *,
+    data_file_prefix: str = "check",
+    pytest_node_prefix: str = "mypy/test/testcheck.py::TypeCheckSuite",
+    extra_args: Iterable[str],
+    max_attempts: int,
+) -> PytestResult:
+    """
+    Runs a suite of data test cases through pytest until either tests pass
+    or until a maximum number of attempts (needed for incremental tests).
+
+    :param data_suite: the actual "suite" i.e. the contents of a .test file
+    """
+    p_test_data = Path(test_data_prefix)
+    p_root = p_test_data.parent.parent
+    p = p_test_data / f"{data_file_prefix}-meta-{uuid.uuid4()}.test"
+    assert not p.exists()
+    data_suite = dedent_docstring(data_suite)
+    try:
+        p.write_text(data_suite)
+
+        test_nodeid = f"{pytest_node_prefix}::{p.name}"
+        extra_args = [sys.executable, "-m", "pytest", "-n", "0", "-s", *extra_args, test_nodeid]
+        cmd = shlex.join(extra_args)
+        for i in range(max_attempts - 1, -1, -1):
+            print(f">> {cmd}")
+            proc = subprocess.run(extra_args, capture_output=True, check=False, cwd=p_root)
+            if proc.returncode == 0:
+                break
+            prefix = "NESTED PYTEST STDOUT"
+            for line in proc.stdout.decode().splitlines():
+                print(f"{prefix}: {line}")
+                prefix = " " * len(prefix)
+            prefix = "NESTED PYTEST STDERR"
+            for line in proc.stderr.decode().splitlines():
+                print(f"{prefix}: {line}")
+                prefix = " " * len(prefix)
+            print(f"Exit code {proc.returncode} ({i} attempts remaining)")
+
+        return PytestResult(
+            input=data_suite,
+            input_updated=p.read_text(),
+            stdout=proc.stdout.decode(),
+            stderr=proc.stderr.decode(),
+        )
+    finally:
+        p.unlink()
diff --git a/mypy/test/meta/test_parse_data.py b/mypy/test/meta/test_parse_data.py
index 6593dbc45704..797fdd7b2c8c 100644
--- a/mypy/test/meta/test_parse_data.py
+++ b/mypy/test/meta/test_parse_data.py
@@ -2,37 +2,18 @@
 A "meta test" which tests the parsing of .test files. This is not meant to become exhaustive
 but to ensure we maintain a basic level of ergonomics for mypy contributors.
 """
-import subprocess
-import sys
-import textwrap
-import uuid
-from pathlib import Path
-
-from mypy.test.config import test_data_prefix
 from mypy.test.helpers import Suite
+from mypy.test.meta._pytest import PytestResult, run_pytest_data_suite
 
 
-class ParseTestDataSuite(Suite):
-    def _dedent(self, s: str) -> str:
-        return textwrap.dedent(s).lstrip()
+def _run_pytest(data_suite: str) -> PytestResult:
+    return run_pytest_data_suite(data_suite, extra_args=[], max_attempts=1)
 
-    def _run_pytest(self, data_suite: str) -> str:
-        p_test_data = Path(test_data_prefix)
-        p_root = p_test_data.parent.parent
-        p = p_test_data / f"check-meta-{uuid.uuid4()}.test"
-        assert not p.exists()
-        try:
-            p.write_text(data_suite)
-            test_nodeid = f"mypy/test/testcheck.py::TypeCheckSuite::{p.name}"
-            args = [sys.executable, "-m", "pytest", "-n", "0", "-s", test_nodeid]
-            proc = subprocess.run(args, cwd=p_root, capture_output=True, check=False)
-            return proc.stdout.decode()
-        finally:
-            p.unlink()
 
+class ParseTestDataSuite(Suite):
     def test_parse_invalid_case(self) -> None:
-        # Arrange
-        data = self._dedent(
+        # Act
+        result = _run_pytest(
             """
             [case abc]
             s: str
@@ -41,15 +22,12 @@ def test_parse_invalid_case(self) -> None:
             """
         )
 
-        # Act
-        actual = self._run_pytest(data)
-
         # Assert
-        assert "Invalid testcase id 'foo-XFAIL'" in actual
+        assert "Invalid testcase id 'foo-XFAIL'" in result.stdout
 
     def test_parse_invalid_section(self) -> None:
-        # Arrange
-        data = self._dedent(
+        # Act
+        result = _run_pytest(
             """
             [case abc]
             s: str
@@ -58,19 +36,16 @@ def test_parse_invalid_section(self) -> None:
             """
         )
 
-        # Act
-        actual = self._run_pytest(data)
-
         # Assert
-        expected_lineno = data.splitlines().index("[unknownsection]") + 1
+        expected_lineno = result.input.splitlines().index("[unknownsection]") + 1
         expected = (
             f".test:{expected_lineno}: Invalid section header [unknownsection] in case 'abc'"
         )
-        assert expected in actual
+        assert expected in result.stdout
 
     def test_bad_ge_version_check(self) -> None:
-        # Arrange
-        data = self._dedent(
+        # Act
+        actual = _run_pytest(
             """
             [case abc]
             s: str
@@ -79,15 +54,12 @@ def test_bad_ge_version_check(self) -> None:
             """
         )
 
-        # Act
-        actual = self._run_pytest(data)
-
         # Assert
-        assert "version>=3.8 always true since minimum runtime version is (3, 8)" in actual
+        assert "version>=3.8 always true since minimum runtime version is (3, 8)" in actual.stdout
 
     def test_bad_eq_version_check(self) -> None:
-        # Arrange
-        data = self._dedent(
+        # Act
+        actual = _run_pytest(
             """
             [case abc]
             s: str
@@ -96,8 +68,5 @@ def test_bad_eq_version_check(self) -> None:
             """
         )
 
-        # Act
-        actual = self._run_pytest(data)
-
         # Assert
-        assert "version==3.7 always false since minimum runtime version is (3, 8)" in actual
+        assert "version==3.7 always false since minimum runtime version is (3, 8)" in actual.stdout
diff --git a/mypy/test/meta/test_update_data.py b/mypy/test/meta/test_update_data.py
index 4e4bdd193dbf..40b70157a0e3 100644
--- a/mypy/test/meta/test_update_data.py
+++ b/mypy/test/meta/test_update_data.py
@@ -3,47 +3,23 @@
 Updating the expected output, especially when it's in the form of inline (comment) assertions,
 can be brittle, which is why we're "meta-testing" here.
 """
-import shlex
-import subprocess
-import sys
-import textwrap
-import uuid
-from pathlib import Path
-
-from mypy.test.config import test_data_prefix
 from mypy.test.helpers import Suite
+from mypy.test.meta._pytest import PytestResult, dedent_docstring, run_pytest_data_suite
 
 
-class UpdateDataSuite(Suite):
-    def _run_pytest_update_data(self, data_suite: str, *, max_attempts: int) -> str:
-        """
-        Runs a suite of data test cases through 'pytest --update-data' until either tests pass
-        or until a maximum number of attempts (needed for incremental tests).
-        """
-        p_test_data = Path(test_data_prefix)
-        p_root = p_test_data.parent.parent
-        p = p_test_data / f"check-meta-{uuid.uuid4()}.test"
-        assert not p.exists()
-        try:
-            p.write_text(textwrap.dedent(data_suite).lstrip())
-
-            test_nodeid = f"mypy/test/testcheck.py::TypeCheckSuite::{p.name}"
-            args = [sys.executable, "-m", "pytest", "-n", "0", "-s", "--update-data", test_nodeid]
-            cmd = shlex.join(args)
-            for i in range(max_attempts - 1, -1, -1):
-                res = subprocess.run(args, cwd=p_root)
-                if res.returncode == 0:
-                    break
-                print(f"`{cmd}` returned {res.returncode}: {i} attempts remaining")
-
-            return p.read_text()
-        finally:
-            p.unlink()
+def _run_pytest_update_data(data_suite: str) -> PytestResult:
+    """
+    Runs a suite of data test cases through 'pytest --update-data' until either tests pass
+    or until a maximum number of attempts (needed for incremental tests).
+    """
+    return run_pytest_data_suite(data_suite, extra_args=["--update-data"], max_attempts=3)
+
 
+class UpdateDataSuite(Suite):
     def test_update_data(self) -> None:
         # Note: We test multiple testcases rather than 'test case per test case'
         #       so we could also exercise rewriting multiple testcases at once.
-        actual = self._run_pytest_update_data(
+        result = _run_pytest_update_data(
             """
             [case testCorrect]
             s: str = 42  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
@@ -97,12 +73,12 @@ def test_update_data(self) -> None:
             [file b.py]
             s2: str = 43  # E: baz
             [builtins fixtures/list.pyi]
-            """,
-            max_attempts=3,
+            """
         )
 
         # Assert
-        expected = """
+        expected = dedent_docstring(
+            """
         [case testCorrect]
         s: str = 42  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
 
@@ -154,4 +130,5 @@ def test_update_data(self) -> None:
         s2: str = 43  # E: Incompatible types in assignment (expression has type "int", variable has type "str")
         [builtins fixtures/list.pyi]
         """
-        assert actual == textwrap.dedent(expected).lstrip()
+        )
+        assert result.input_updated == expected

From cc8a4b50f7e65004a97c9ba51c69f7c9340370d9 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Sat, 2 Sep 2023 17:29:42 -0400
Subject: [PATCH 102/288] Document we're not tracking relationships between
 symbols (#16018)

Fixes #15653.

I did not use erictraut's "quantum entanglement" metaphor, though I
find it to be quite illustrative :)
---
 docs/source/type_narrowing.rst | 42 ++++++++++++++++++++++++++++++----
 1 file changed, 37 insertions(+), 5 deletions(-)

diff --git a/docs/source/type_narrowing.rst b/docs/source/type_narrowing.rst
index 4bc0fda70138..4c5c2851edd0 100644
--- a/docs/source/type_narrowing.rst
+++ b/docs/source/type_narrowing.rst
@@ -3,7 +3,7 @@
 Type narrowing
 ==============
 
-This section is dedicated to  several type narrowing
+This section is dedicated to several type narrowing
 techniques which are supported by mypy.
 
 Type narrowing is when you convince a type checker that a broader type is actually more specific, for instance, that an object of type ``Shape`` is actually of the narrower type ``Square``.
@@ -14,10 +14,11 @@ Type narrowing expressions
 
 The simplest way to narrow a type is to use one of the supported expressions:
 
-- :py:func:`isinstance` like in ``isinstance(obj, float)`` will narrow ``obj`` to have ``float`` type
-- :py:func:`issubclass` like in ``issubclass(cls, MyClass)`` will narrow ``cls`` to be ``Type[MyClass]``
-- :py:class:`type` like in ``type(obj) is int`` will narrow ``obj`` to have ``int`` type
-- :py:func:`callable` like in ``callable(obj)`` will narrow object to callable type
+- :py:func:`isinstance` like in :code:`isinstance(obj, float)` will narrow ``obj`` to have ``float`` type
+- :py:func:`issubclass` like in :code:`issubclass(cls, MyClass)` will narrow ``cls`` to be ``Type[MyClass]``
+- :py:class:`type` like in :code:`type(obj) is int` will narrow ``obj`` to have ``int`` type
+- :py:func:`callable` like in :code:`callable(obj)` will narrow object to callable type
+- :code:`obj is not None` will narrow object to its :ref:`non-optional form <strict_optional>`
 
 Type narrowing is contextual. For example, based on the condition, mypy will narrow an expression only within an ``if`` branch:
 
@@ -83,6 +84,7 @@ We can also use ``assert`` to narrow types in the same context:
      reveal_type(x)  # Revealed type is "builtins.int"
      print(x + '!')  # Typechecks with `mypy`, but fails in runtime.
 
+
 issubclass
 ~~~~~~~~~~
 
@@ -359,3 +361,33 @@ What happens here?
 .. note::
 
   The same will work with ``isinstance(x := a, float)`` as well.
+
+Limitations
+-----------
+
+Mypy's analysis is limited to individual symbols and it will not track
+relationships between symbols. For example, in the following code
+it's easy to deduce that if :code:`a` is None then :code:`b` must not be,
+therefore :code:`a or b` will always be a string, but Mypy will not be able to tell that:
+
+.. code-block:: python
+
+    def f(a: str | None, b: str | None) -> str:
+        if a is not None or b is not None:
+            return a or b  # Incompatible return value type (got "str | None", expected "str")
+        return 'spam'
+
+Tracking this sort of cross-variable condition in a type checker would add significant complexity
+and performance overhead.
+
+You can use an ``assert`` to convince the type checker, override it with a :ref:`cast <casts>`
+or rewrite the function to be slightly more verbose:
+
+.. code-block:: python
+
+    def f(a: str | None, b: str | None) -> str:
+        if a is not None:
+            return a
+        elif b is not None:
+            return b
+        return 'spam'

From f83d6eb9070137f0b060bb5a8b81858bf8910424 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Sun, 3 Sep 2023 03:59:28 -0400
Subject: [PATCH 103/288] ruff: add pyupgrade (#16023)

For example, this [review
comment](https://github.com/python/mypy/pull/15481#discussion_r1313755961)
could've been spared with
[UP036](https://beta.ruff.rs/docs/rules/outdated-version-block/).
---
 mypy/checkmember.py   |  4 ++--
 mypy/config_parser.py | 22 +++++++++++++---------
 mypy/main.py          |  3 +--
 mypy/messages.py      | 15 +++++++--------
 mypy/solve.py         |  4 ++--
 mypyc/ir/class_ir.py  |  2 +-
 mypyc/ir/ops.py       |  4 ++--
 mypyc/ir/rtypes.py    |  4 ++--
 pyproject.toml        |  4 +++-
 9 files changed, 33 insertions(+), 29 deletions(-)

diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index f7d002f17eb9..60430839ff62 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Callable, Optional, Sequence, cast
+from typing import TYPE_CHECKING, Callable, Sequence, cast
 
 from mypy import meet, message_registry, subtypes
 from mypy.erasetype import erase_typevars
@@ -777,7 +777,7 @@ def analyze_var(
         result: Type = t
         typ = get_proper_type(typ)
 
-        call_type: Optional[ProperType] = None
+        call_type: ProperType | None = None
         if var.is_initialized_in_class and (not is_instance_var(var) or mx.is_operator):
             if isinstance(typ, FunctionLike) and not typ.is_type_obj():
                 call_type = typ
diff --git a/mypy/config_parser.py b/mypy/config_parser.py
index 7748c3b25966..4dbd6477c81e 100644
--- a/mypy/config_parser.py
+++ b/mypy/config_parser.py
@@ -292,14 +292,18 @@ def parse_config_file(
             )
             if report_dirs:
                 print(
-                    "%sPer-module sections should not specify reports (%s)"
-                    % (prefix, ", ".join(s + "_report" for s in sorted(report_dirs))),
+                    prefix,
+                    "Per-module sections should not specify reports ({})".format(
+                        ", ".join(s + "_report" for s in sorted(report_dirs))
+                    ),
                     file=stderr,
                 )
             if set(updates) - PER_MODULE_OPTIONS:
                 print(
-                    "%sPer-module sections should only specify per-module flags (%s)"
-                    % (prefix, ", ".join(sorted(set(updates) - PER_MODULE_OPTIONS))),
+                    prefix,
+                    "Per-module sections should only specify per-module flags ({})".format(
+                        ", ".join(sorted(set(updates) - PER_MODULE_OPTIONS))
+                    ),
                     file=stderr,
                 )
                 updates = {k: v for k, v in updates.items() if k in PER_MODULE_OPTIONS}
@@ -315,8 +319,9 @@ def parse_config_file(
                     "*" in x and x != "*" for x in glob.split(".")
                 ):
                     print(
-                        "%sPatterns must be fully-qualified module names, optionally "
-                        "with '*' in some components (e.g spam.*.eggs.*)" % prefix,
+                        prefix,
+                        "Patterns must be fully-qualified module names, optionally "
+                        "with '*' in some components (e.g spam.*.eggs.*)",
                         file=stderr,
                     )
                 else:
@@ -329,7 +334,7 @@ def get_prefix(file_read: str, name: str) -> str:
     else:
         module_name_str = name
 
-    return f"{file_read}: [{module_name_str}]: "
+    return f"{file_read}: [{module_name_str}]:"
 
 
 def is_toml(filename: str) -> bool:
@@ -411,8 +416,7 @@ def destructure_overrides(toml_data: dict[str, Any]) -> dict[str, Any]:
                         raise ConfigTOMLValueError(
                             "toml config file contains "
                             "[[tool.mypy.overrides]] sections with conflicting "
-                            "values. Module '%s' has two different values for '%s'"
-                            % (module, new_key)
+                            f"values. Module '{module}' has two different values for '{new_key}'"
                         )
                     result[old_config_name][new_key] = new_value
 
diff --git a/mypy/main.py b/mypy/main.py
index 30f6cfe97455..a4357dca7890 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -187,8 +187,7 @@ def flush_errors(new_messages: list[str], serious: bool) -> None:
         and not options.non_interactive
     ):
         print(
-            "Warning: unused section(s) in %s: %s"
-            % (
+            "Warning: unused section(s) in {}: {}".format(
                 options.config_file,
                 get_config_module_names(
                     options.config_file,
diff --git a/mypy/messages.py b/mypy/messages.py
index 4b71bd876dcc..bba9c3c3cdea 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -1454,20 +1454,19 @@ def cannot_determine_type_in_base(self, name: str, base: str, context: Context)
         self.fail(f'Cannot determine type of "{name}" in base class "{base}"', context)
 
     def no_formal_self(self, name: str, item: CallableType, context: Context) -> None:
+        type = format_type(item, self.options)
         self.fail(
-            'Attribute function "%s" with type %s does not accept self argument'
-            % (name, format_type(item, self.options)),
-            context,
+            f'Attribute function "{name}" with type {type} does not accept self argument', context
         )
 
     def incompatible_self_argument(
         self, name: str, arg: Type, sig: CallableType, is_classmethod: bool, context: Context
     ) -> None:
         kind = "class attribute function" if is_classmethod else "attribute function"
+        arg_type = format_type(arg, self.options)
+        sig_type = format_type(sig, self.options)
         self.fail(
-            'Invalid self argument %s to %s "%s" with type %s'
-            % (format_type(arg, self.options), kind, name, format_type(sig, self.options)),
-            context,
+            f'Invalid self argument {arg_type} to {kind} "{name}" with type {sig_type}', context
         )
 
     def incompatible_conditional_function_def(
@@ -1487,8 +1486,8 @@ def cannot_instantiate_abstract_class(
     ) -> None:
         attrs = format_string_list([f'"{a}"' for a in abstract_attributes])
         self.fail(
-            'Cannot instantiate abstract class "%s" with abstract '
-            "attribute%s %s" % (class_name, plural_s(abstract_attributes), attrs),
+            f'Cannot instantiate abstract class "{class_name}" with abstract '
+            f"attribute{plural_s(abstract_attributes)} {attrs}",
             context,
             code=codes.ABSTRACT,
         )
diff --git a/mypy/solve.py b/mypy/solve.py
index 95377ea9f93e..17e1ca047818 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 from collections import defaultdict
-from typing import Iterable, Sequence, Tuple
+from typing import Iterable, Sequence
 from typing_extensions import TypeAlias as _TypeAlias
 
 from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints
@@ -333,7 +333,7 @@ def is_trivial_bound(tp: ProperType) -> bool:
     return isinstance(tp, Instance) and tp.type.fullname == "builtins.object"
 
 
-def find_linear(c: Constraint) -> Tuple[bool, TypeVarId | None]:
+def find_linear(c: Constraint) -> tuple[bool, TypeVarId | None]:
     """Find out if this constraint represent a linear relationship, return target id if yes."""
     if isinstance(c.origin_type_var, TypeVarType):
         if isinstance(c.target, TypeVarType):
diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py
index 682e30629118..61f0fc36e1b3 100644
--- a/mypyc/ir/class_ir.py
+++ b/mypyc/ir/class_ir.py
@@ -70,7 +70,7 @@
 
 
 class VTableMethod(NamedTuple):
-    cls: "ClassIR"
+    cls: "ClassIR"  # noqa: UP037
     name: str
     method: FuncIR
     shadow_method: FuncIR | None
diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py
index d80c479211b7..2d64cc79d822 100644
--- a/mypyc/ir/ops.py
+++ b/mypyc/ir/ops.py
@@ -1575,5 +1575,5 @@ def visit_keep_alive(self, op: KeepAlive) -> T:
 # (Serialization and deserialization *will* be used for incremental
 # compilation but so far it is not hooked up to anything.)
 class DeserMaps(NamedTuple):
-    classes: dict[str, "ClassIR"]
-    functions: dict[str, "FuncIR"]
+    classes: dict[str, ClassIR]
+    functions: dict[str, FuncIR]
diff --git a/mypyc/ir/rtypes.py b/mypyc/ir/rtypes.py
index fa46feb0b59a..fecfaee5ef77 100644
--- a/mypyc/ir/rtypes.py
+++ b/mypyc/ir/rtypes.py
@@ -23,8 +23,8 @@
 from __future__ import annotations
 
 from abc import abstractmethod
-from typing import TYPE_CHECKING, ClassVar, Generic, TypeVar
-from typing_extensions import Final, TypeGuard
+from typing import TYPE_CHECKING, ClassVar, Final, Generic, TypeVar
+from typing_extensions import TypeGuard
 
 from mypyc.common import IS_32_BIT_PLATFORM, PLATFORM_SIZE, JsonDict, short_name
 from mypyc.namegen import NameGenerator
diff --git a/pyproject.toml b/pyproject.toml
index 67201acb9b94..18ba23671d9c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -36,7 +36,8 @@ select = [
   "B",       # flake8-bugbear
   "I",       # isort
   "RUF100",  # Unused noqa comments
-  "PGH004"   # blanket noqa comments
+  "PGH004",  # blanket noqa comments
+  "UP",      # pyupgrade
 ]
 
 ignore = [
@@ -49,6 +50,7 @@ ignore = [
   "E501",  # conflicts with black
   "E731",  # Do not assign a `lambda` expression, use a `def`
   "E741",  # Ambiguous variable name
+  "UP032", # 'f-string always preferable to format' is controversial
 ]
 
 unfixable = [

From 87365eb3b2ef5f89c19de2708a826f3c80e914a6 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sun, 3 Sep 2023 15:24:24 +0300
Subject: [PATCH 104/288] Exclude `assert False` from coverage (#16026)

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index 18ba23671d9c..1d6562756e22 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -121,5 +121,6 @@ exclude_lines = [
   '^\s*raise NotImplementedError\b',
   '^\s*return NotImplemented\b',
   '^\s*raise$',
+  '^assert False\b',
   '''^if __name__ == ['"]__main__['"]:$''',
 ]

From 92e054b7dad3641fe74326ef60e773b974ca614f Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sun, 3 Sep 2023 20:01:51 +0300
Subject: [PATCH 105/288] Do not set `is_final` twice for `FuncBase` subclasses
 (#16030)

---
 mypy/nodes.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/mypy/nodes.py b/mypy/nodes.py
index 9b4ba5e76667..db42dd6b3949 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -573,7 +573,6 @@ def __init__(self, items: list[OverloadPart]) -> None:
         if items:
             # TODO: figure out how to reliably set end position (we don't know the impl here).
             self.set_line(items[0].line, items[0].column)
-        self.is_final = False
 
     @property
     def name(self) -> str:
@@ -772,7 +771,6 @@ def __init__(
         # Is this an abstract method with trivial body?
         # Such methods can't be called via super().
         self.is_trivial_body = False
-        self.is_final = False
         # Original conditional definition
         self.original_def: None | FuncDef | Var | Decorator = None
         # Used for error reporting (to keep backward compatibility with pre-3.8)

From 488ad4f31dca387f87093e8d0b0fef2e021daa0b Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Mon, 4 Sep 2023 12:53:26 +0300
Subject: [PATCH 106/288] Bundle `misc/proper_plugin.py` as a part of `mypy`
 (#16036)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

I modified

```diff
diff --git mypy/binder.py mypy/binder.py
index 8a68f24f6..194883f86 100644
--- mypy/binder.py
+++ mypy/binder.py
@@ -345,7 +345,7 @@ class ConditionalTypeBinder:
             self._cleanse_key(dep)

     def most_recent_enclosing_type(self, expr: BindableExpression, type: Type) -> Type | None:
-        type = get_proper_type(type)
+        # type = get_proper_type(type)
         if isinstance(type, AnyType):
             return get_declaration(expr)
         key = literal_hash(expr)
```

to see if it still works. It is:

```python
» python runtests.py self
run self: ['/Users/sobolev/Desktop/mypy/.venv/bin/python', '-m', 'mypy', '--config-file', 'mypy_self_check.ini', '-p', 'mypy', '-p', 'mypyc']
mypy/binder.py:349: error: Never apply isinstance() to unexpanded types; use
mypy.types.get_proper_type() first  [misc]
            if isinstance(type, AnyType):
               ^~~~~~~~~~~~~~~~~~~~~~~~~
mypy/binder.py:349: note: If you pass on the original type after the check, always use its unexpanded version
Found 1 error in 1 file (checked 288 source files)

FAILED: self
```

I will add this plugin to my CI checks in like ~5 plugins I maintain :)

- https://github.com/typeddjango/django-stubs
- https://github.com/typeddjango/djangorestframework-stubs
- https://github.com/dry-python/returns
- https://github.com/dry-python/classes
- https://github.com/wemake-services/mypy-extras

Closes https://github.com/python/mypy/issues/16035
---
 MANIFEST.in                             | 1 -
 docs/source/extending_mypy.rst          | 9 +++++++++
 {misc => mypy/plugins}/proper_plugin.py | 9 +++++++++
 mypy_self_check.ini                     | 2 +-
 4 files changed, 19 insertions(+), 2 deletions(-)
 rename {misc => mypy/plugins}/proper_plugin.py (95%)

diff --git a/MANIFEST.in b/MANIFEST.in
index b77b762b4852..a1c15446de3f 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -34,7 +34,6 @@ include build-requirements.txt
 include test-requirements.txt
 include mypy_self_check.ini
 prune misc
-include misc/proper_plugin.py
 graft test-data
 include conftest.py
 include runtests.py
diff --git a/docs/source/extending_mypy.rst b/docs/source/extending_mypy.rst
index 506f548db687..bbbec2ad3880 100644
--- a/docs/source/extending_mypy.rst
+++ b/docs/source/extending_mypy.rst
@@ -237,3 +237,12 @@ mypy's cache for that module so that it can be rechecked. This hook
 should be used to report to mypy any relevant configuration data,
 so that mypy knows to recheck the module if the configuration changes.
 The hooks should return data encodable as JSON.
+
+Useful tools
+************
+
+Mypy ships the ``mypy.plugins.proper_plugin`` plugin which can be useful
+for plugin authors, since it finds missing ``get_proper_type()`` calls,
+which is a pretty common mistake.
+
+It is recommended to enable it as a part of your plugin's CI.
diff --git a/misc/proper_plugin.py b/mypy/plugins/proper_plugin.py
similarity index 95%
rename from misc/proper_plugin.py
rename to mypy/plugins/proper_plugin.py
index a6e6dc03b625..ab93f0d126db 100644
--- a/misc/proper_plugin.py
+++ b/mypy/plugins/proper_plugin.py
@@ -1,3 +1,12 @@
+"""
+This plugin is helpful for mypy development itself.
+By default, it is not enabled for mypy users.
+
+It also can be used by plugin developers as a part of their CI checks.
+
+It finds missing ``get_proper_type()`` calls, which can lead to multiple errors.
+"""
+
 from __future__ import annotations
 
 from typing import Callable
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
index fcdbe641d6d6..6e1ad8187b7a 100644
--- a/mypy_self_check.ini
+++ b/mypy_self_check.ini
@@ -5,7 +5,7 @@ disallow_any_unimported = True
 show_traceback = True
 pretty = True
 always_false = MYPYC
-plugins = misc/proper_plugin.py
+plugins = mypy.plugins.proper_plugin
 python_version = 3.8
 exclude = mypy/typeshed/|mypyc/test-data/|mypyc/lib-rt/
 new_type_inference = True

From 8738886861682e0d168ea321c2cc6ee5b566cb8b Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Mon, 4 Sep 2023 13:23:42 +0300
Subject: [PATCH 107/288] Add type annotations to `test-data/unit/plugins`
 (#16028)

Closes https://github.com/python/mypy/issues/16027
---
 mypy/plugin.py                                |  2 +-
 test-data/unit/plugins/add_classmethod.py     |  8 ++--
 test-data/unit/plugins/arg_kinds.py           | 19 ++++----
 test-data/unit/plugins/arg_names.py           | 48 ++++++++++++-------
 test-data/unit/plugins/attrhook.py            | 14 +++---
 test-data/unit/plugins/attrhook2.py           | 16 ++++---
 test-data/unit/plugins/badreturn.py           |  2 +-
 test-data/unit/plugins/badreturn2.py          |  6 ++-
 test-data/unit/plugins/callable_instance.py   | 19 +++++---
 test-data/unit/plugins/class_attr_hook.py     | 15 +++---
 test-data/unit/plugins/class_callable.py      | 41 ++++++++++------
 .../unit/plugins/common_api_incremental.py    | 36 +++++++-------
 test-data/unit/plugins/config_data.py         |  9 ++--
 test-data/unit/plugins/custom_errorcode.py    | 14 ++++--
 test-data/unit/plugins/customentry.py         | 20 +++++---
 test-data/unit/plugins/customize_mro.py       | 15 ++++--
 test-data/unit/plugins/decimal_to_int.py      | 19 +++++---
 test-data/unit/plugins/depshook.py            | 12 ++---
 test-data/unit/plugins/descriptor.py          | 32 ++++++++-----
 test-data/unit/plugins/dyn_class.py           | 46 +++++++++++-------
 .../unit/plugins/dyn_class_from_method.py     | 24 +++++++---
 test-data/unit/plugins/fnplugin.py            | 20 +++++---
 .../unit/plugins/fully_qualified_test_hook.py | 24 +++++++---
 test-data/unit/plugins/function_sig_hook.py   | 20 ++++----
 test-data/unit/plugins/method_in_decorator.py | 22 +++++----
 test-data/unit/plugins/method_sig_hook.py     | 27 +++++++----
 test-data/unit/plugins/named_callable.py      | 31 +++++++-----
 test-data/unit/plugins/plugin2.py             | 20 +++++---
 test-data/unit/plugins/type_anal_hook.py      | 31 ++++++------
 test-data/unit/plugins/union_method.py        | 32 ++++++++-----
 tox.ini                                       |  1 +
 31 files changed, 407 insertions(+), 238 deletions(-)

diff --git a/mypy/plugin.py b/mypy/plugin.py
index 4d62c2bd184b..38016191de8f 100644
--- a/mypy/plugin.py
+++ b/mypy/plugin.py
@@ -247,7 +247,7 @@ def fail(
 
     @abstractmethod
     def named_generic_type(self, name: str, args: list[Type]) -> Instance:
-        """Construct an instance of a builtin type with given type arguments."""
+        """Construct an instance of a generic type with given type arguments."""
         raise NotImplementedError
 
     @abstractmethod
diff --git a/test-data/unit/plugins/add_classmethod.py b/test-data/unit/plugins/add_classmethod.py
index 5aacc69a8f01..9bc2c4e079dd 100644
--- a/test-data/unit/plugins/add_classmethod.py
+++ b/test-data/unit/plugins/add_classmethod.py
@@ -1,4 +1,6 @@
-from typing import Callable, Optional
+from __future__ import annotations
+
+from typing import Callable
 
 from mypy.nodes import ARG_POS, Argument, Var
 from mypy.plugin import ClassDefContext, Plugin
@@ -7,7 +9,7 @@
 
 
 class ClassMethodPlugin(Plugin):
-    def get_base_class_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]:
+    def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
         if "BaseAddMethod" in fullname:
             return add_extra_methods_hook
         return None
@@ -24,5 +26,5 @@ def add_extra_methods_hook(ctx: ClassDefContext) -> None:
     )
 
 
-def plugin(version):
+def plugin(version: str) -> type[ClassMethodPlugin]:
     return ClassMethodPlugin
diff --git a/test-data/unit/plugins/arg_kinds.py b/test-data/unit/plugins/arg_kinds.py
index 5392e64c4f11..388a3c738b62 100644
--- a/test-data/unit/plugins/arg_kinds.py
+++ b/test-data/unit/plugins/arg_kinds.py
@@ -1,18 +1,19 @@
-from typing import Optional, Callable
-from mypy.plugin import Plugin, MethodContext, FunctionContext
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import FunctionContext, MethodContext, Plugin
 from mypy.types import Type
 
 
 class ArgKindsPlugin(Plugin):
-    def get_function_hook(self, fullname: str
-                          ) -> Optional[Callable[[FunctionContext], Type]]:
-        if 'func' in fullname:
+    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
+        if "func" in fullname:
             return extract_arg_kinds_from_function
         return None
 
-    def get_method_hook(self, fullname: str
-                        ) -> Optional[Callable[[MethodContext], Type]]:
-        if 'Class.method' in fullname:
+    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
+        if "Class.method" in fullname:
             return extract_arg_kinds_from_method
         return None
 
@@ -27,5 +28,5 @@ def extract_arg_kinds_from_method(ctx: MethodContext) -> Type:
     return ctx.default_return_type
 
 
-def plugin(version):
+def plugin(version: str) -> type[ArgKindsPlugin]:
     return ArgKindsPlugin
diff --git a/test-data/unit/plugins/arg_names.py b/test-data/unit/plugins/arg_names.py
index 6c1cbb9415cc..981c1a2eb12d 100644
--- a/test-data/unit/plugins/arg_names.py
+++ b/test-data/unit/plugins/arg_names.py
@@ -1,35 +1,51 @@
-from typing import Optional, Callable
+from __future__ import annotations
 
-from mypy.plugin import Plugin, MethodContext, FunctionContext
+from typing import Callable
+
+from mypy.nodes import StrExpr
+from mypy.plugin import FunctionContext, MethodContext, Plugin
 from mypy.types import Type
 
 
 class ArgNamesPlugin(Plugin):
-    def get_function_hook(self, fullname: str
-                          ) -> Optional[Callable[[FunctionContext], Type]]:
-        if fullname in {'mod.func', 'mod.func_unfilled', 'mod.func_star_expr',
-                        'mod.ClassInit', 'mod.Outer.NestedClassInit'}:
+    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
+        if fullname in {
+            "mod.func",
+            "mod.func_unfilled",
+            "mod.func_star_expr",
+            "mod.ClassInit",
+            "mod.Outer.NestedClassInit",
+        }:
             return extract_classname_and_set_as_return_type_function
         return None
 
-    def get_method_hook(self, fullname: str
-                        ) -> Optional[Callable[[MethodContext], Type]]:
-        if fullname in {'mod.Class.method', 'mod.Class.myclassmethod', 'mod.Class.mystaticmethod',
-                        'mod.ClassUnfilled.method', 'mod.ClassStarExpr.method',
-                        'mod.ClassChild.method', 'mod.ClassChild.myclassmethod'}:
+    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
+        if fullname in {
+            "mod.Class.method",
+            "mod.Class.myclassmethod",
+            "mod.Class.mystaticmethod",
+            "mod.ClassUnfilled.method",
+            "mod.ClassStarExpr.method",
+            "mod.ClassChild.method",
+            "mod.ClassChild.myclassmethod",
+        }:
             return extract_classname_and_set_as_return_type_method
         return None
 
 
 def extract_classname_and_set_as_return_type_function(ctx: FunctionContext) -> Type:
-    classname = ctx.args[ctx.callee_arg_names.index('classname')][0].value
-    return ctx.api.named_generic_type(classname, [])
+    arg = ctx.args[ctx.callee_arg_names.index("classname")][0]
+    if not isinstance(arg, StrExpr):
+        return ctx.default_return_type
+    return ctx.api.named_generic_type(arg.value, [])
 
 
 def extract_classname_and_set_as_return_type_method(ctx: MethodContext) -> Type:
-    classname = ctx.args[ctx.callee_arg_names.index('classname')][0].value
-    return ctx.api.named_generic_type(classname, [])
+    arg = ctx.args[ctx.callee_arg_names.index("classname")][0]
+    if not isinstance(arg, StrExpr):
+        return ctx.default_return_type
+    return ctx.api.named_generic_type(arg.value, [])
 
 
-def plugin(version):
+def plugin(version: str) -> type[ArgNamesPlugin]:
     return ArgNamesPlugin
diff --git a/test-data/unit/plugins/attrhook.py b/test-data/unit/plugins/attrhook.py
index c177072aa47f..9500734daa6c 100644
--- a/test-data/unit/plugins/attrhook.py
+++ b/test-data/unit/plugins/attrhook.py
@@ -1,12 +1,14 @@
-from typing import Optional, Callable
+from __future__ import annotations
 
-from mypy.plugin import Plugin, AttributeContext
-from mypy.types import Type, Instance
+from typing import Callable
+
+from mypy.plugin import AttributeContext, Plugin
+from mypy.types import Instance, Type
 
 
 class AttrPlugin(Plugin):
-    def get_attribute_hook(self, fullname: str) -> Optional[Callable[[AttributeContext], Type]]:
-        if fullname == 'm.Signal.__call__':
+    def get_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None:
+        if fullname == "m.Signal.__call__":
             return signal_call_callback
         return None
 
@@ -17,5 +19,5 @@ def signal_call_callback(ctx: AttributeContext) -> Type:
     return ctx.default_attr_type
 
 
-def plugin(version):
+def plugin(version: str) -> type[AttrPlugin]:
     return AttrPlugin
diff --git a/test-data/unit/plugins/attrhook2.py b/test-data/unit/plugins/attrhook2.py
index cc14341a6f97..2d41a0fdf52f 100644
--- a/test-data/unit/plugins/attrhook2.py
+++ b/test-data/unit/plugins/attrhook2.py
@@ -1,14 +1,16 @@
-from typing import Optional, Callable
+from __future__ import annotations
 
-from mypy.plugin import Plugin, AttributeContext
-from mypy.types import Type, AnyType, TypeOfAny
+from typing import Callable
+
+from mypy.plugin import AttributeContext, Plugin
+from mypy.types import AnyType, Type, TypeOfAny
 
 
 class AttrPlugin(Plugin):
-    def get_attribute_hook(self, fullname: str) -> Optional[Callable[[AttributeContext], Type]]:
-        if fullname == 'm.Magic.magic_field':
+    def get_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None:
+        if fullname == "m.Magic.magic_field":
             return magic_field_callback
-        if fullname == 'm.Magic.nonexistent_field':
+        if fullname == "m.Magic.nonexistent_field":
             return nonexistent_field_callback
         return None
 
@@ -22,5 +24,5 @@ def nonexistent_field_callback(ctx: AttributeContext) -> Type:
     return AnyType(TypeOfAny.from_error)
 
 
-def plugin(version):
+def plugin(version: str) -> type[AttrPlugin]:
     return AttrPlugin
diff --git a/test-data/unit/plugins/badreturn.py b/test-data/unit/plugins/badreturn.py
index fd7430606dd6..9dce3b3e99c2 100644
--- a/test-data/unit/plugins/badreturn.py
+++ b/test-data/unit/plugins/badreturn.py
@@ -1,2 +1,2 @@
-def plugin(version):
+def plugin(version: str) -> None:
     pass
diff --git a/test-data/unit/plugins/badreturn2.py b/test-data/unit/plugins/badreturn2.py
index c7e0447841c1..1ae551ecbf20 100644
--- a/test-data/unit/plugins/badreturn2.py
+++ b/test-data/unit/plugins/badreturn2.py
@@ -1,5 +1,9 @@
+from __future__ import annotations
+
+
 class MyPlugin:
     pass
 
-def plugin(version):
+
+def plugin(version: str) -> type[MyPlugin]:
     return MyPlugin
diff --git a/test-data/unit/plugins/callable_instance.py b/test-data/unit/plugins/callable_instance.py
index 40e7df418539..a9f562effb34 100644
--- a/test-data/unit/plugins/callable_instance.py
+++ b/test-data/unit/plugins/callable_instance.py
@@ -1,23 +1,30 @@
+from __future__ import annotations
+
+from typing import Callable
+
 from mypy.plugin import MethodContext, Plugin
 from mypy.types import Instance, Type
 
+
 class CallableInstancePlugin(Plugin):
-    def get_function_hook(self, fullname):
-        assert not fullname.endswith(' of Foo')
+    def get_function_hook(self, fullname: str) -> None:
+        assert not fullname.endswith(" of Foo")
 
-    def get_method_hook(self, fullname):
+    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
         # Ensure that all names are fully qualified
-        assert not fullname.endswith(' of Foo')
+        assert not fullname.endswith(" of Foo")
 
-        if fullname == '__main__.Class.__call__':
+        if fullname == "__main__.Class.__call__":
             return my_hook
 
         return None
 
+
 def my_hook(ctx: MethodContext) -> Type:
     if isinstance(ctx.type, Instance) and len(ctx.type.args) == 1:
         return ctx.type.args[0]
     return ctx.default_return_type
 
-def plugin(version):
+
+def plugin(version: str) -> type[CallableInstancePlugin]:
     return CallableInstancePlugin
diff --git a/test-data/unit/plugins/class_attr_hook.py b/test-data/unit/plugins/class_attr_hook.py
index 348e5df0ee03..5d6a87df48bb 100644
--- a/test-data/unit/plugins/class_attr_hook.py
+++ b/test-data/unit/plugins/class_attr_hook.py
@@ -1,20 +1,23 @@
-from typing import Callable, Optional
+from __future__ import annotations
+
+from typing import Callable
 
 from mypy.plugin import AttributeContext, Plugin
 from mypy.types import Type as MypyType
 
 
 class ClassAttrPlugin(Plugin):
-    def get_class_attribute_hook(self, fullname: str
-                                 ) -> Optional[Callable[[AttributeContext], MypyType]]:
-        if fullname == '__main__.Cls.attr':
+    def get_class_attribute_hook(
+        self, fullname: str
+    ) -> Callable[[AttributeContext], MypyType] | None:
+        if fullname == "__main__.Cls.attr":
             return my_hook
         return None
 
 
 def my_hook(ctx: AttributeContext) -> MypyType:
-    return ctx.api.named_generic_type('builtins.int', [])
+    return ctx.api.named_generic_type("builtins.int", [])
 
 
-def plugin(_version: str):
+def plugin(_version: str) -> type[ClassAttrPlugin]:
     return ClassAttrPlugin
diff --git a/test-data/unit/plugins/class_callable.py b/test-data/unit/plugins/class_callable.py
index 07f75ec80ac1..9fab30e60458 100644
--- a/test-data/unit/plugins/class_callable.py
+++ b/test-data/unit/plugins/class_callable.py
@@ -1,32 +1,43 @@
-from mypy.plugin import Plugin
+from __future__ import annotations
+
+from typing import Callable
+
 from mypy.nodes import NameExpr
-from mypy.types import UnionType, NoneType, Instance
+from mypy.plugin import FunctionContext, Plugin
+from mypy.types import Instance, NoneType, Type, UnionType, get_proper_type
+
 
 class AttrPlugin(Plugin):
-    def get_function_hook(self, fullname):
-        if fullname.startswith('mod.Attr'):
+    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
+        if fullname.startswith("mod.Attr"):
             return attr_hook
         return None
 
-def attr_hook(ctx):
-    assert isinstance(ctx.default_return_type, Instance)
-    if ctx.default_return_type.type.fullname == 'mod.Attr':
-        attr_base = ctx.default_return_type
+
+def attr_hook(ctx: FunctionContext) -> Type:
+    default = get_proper_type(ctx.default_return_type)
+    assert isinstance(default, Instance)
+    if default.type.fullname == "mod.Attr":
+        attr_base = default
     else:
         attr_base = None
-    for base in ctx.default_return_type.type.bases:
-        if base.type.fullname == 'mod.Attr':
+    for base in default.type.bases:
+        if base.type.fullname == "mod.Attr":
             attr_base = base
             break
     assert attr_base is not None
     last_arg_exprs = ctx.args[-1]
-    if any(isinstance(expr, NameExpr) and expr.name == 'True' for expr in last_arg_exprs):
+    if any(isinstance(expr, NameExpr) and expr.name == "True" for expr in last_arg_exprs):
         return attr_base
     assert len(attr_base.args) == 1
     arg_type = attr_base.args[0]
-    return Instance(attr_base.type, [UnionType([arg_type, NoneType()])],
-                    line=ctx.default_return_type.line,
-                    column=ctx.default_return_type.column)
+    return Instance(
+        attr_base.type,
+        [UnionType([arg_type, NoneType()])],
+        line=default.line,
+        column=default.column,
+    )
+
 
-def plugin(version):
+def plugin(version: str) -> type[AttrPlugin]:
     return AttrPlugin
diff --git a/test-data/unit/plugins/common_api_incremental.py b/test-data/unit/plugins/common_api_incremental.py
index 2dcd559777ec..b14b2f92073e 100644
--- a/test-data/unit/plugins/common_api_incremental.py
+++ b/test-data/unit/plugins/common_api_incremental.py
@@ -1,44 +1,48 @@
-from mypy.plugin import Plugin
-from mypy.nodes import (
-    ClassDef, Block, TypeInfo, SymbolTable, SymbolTableNode, MDEF, GDEF, Var
-)
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.nodes import GDEF, MDEF, Block, ClassDef, SymbolTable, SymbolTableNode, TypeInfo, Var
+from mypy.plugin import ClassDefContext, DynamicClassDefContext, Plugin
 
 
 class DynPlugin(Plugin):
-    def get_dynamic_class_hook(self, fullname):
-        if fullname == 'lib.declarative_base':
+    def get_dynamic_class_hook(
+        self, fullname: str
+    ) -> Callable[[DynamicClassDefContext], None] | None:
+        if fullname == "lib.declarative_base":
             return add_info_hook
         return None
 
-    def get_base_class_hook(self, fullname: str):
+    def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
         sym = self.lookup_fully_qualified(fullname)
         if sym and isinstance(sym.node, TypeInfo):
-            if sym.node.metadata.get('magic'):
+            if sym.node.metadata.get("magic"):
                 return add_magic_hook
         return None
 
 
-def add_info_hook(ctx) -> None:
+def add_info_hook(ctx: DynamicClassDefContext) -> None:
     class_def = ClassDef(ctx.name, Block([]))
     class_def.fullname = ctx.api.qualified_name(ctx.name)
 
     info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
     class_def.info = info
-    obj = ctx.api.named_type('builtins.object')
+    obj = ctx.api.named_type("builtins.object", [])
     info.mro = [info, obj.type]
     info.bases = [obj]
     ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
-    info.metadata['magic'] = True
+    info.metadata["magic"] = {"value": True}
 
 
-def add_magic_hook(ctx) -> None:
+def add_magic_hook(ctx: ClassDefContext) -> None:
     info = ctx.cls.info
-    str_type = ctx.api.named_type_or_none('builtins.str', [])
+    str_type = ctx.api.named_type_or_none("builtins.str", [])
     assert str_type is not None
-    var = Var('__magic__', str_type)
+    var = Var("__magic__", str_type)
     var.info = info
-    info.names['__magic__'] = SymbolTableNode(MDEF, var)
+    info.names["__magic__"] = SymbolTableNode(MDEF, var)
 
 
-def plugin(version):
+def plugin(version: str) -> type[DynPlugin]:
     return DynPlugin
diff --git a/test-data/unit/plugins/config_data.py b/test-data/unit/plugins/config_data.py
index 059e036d5e32..9b828bc9ac0a 100644
--- a/test-data/unit/plugins/config_data.py
+++ b/test-data/unit/plugins/config_data.py
@@ -1,6 +1,7 @@
-import os
-import json
+from __future__ import annotations
 
+import json
+import os
 from typing import Any
 
 from mypy.plugin import Plugin, ReportConfigContext
@@ -8,11 +9,11 @@
 
 class ConfigDataPlugin(Plugin):
     def report_config_data(self, ctx: ReportConfigContext) -> Any:
-        path = os.path.join('tmp/test.json')
+        path = os.path.join("tmp/test.json")
         with open(path) as f:
             data = json.load(f)
         return data.get(ctx.id)
 
 
-def plugin(version):
+def plugin(version: str) -> type[ConfigDataPlugin]:
     return ConfigDataPlugin
diff --git a/test-data/unit/plugins/custom_errorcode.py b/test-data/unit/plugins/custom_errorcode.py
index 0e2209a32eca..0af87658e59f 100644
--- a/test-data/unit/plugins/custom_errorcode.py
+++ b/test-data/unit/plugins/custom_errorcode.py
@@ -1,20 +1,24 @@
+from __future__ import annotations
+
+from typing import Callable
+
 from mypy.errorcodes import ErrorCode
-from mypy.plugin import Plugin
-from mypy.types import AnyType, TypeOfAny
+from mypy.plugin import FunctionContext, Plugin
+from mypy.types import AnyType, Type, TypeOfAny
 
 CUSTOM_ERROR = ErrorCode(code="custom", description="", category="Custom")
 
 
 class CustomErrorCodePlugin(Plugin):
-    def get_function_hook(self, fullname):
+    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
         if fullname.endswith(".main"):
             return self.emit_error
         return None
 
-    def emit_error(self, ctx):
+    def emit_error(self, ctx: FunctionContext) -> Type:
         ctx.api.fail("Custom error", ctx.context, code=CUSTOM_ERROR)
         return AnyType(TypeOfAny.from_error)
 
 
-def plugin(version):
+def plugin(version: str) -> type[CustomErrorCodePlugin]:
     return CustomErrorCodePlugin
diff --git a/test-data/unit/plugins/customentry.py b/test-data/unit/plugins/customentry.py
index b3dacfd4cf44..1a7ed3348e12 100644
--- a/test-data/unit/plugins/customentry.py
+++ b/test-data/unit/plugins/customentry.py
@@ -1,14 +1,22 @@
-from mypy.plugin import Plugin
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import FunctionContext, Plugin
+from mypy.types import Type
+
 
 class MyPlugin(Plugin):
-    def get_function_hook(self, fullname):
-        if fullname == '__main__.f':
+    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
+        if fullname == "__main__.f":
             return my_hook
         assert fullname
         return None
 
-def my_hook(ctx):
-    return ctx.api.named_generic_type('builtins.int', [])
 
-def register(version):
+def my_hook(ctx: FunctionContext) -> Type:
+    return ctx.api.named_generic_type("builtins.int", [])
+
+
+def register(version: str) -> type[MyPlugin]:
     return MyPlugin
diff --git a/test-data/unit/plugins/customize_mro.py b/test-data/unit/plugins/customize_mro.py
index 0f2396d98965..3b13b2e9d998 100644
--- a/test-data/unit/plugins/customize_mro.py
+++ b/test-data/unit/plugins/customize_mro.py
@@ -1,10 +1,17 @@
-from mypy.plugin import Plugin
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import ClassDefContext, Plugin
+
 
 class DummyPlugin(Plugin):
-    def get_customize_class_mro_hook(self, fullname):
-        def analyze(classdef_ctx):
+    def get_customize_class_mro_hook(self, fullname: str) -> Callable[[ClassDefContext], None]:
+        def analyze(classdef_ctx: ClassDefContext) -> None:
             pass
+
         return analyze
 
-def plugin(version):
+
+def plugin(version: str) -> type[DummyPlugin]:
     return DummyPlugin
diff --git a/test-data/unit/plugins/decimal_to_int.py b/test-data/unit/plugins/decimal_to_int.py
index 94aa33ef6df1..2318b2367d33 100644
--- a/test-data/unit/plugins/decimal_to_int.py
+++ b/test-data/unit/plugins/decimal_to_int.py
@@ -1,14 +1,21 @@
-from mypy.plugin import Plugin
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import AnalyzeTypeContext, Plugin
+from mypy.types import Type
 
 
 class MyPlugin(Plugin):
-    def get_type_analyze_hook(self, fullname):
+    def get_type_analyze_hook(self, fullname: str) -> Callable[[AnalyzeTypeContext], Type] | None:
         if fullname in ("decimal.Decimal", "_decimal.Decimal"):
             return decimal_to_int_hook
         return None
 
-def plugin(version):
-    return MyPlugin
 
-def decimal_to_int_hook(ctx):
-    return ctx.api.named_type('builtins.int', [])
+def decimal_to_int_hook(ctx: AnalyzeTypeContext) -> Type:
+    return ctx.api.named_type("builtins.int", [])
+
+
+def plugin(version: str) -> type[MyPlugin]:
+    return MyPlugin
diff --git a/test-data/unit/plugins/depshook.py b/test-data/unit/plugins/depshook.py
index 76277f3cb82b..bb2460de1196 100644
--- a/test-data/unit/plugins/depshook.py
+++ b/test-data/unit/plugins/depshook.py
@@ -1,15 +1,15 @@
-from typing import List, Tuple
+from __future__ import annotations
 
-from mypy.plugin import Plugin
 from mypy.nodes import MypyFile
+from mypy.plugin import Plugin
 
 
 class DepsPlugin(Plugin):
-    def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]:
-        if file.fullname == '__main__':
-            return [(10, 'err', -1)]
+    def get_additional_deps(self, file: MypyFile) -> list[tuple[int, str, int]]:
+        if file.fullname == "__main__":
+            return [(10, "err", -1)]
         return []
 
 
-def plugin(version):
+def plugin(version: str) -> type[DepsPlugin]:
     return DepsPlugin
diff --git a/test-data/unit/plugins/descriptor.py b/test-data/unit/plugins/descriptor.py
index afbadcdfb671..d38853367906 100644
--- a/test-data/unit/plugins/descriptor.py
+++ b/test-data/unit/plugins/descriptor.py
@@ -1,28 +1,38 @@
-from mypy.plugin import Plugin
-from mypy.types import NoneType, CallableType
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import MethodContext, MethodSigContext, Plugin
+from mypy.types import CallableType, NoneType, Type, get_proper_type
 
 
 class DescriptorPlugin(Plugin):
-    def get_method_hook(self, fullname):
+    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
         if fullname == "__main__.Desc.__get__":
             return get_hook
         return None
 
-    def get_method_signature_hook(self, fullname):
+    def get_method_signature_hook(
+        self, fullname: str
+    ) -> Callable[[MethodSigContext], CallableType] | None:
         if fullname == "__main__.Desc.__set__":
             return set_hook
         return None
 
 
-def get_hook(ctx):
-    if isinstance(ctx.arg_types[0][0], NoneType):
-        return ctx.api.named_type("builtins.str")
-    return ctx.api.named_type("builtins.int")
+def get_hook(ctx: MethodContext) -> Type:
+    arg = get_proper_type(ctx.arg_types[0][0])
+    if isinstance(arg, NoneType):
+        return ctx.api.named_generic_type("builtins.str", [])
+    return ctx.api.named_generic_type("builtins.int", [])
 
 
-def set_hook(ctx):
+def set_hook(ctx: MethodSigContext) -> CallableType:
     return CallableType(
-        [ctx.api.named_type("__main__.Cls"), ctx.api.named_type("builtins.int")],
+        [
+            ctx.api.named_generic_type("__main__.Cls", []),
+            ctx.api.named_generic_type("builtins.int", []),
+        ],
         ctx.default_signature.arg_kinds,
         ctx.default_signature.arg_names,
         ctx.default_signature.ret_type,
@@ -30,5 +40,5 @@ def set_hook(ctx):
     )
 
 
-def plugin(version):
+def plugin(version: str) -> type[DescriptorPlugin]:
     return DescriptorPlugin
diff --git a/test-data/unit/plugins/dyn_class.py b/test-data/unit/plugins/dyn_class.py
index 54bf377aa8ef..18e948e3dd2a 100644
--- a/test-data/unit/plugins/dyn_class.py
+++ b/test-data/unit/plugins/dyn_class.py
@@ -1,47 +1,57 @@
-from mypy.plugin import Plugin
-from mypy.nodes import (
-    ClassDef, Block, TypeInfo, SymbolTable, SymbolTableNode, GDEF, Var
-)
-from mypy.types import Instance
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.nodes import GDEF, Block, ClassDef, SymbolTable, SymbolTableNode, TypeInfo, Var
+from mypy.plugin import ClassDefContext, DynamicClassDefContext, Plugin
+from mypy.types import Instance, get_proper_type
 
 DECL_BASES = set()
 
+
 class DynPlugin(Plugin):
-    def get_dynamic_class_hook(self, fullname):
-        if fullname == 'mod.declarative_base':
+    def get_dynamic_class_hook(
+        self, fullname: str
+    ) -> Callable[[DynamicClassDefContext], None] | None:
+        if fullname == "mod.declarative_base":
             return add_info_hook
         return None
 
-    def get_base_class_hook(self, fullname: str):
+    def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
         if fullname in DECL_BASES:
             return replace_col_hook
         return None
 
-def add_info_hook(ctx):
+
+def add_info_hook(ctx: DynamicClassDefContext) -> None:
     class_def = ClassDef(ctx.name, Block([]))
     class_def.fullname = ctx.api.qualified_name(ctx.name)
 
     info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
     class_def.info = info
-    obj = ctx.api.named_type('builtins.object')
+    obj = ctx.api.named_type("builtins.object")
     info.mro = [info, obj.type]
     info.bases = [obj]
     ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
     DECL_BASES.add(class_def.fullname)
 
-def replace_col_hook(ctx):
+
+def replace_col_hook(ctx: ClassDefContext) -> None:
     info = ctx.cls.info
     for sym in info.names.values():
         node = sym.node
-        if isinstance(node, Var) and isinstance(node.type, Instance):
-            if node.type.type.fullname == 'mod.Column':
-                new_sym = ctx.api.lookup_fully_qualified_or_none('mod.Instr')
+        if isinstance(node, Var) and isinstance(
+            (node_type := get_proper_type(node.type)), Instance
+        ):
+            if node_type.type.fullname == "mod.Column":
+                new_sym = ctx.api.lookup_fully_qualified_or_none("mod.Instr")
                 if new_sym:
                     new_info = new_sym.node
                     assert isinstance(new_info, TypeInfo)
-                    node.type = Instance(new_info, node.type.args,
-                                         node.type.line,
-                                         node.type.column)
+                    node.type = Instance(
+                        new_info, node_type.args, node_type.line, node_type.column
+                    )
+
 
-def plugin(version):
+def plugin(version: str) -> type[DynPlugin]:
     return DynPlugin
diff --git a/test-data/unit/plugins/dyn_class_from_method.py b/test-data/unit/plugins/dyn_class_from_method.py
index 4c3904907750..b84754654084 100644
--- a/test-data/unit/plugins/dyn_class_from_method.py
+++ b/test-data/unit/plugins/dyn_class_from_method.py
@@ -1,28 +1,38 @@
-from mypy.nodes import (Block, ClassDef, GDEF, SymbolTable, SymbolTableNode, TypeInfo)
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.nodes import GDEF, Block, ClassDef, RefExpr, SymbolTable, SymbolTableNode, TypeInfo
 from mypy.plugin import DynamicClassDefContext, Plugin
 from mypy.types import Instance
 
 
 class DynPlugin(Plugin):
-    def get_dynamic_class_hook(self, fullname):
-        if 'from_queryset' in fullname:
+    def get_dynamic_class_hook(
+        self, fullname: str
+    ) -> Callable[[DynamicClassDefContext], None] | None:
+        if "from_queryset" in fullname:
             return add_info_hook
         return None
 
 
-def add_info_hook(ctx: DynamicClassDefContext):
+def add_info_hook(ctx: DynamicClassDefContext) -> None:
     class_def = ClassDef(ctx.name, Block([]))
     class_def.fullname = ctx.api.qualified_name(ctx.name)
 
     info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
     class_def.info = info
+    assert isinstance(ctx.call.args[0], RefExpr)
     queryset_type_fullname = ctx.call.args[0].fullname
-    queryset_info = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname).node  # type: TypeInfo
-    obj = ctx.api.named_type('builtins.object')
+    queryset_node = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname)
+    assert queryset_node is not None
+    queryset_info = queryset_node.node
+    assert isinstance(queryset_info, TypeInfo)
+    obj = ctx.api.named_type("builtins.object")
     info.mro = [info, queryset_info, obj.type]
     info.bases = [Instance(queryset_info, [])]
     ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
 
 
-def plugin(version):
+def plugin(version: str) -> type[DynPlugin]:
     return DynPlugin
diff --git a/test-data/unit/plugins/fnplugin.py b/test-data/unit/plugins/fnplugin.py
index 684d6343458e..a5a7e57101c2 100644
--- a/test-data/unit/plugins/fnplugin.py
+++ b/test-data/unit/plugins/fnplugin.py
@@ -1,14 +1,22 @@
-from mypy.plugin import Plugin
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import FunctionContext, Plugin
+from mypy.types import Type
+
 
 class MyPlugin(Plugin):
-    def get_function_hook(self, fullname):
-        if fullname == '__main__.f':
+    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
+        if fullname == "__main__.f":
             return my_hook
         assert fullname is not None
         return None
 
-def my_hook(ctx):
-    return ctx.api.named_generic_type('builtins.int', [])
 
-def plugin(version):
+def my_hook(ctx: FunctionContext) -> Type:
+    return ctx.api.named_generic_type("builtins.int", [])
+
+
+def plugin(version: str) -> type[MyPlugin]:
     return MyPlugin
diff --git a/test-data/unit/plugins/fully_qualified_test_hook.py b/test-data/unit/plugins/fully_qualified_test_hook.py
index 529cf25a1215..9230091bba1a 100644
--- a/test-data/unit/plugins/fully_qualified_test_hook.py
+++ b/test-data/unit/plugins/fully_qualified_test_hook.py
@@ -1,16 +1,28 @@
-from mypy.plugin import CallableType, MethodSigContext, Plugin
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import MethodSigContext, Plugin
+from mypy.types import CallableType
+
 
 class FullyQualifiedTestPlugin(Plugin):
-    def get_method_signature_hook(self, fullname):
+    def get_method_signature_hook(
+        self, fullname: str
+    ) -> Callable[[MethodSigContext], CallableType] | None:
         # Ensure that all names are fully qualified
-        if 'FullyQualifiedTest' in fullname:
-            assert fullname.startswith('__main__.') and not ' of ' in fullname, fullname
+        if "FullyQualifiedTest" in fullname:
+            assert fullname.startswith("__main__.") and " of " not in fullname, fullname
             return my_hook
 
         return None
 
+
 def my_hook(ctx: MethodSigContext) -> CallableType:
-    return ctx.default_signature.copy_modified(ret_type=ctx.api.named_generic_type('builtins.int', []))
+    return ctx.default_signature.copy_modified(
+        ret_type=ctx.api.named_generic_type("builtins.int", [])
+    )
+
 
-def plugin(version):
+def plugin(version: str) -> type[FullyQualifiedTestPlugin]:
     return FullyQualifiedTestPlugin
diff --git a/test-data/unit/plugins/function_sig_hook.py b/test-data/unit/plugins/function_sig_hook.py
index 4d901b96716e..a8d3cf058062 100644
--- a/test-data/unit/plugins/function_sig_hook.py
+++ b/test-data/unit/plugins/function_sig_hook.py
@@ -1,9 +1,16 @@
-from mypy.plugin import CallableType, FunctionSigContext, Plugin
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import FunctionSigContext, Plugin
+from mypy.types import CallableType
 
 
 class FunctionSigPlugin(Plugin):
-    def get_function_signature_hook(self, fullname):
-        if fullname == '__main__.dynamic_signature':
+    def get_function_signature_hook(
+        self, fullname: str
+    ) -> Callable[[FunctionSigContext], CallableType] | None:
+        if fullname == "__main__.dynamic_signature":
             return my_hook
         return None
 
@@ -13,11 +20,8 @@ def my_hook(ctx: FunctionSigContext) -> CallableType:
     if len(arg1_args) != 1:
         return ctx.default_signature
     arg1_type = ctx.api.get_expression_type(arg1_args[0])
-    return ctx.default_signature.copy_modified(
-        arg_types=[arg1_type],
-        ret_type=arg1_type,
-    )
+    return ctx.default_signature.copy_modified(arg_types=[arg1_type], ret_type=arg1_type)
 
 
-def plugin(version):
+def plugin(version: str) -> type[FunctionSigPlugin]:
     return FunctionSigPlugin
diff --git a/test-data/unit/plugins/method_in_decorator.py b/test-data/unit/plugins/method_in_decorator.py
index 99774dfcc7ef..3fba7692266c 100644
--- a/test-data/unit/plugins/method_in_decorator.py
+++ b/test-data/unit/plugins/method_in_decorator.py
@@ -1,19 +1,25 @@
-from mypy.types import CallableType, Type
-from typing import Callable, Optional
+from __future__ import annotations
+
+from typing import Callable
+
 from mypy.plugin import MethodContext, Plugin
+from mypy.types import CallableType, Type, get_proper_type
 
 
 class MethodDecoratorPlugin(Plugin):
-    def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]:
-        if 'Foo.a' in fullname:
+    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
+        if "Foo.a" in fullname:
             return method_decorator_callback
         return None
 
+
 def method_decorator_callback(ctx: MethodContext) -> Type:
-    if isinstance(ctx.default_return_type, CallableType):
-        str_type = ctx.api.named_generic_type('builtins.str', [])
-        return ctx.default_return_type.copy_modified(ret_type=str_type)
+    default = get_proper_type(ctx.default_return_type)
+    if isinstance(default, CallableType):
+        str_type = ctx.api.named_generic_type("builtins.str", [])
+        return default.copy_modified(ret_type=str_type)
     return ctx.default_return_type
 
-def plugin(version):
+
+def plugin(version: str) -> type[MethodDecoratorPlugin]:
     return MethodDecoratorPlugin
diff --git a/test-data/unit/plugins/method_sig_hook.py b/test-data/unit/plugins/method_sig_hook.py
index 25c2842e6620..b78831cc45d5 100644
--- a/test-data/unit/plugins/method_sig_hook.py
+++ b/test-data/unit/plugins/method_sig_hook.py
@@ -1,30 +1,41 @@
-from mypy.plugin import CallableType, CheckerPluginInterface, MethodSigContext, Plugin
-from mypy.types import Instance, Type
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import CheckerPluginInterface, MethodSigContext, Plugin
+from mypy.types import CallableType, Instance, Type, get_proper_type
+
 
 class MethodSigPlugin(Plugin):
-    def get_method_signature_hook(self, fullname):
+    def get_method_signature_hook(
+        self, fullname: str
+    ) -> Callable[[MethodSigContext], CallableType] | None:
         # Ensure that all names are fully qualified
-        assert not fullname.endswith(' of Foo')
+        assert not fullname.endswith(" of Foo")
 
-        if fullname.startswith('__main__.Foo.'):
+        if fullname.startswith("__main__.Foo."):
             return my_hook
 
         return None
 
+
 def _str_to_int(api: CheckerPluginInterface, typ: Type) -> Type:
+    typ = get_proper_type(typ)
     if isinstance(typ, Instance):
-        if typ.type.fullname == 'builtins.str':
-            return api.named_generic_type('builtins.int', [])
+        if typ.type.fullname == "builtins.str":
+            return api.named_generic_type("builtins.int", [])
         elif typ.args:
             return typ.copy_modified(args=[_str_to_int(api, t) for t in typ.args])
 
     return typ
 
+
 def my_hook(ctx: MethodSigContext) -> CallableType:
     return ctx.default_signature.copy_modified(
         arg_types=[_str_to_int(ctx.api, t) for t in ctx.default_signature.arg_types],
         ret_type=_str_to_int(ctx.api, ctx.default_signature.ret_type),
     )
 
-def plugin(version):
+
+def plugin(version: str) -> type[MethodSigPlugin]:
     return MethodSigPlugin
diff --git a/test-data/unit/plugins/named_callable.py b/test-data/unit/plugins/named_callable.py
index e40d181d2bad..c37e11c32125 100644
--- a/test-data/unit/plugins/named_callable.py
+++ b/test-data/unit/plugins/named_callable.py
@@ -1,28 +1,33 @@
-from mypy.plugin import Plugin
-from mypy.types import CallableType
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import FunctionContext, Plugin
+from mypy.types import CallableType, Type, get_proper_type
 
 
 class MyPlugin(Plugin):
-    def get_function_hook(self, fullname):
-        if fullname == 'm.decorator1':
+    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
+        if fullname == "m.decorator1":
             return decorator_call_hook
-        if fullname == 'm._decorated':  # This is a dummy name generated by the plugin
+        if fullname == "m._decorated":  # This is a dummy name generated by the plugin
             return decorate_hook
         return None
 
 
-def decorator_call_hook(ctx):
-    if isinstance(ctx.default_return_type, CallableType):
-        return ctx.default_return_type.copy_modified(name='m._decorated')
+def decorator_call_hook(ctx: FunctionContext) -> Type:
+    default = get_proper_type(ctx.default_return_type)
+    if isinstance(default, CallableType):
+        return default.copy_modified(name="m._decorated")
     return ctx.default_return_type
 
 
-def decorate_hook(ctx):
-    if isinstance(ctx.default_return_type, CallableType):
-        return ctx.default_return_type.copy_modified(
-            ret_type=ctx.api.named_generic_type('builtins.str', []))
+def decorate_hook(ctx: FunctionContext) -> Type:
+    default = get_proper_type(ctx.default_return_type)
+    if isinstance(default, CallableType):
+        return default.copy_modified(ret_type=ctx.api.named_generic_type("builtins.str", []))
     return ctx.default_return_type
 
 
-def plugin(version):
+def plugin(version: str) -> type[MyPlugin]:
     return MyPlugin
diff --git a/test-data/unit/plugins/plugin2.py b/test-data/unit/plugins/plugin2.py
index b530a62d23aa..e486d96ea8bf 100644
--- a/test-data/unit/plugins/plugin2.py
+++ b/test-data/unit/plugins/plugin2.py
@@ -1,13 +1,21 @@
-from mypy.plugin import Plugin
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import FunctionContext, Plugin
+from mypy.types import Type
+
 
 class Plugin2(Plugin):
-    def get_function_hook(self, fullname):
-        if fullname in ('__main__.f', '__main__.g'):
+    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
+        if fullname in ("__main__.f", "__main__.g"):
             return str_hook
         return None
 
-def str_hook(ctx):
-    return ctx.api.named_generic_type('builtins.str', [])
 
-def plugin(version):
+def str_hook(ctx: FunctionContext) -> Type:
+    return ctx.api.named_generic_type("builtins.str", [])
+
+
+def plugin(version: str) -> type[Plugin2]:
     return Plugin2
diff --git a/test-data/unit/plugins/type_anal_hook.py b/test-data/unit/plugins/type_anal_hook.py
index 86d18d8c8611..c380bbe873fe 100644
--- a/test-data/unit/plugins/type_anal_hook.py
+++ b/test-data/unit/plugins/type_anal_hook.py
@@ -1,22 +1,23 @@
-from typing import Optional, Callable
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import AnalyzeTypeContext, Plugin
 
-from mypy.plugin import Plugin, AnalyzeTypeContext
-from mypy.types import Type, TypeList, AnyType, CallableType, TypeOfAny
 # The official name changed to NoneType but we have an alias for plugin compat reasons
 # so we'll keep testing that here.
-from mypy.types import NoneTyp
+from mypy.types import AnyType, CallableType, NoneTyp, Type, TypeList, TypeOfAny
+
 
 class TypeAnalyzePlugin(Plugin):
-    def get_type_analyze_hook(self, fullname: str
-                              ) -> Optional[Callable[[AnalyzeTypeContext], Type]]:
-        if fullname == 'm.Signal':
+    def get_type_analyze_hook(self, fullname: str) -> Callable[[AnalyzeTypeContext], Type] | None:
+        if fullname == "m.Signal":
             return signal_type_analyze_callback
         return None
 
 
 def signal_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:
-    if (len(ctx.type.args) != 1
-            or not isinstance(ctx.type.args[0], TypeList)):
+    if len(ctx.type.args) != 1 or not isinstance(ctx.type.args[0], TypeList):
         ctx.api.fail('Invalid "Signal" type (expected "Signal[[t, ...]]")', ctx.context)
         return AnyType(TypeOfAny.from_error)
 
@@ -27,13 +28,11 @@ def signal_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type:
         return AnyType(TypeOfAny.from_error)  # Error generated elsewhere
     arg_types, arg_kinds, arg_names = analyzed
     arg_types = [ctx.api.analyze_type(arg) for arg in arg_types]
-    type_arg = CallableType(arg_types,
-                            arg_kinds,
-                            arg_names,
-                            NoneTyp(),
-                            ctx.api.named_type('builtins.function', []))
-    return ctx.api.named_type('m.Signal', [type_arg])
+    type_arg = CallableType(
+        arg_types, arg_kinds, arg_names, NoneTyp(), ctx.api.named_type("builtins.function", [])
+    )
+    return ctx.api.named_type("m.Signal", [type_arg])
 
 
-def plugin(version):
+def plugin(version: str) -> type[TypeAnalyzePlugin]:
     return TypeAnalyzePlugin
diff --git a/test-data/unit/plugins/union_method.py b/test-data/unit/plugins/union_method.py
index a7621553f6ad..7c62ffb8c0cc 100644
--- a/test-data/unit/plugins/union_method.py
+++ b/test-data/unit/plugins/union_method.py
@@ -1,34 +1,40 @@
-from mypy.plugin import (
-    CallableType, CheckerPluginInterface, MethodSigContext, MethodContext, Plugin
-)
-from mypy.types import Instance, Type
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.plugin import CheckerPluginInterface, MethodContext, MethodSigContext, Plugin
+from mypy.types import CallableType, Instance, Type, get_proper_type
 
 
 class MethodPlugin(Plugin):
-    def get_method_signature_hook(self, fullname):
-        if fullname.startswith('__main__.Foo.'):
+    def get_method_signature_hook(
+        self, fullname: str
+    ) -> Callable[[MethodSigContext], CallableType] | None:
+        if fullname.startswith("__main__.Foo."):
             return my_meth_sig_hook
         return None
 
-    def get_method_hook(self, fullname):
-        if fullname.startswith('__main__.Bar.'):
+    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
+        if fullname.startswith("__main__.Bar."):
             return my_meth_hook
         return None
 
 
 def _str_to_int(api: CheckerPluginInterface, typ: Type) -> Type:
+    typ = get_proper_type(typ)
     if isinstance(typ, Instance):
-        if typ.type.fullname == 'builtins.str':
-            return api.named_generic_type('builtins.int', [])
+        if typ.type.fullname == "builtins.str":
+            return api.named_generic_type("builtins.int", [])
         elif typ.args:
             return typ.copy_modified(args=[_str_to_int(api, t) for t in typ.args])
     return typ
 
 
 def _float_to_int(api: CheckerPluginInterface, typ: Type) -> Type:
+    typ = get_proper_type(typ)
     if isinstance(typ, Instance):
-        if typ.type.fullname == 'builtins.float':
-            return api.named_generic_type('builtins.int', [])
+        if typ.type.fullname == "builtins.float":
+            return api.named_generic_type("builtins.int", [])
         elif typ.args:
             return typ.copy_modified(args=[_float_to_int(api, t) for t in typ.args])
     return typ
@@ -45,5 +51,5 @@ def my_meth_hook(ctx: MethodContext) -> Type:
     return _float_to_int(ctx.api, ctx.default_return_type)
 
 
-def plugin(version):
+def plugin(version: str) -> type[MethodPlugin]:
     return MethodPlugin
diff --git a/tox.ini b/tox.ini
index a809c4d2c570..e07acdc5200d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -55,3 +55,4 @@ passenv =
 commands =
     python runtests.py self
     python -m mypy --config-file mypy_self_check.ini misc --exclude misc/sync-typeshed.py
+    python -m mypy --config-file mypy_self_check.ini test-data/unit/plugins

From bd212bcc2229779c0f6c96b16bf9d685e98884c1 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Mon, 4 Sep 2023 18:43:05 +0300
Subject: [PATCH 108/288] Remove type aliases that are long supported (#16039)

Some builtin aliases are available for all python versions that we
support. So, there's no need to check them in `semanal`:


https://github.com/python/mypy/blob/8738886861682e0d168ea321c2cc6ee5b566cb8b/mypy/semanal.py#L673-L689

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 mypy/nodes.py   | 13 +------------
 mypy/semanal.py |  5 ++++-
 2 files changed, 5 insertions(+), 13 deletions(-)

diff --git a/mypy/nodes.py b/mypy/nodes.py
index db42dd6b3949..d29e99ccace7 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -138,18 +138,7 @@ def set_line(
 
 # This keeps track of the oldest supported Python version where the corresponding
 # alias source is available.
-type_aliases_source_versions: Final = {
-    "typing.List": (2, 7),
-    "typing.Dict": (2, 7),
-    "typing.Set": (2, 7),
-    "typing.FrozenSet": (2, 7),
-    "typing.ChainMap": (3, 3),
-    "typing.Counter": (2, 7),
-    "typing.DefaultDict": (2, 7),
-    "typing.Deque": (2, 7),
-    "typing.OrderedDict": (3, 7),
-    "typing.LiteralString": (3, 11),
-}
+type_aliases_source_versions: Final = {"typing.LiteralString": (3, 11)}
 
 # This keeps track of aliases in `typing_extensions`, which we treat specially.
 typing_extensions_aliases: Final = {
diff --git a/mypy/semanal.py b/mypy/semanal.py
index be7e733a0816..ec4d32aefeb9 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -680,7 +680,10 @@ def add_builtin_aliases(self, tree: MypyFile) -> None:
         """
         assert tree.fullname == "typing"
         for alias, target_name in type_aliases.items():
-            if type_aliases_source_versions[alias] > self.options.python_version:
+            if (
+                alias in type_aliases_source_versions
+                and type_aliases_source_versions[alias] > self.options.python_version
+            ):
                 # This alias is not available on this Python version.
                 continue
             name = alias.split(".")[-1]

From c712079e1cbd74e2ea37da02d66152810fb69903 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Mon, 4 Sep 2023 18:44:51 +0300
Subject: [PATCH 109/288] Do not use deprecated `add_method` in `attrs` plugin
 (#16037)

CC @ikonst

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 mypy/plugins/attrs.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py
index 6f5b6f35da07..3ddc234a7e4a 100644
--- a/mypy/plugins/attrs.py
+++ b/mypy/plugins/attrs.py
@@ -51,7 +51,7 @@
     _get_bool_argument,
     _get_decorator_bool_argument,
     add_attribute_to_class,
-    add_method,
+    add_method_to_class,
     deserialize_and_fixup_type,
 )
 from mypy.server.trigger import make_wildcard_trigger
@@ -952,7 +952,9 @@ def add_method(
         tvd: If the method is generic these should be the type variables.
         """
         self_type = self_type if self_type is not None else self.self_type
-        add_method(self.ctx, method_name, args, ret_type, self_type, tvd)
+        add_method_to_class(
+            self.ctx.api, self.ctx.cls, method_name, args, ret_type, self_type, tvd
+        )
 
 
 def _get_attrs_init_type(typ: Instance) -> CallableType | None:

From 4496a005a84f7daedc1ef2e801583127f5995f75 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Mon, 4 Sep 2023 19:28:00 +0300
Subject: [PATCH 110/288] Use latest `actions/checkout@v4` (#16042)

Looks like recent CI failures are related.
Release docs: https://github.com/actions/checkout/releases/tag/v4.0.0
---
 .github/workflows/build_wheels.yml  | 2 +-
 .github/workflows/docs.yml          | 2 +-
 .github/workflows/mypy_primer.yml   | 2 +-
 .github/workflows/sync_typeshed.yml | 2 +-
 .github/workflows/test.yml          | 4 ++--
 .github/workflows/test_stubgenc.yml | 2 +-
 6 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml
index 3f4ea5e42f9b..f1438279673d 100644
--- a/.github/workflows/build_wheels.yml
+++ b/.github/workflows/build_wheels.yml
@@ -13,7 +13,7 @@ jobs:
     if: github.repository == 'python/mypy'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions/setup-python@v4
         with:
           python-version: '3.11'
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 037738d4b3aa..6c53afb9aa7c 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -29,7 +29,7 @@ jobs:
       TOX_SKIP_MISSING_INTERPRETERS: False
       VERIFY_MYPY_ERROR_CODES: 1
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions/setup-python@v4
         with:
           python-version: '3.8'
diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml
index 2958b8fc325b..f8991e27970a 100644
--- a/.github/workflows/mypy_primer.yml
+++ b/.github/workflows/mypy_primer.yml
@@ -33,7 +33,7 @@ jobs:
         shard-index: [0, 1, 2, 3, 4]
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           path: mypy_to_test
           fetch-depth: 0
diff --git a/.github/workflows/sync_typeshed.yml b/.github/workflows/sync_typeshed.yml
index 1db2e846f099..de9e0aad599f 100644
--- a/.github/workflows/sync_typeshed.yml
+++ b/.github/workflows/sync_typeshed.yml
@@ -15,7 +15,7 @@ jobs:
     if: github.repository == 'python/mypy'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
         # TODO: use whatever solution ends up working for
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index d2e7e7258500..3bcd9e059589 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -119,7 +119,7 @@ jobs:
       # Pytest
       PYTEST_ADDOPTS: --color=yes
     steps:
-    - uses: actions/checkout@v3
+    - uses: actions/checkout@v4
     - uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python }}
@@ -162,7 +162,7 @@ jobs:
       CXX: i686-linux-gnu-g++
       CC: i686-linux-gnu-gcc
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Install 32-bit build dependencies
         run: |
           sudo dpkg --add-architecture i386 && \
diff --git a/.github/workflows/test_stubgenc.yml b/.github/workflows/test_stubgenc.yml
index 33466b9870ff..a2fb3e9dce6b 100644
--- a/.github/workflows/test_stubgenc.yml
+++ b/.github/workflows/test_stubgenc.yml
@@ -26,7 +26,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
 
-    - uses: actions/checkout@v3
+    - uses: actions/checkout@v4
 
     - name: Setup 🐍 3.8
       uses: actions/setup-python@v4

From 5d9d13ebc9899ec43699b8e91ec5587d6f962283 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Tue, 5 Sep 2023 14:38:58 +0300
Subject: [PATCH 111/288] Document `force_union_syntax` and
 `force_uppercase_builtins` (#16048)

Users don't know about them:
https://github.com/typeddjango/pytest-mypy-plugins/issues/126
Since they are quite important for testing, I think that it is a must to
include them.

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 docs/source/config_file.rst | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst
index c0798bbf03f1..b5ce23ff11ec 100644
--- a/docs/source/config_file.rst
+++ b/docs/source/config_file.rst
@@ -816,6 +816,22 @@ These options may only be set in the global section (``[mypy]``).
 
     Show absolute paths to files.
 
+.. confval:: force_uppercase_builtins
+
+    :type: boolean
+    :default: False
+
+    Always use ``List`` instead of ``list`` in error messages,
+    even on Python 3.9+.
+
+.. confval:: force_union_syntax
+
+    :type: boolean
+    :default: False
+
+    Always use ``Union[]`` and ``Optional[]`` for union types
+    in error messages (instead of the ``|`` operator),
+    even on Python 3.10+.
 
 Incremental mode
 ****************

From c0906408c10d24d748711fa24be5befb2c794d4c Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Tue, 5 Sep 2023 14:57:38 +0300
Subject: [PATCH 112/288] Add docs about `--force-uppercase-builtins` and
 `--force-union-syntax` (#16049)

Refs https://github.com/python/mypy/pull/16048
---
 docs/source/command_line.rst | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
index 727d500e2d4d..4e954c7c2ccb 100644
--- a/docs/source/command_line.rst
+++ b/docs/source/command_line.rst
@@ -789,6 +789,17 @@ in error messages.
     useful or they may be overly noisy. If ``N`` is negative, there is
     no limit. The default limit is 200.
 
+.. option:: --force-uppercase-builtins
+
+    Always use ``List`` instead of ``list`` in error messages,
+    even on Python 3.9+.
+
+.. option:: --force-union-syntax
+
+    Always use ``Union[]`` and ``Optional[]`` for union types
+    in error messages (instead of the ``|`` operator),
+    even on Python 3.10+.
+
 
 .. _incremental:
 

From ed9b8990025a81a12e32bec59f2f3bfab3d7c71b Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Wed, 6 Sep 2023 19:43:24 +0200
Subject: [PATCH 113/288] Clear cache when adding --new-type-inference (#16059)

Add `new_type_inference` to the list of options affecting the cache.
---
 mypy/options.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/mypy/options.py b/mypy/options.py
index 5e451c0aa0a3..007ae0a78aa1 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -62,6 +62,7 @@ class BuildType:
     | {
         "platform",
         "bazel",
+        "new_type_inference",
         "plugins",
         "disable_bytearray_promotion",
         "disable_memoryview_promotion",

From 175c5a59f18df9d56b3c2fb0e2a9669dd196c311 Mon Sep 17 00:00:00 2001
From: Randolf Scholz <randolf.scholz@gmail.com>
Date: Thu, 7 Sep 2023 18:30:07 +0200
Subject: [PATCH 114/288] Introduce error category [unsafe-overload] (#16061)

fixes #16060

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 mypy/errorcodes.py                   |  7 +++++++
 mypy/messages.py                     |  1 +
 mypy/types.py                        |  2 +-
 test-data/unit/check-errorcodes.test | 14 ++++++++++++++
 4 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py
index 3594458fa362..70b8cffe9053 100644
--- a/mypy/errorcodes.py
+++ b/mypy/errorcodes.py
@@ -261,3 +261,10 @@ def __hash__(self) -> int:
 
 # This is a catch-all for remaining uncategorized errors.
 MISC: Final = ErrorCode("misc", "Miscellaneous other checks", "General")
+
+UNSAFE_OVERLOAD: Final[ErrorCode] = ErrorCode(
+    "unsafe-overload",
+    "Warn if multiple @overload variants overlap in unsafe ways",
+    "General",
+    sub_code_of=MISC,
+)
diff --git a/mypy/messages.py b/mypy/messages.py
index bba9c3c3cdea..a58c5f91c4b1 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -1604,6 +1604,7 @@ def overloaded_signatures_overlap(self, index1: int, index2: int, context: Conte
             "Overloaded function signatures {} and {} overlap with "
             "incompatible return types".format(index1, index2),
             context,
+            code=codes.UNSAFE_OVERLOAD,
         )
 
     def overloaded_signature_will_never_match(
diff --git a/mypy/types.py b/mypy/types.py
index f974157ce84d..cee4595b67cc 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -3019,7 +3019,7 @@ def get_proper_type(typ: Type | None) -> ProperType | None:
 
 
 @overload
-def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]:  # type: ignore[misc]
+def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]:  # type: ignore[unsafe-overload]
     ...
 
 
diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test
index df14e328ed72..72edf2f22c05 100644
--- a/test-data/unit/check-errorcodes.test
+++ b/test-data/unit/check-errorcodes.test
@@ -1072,3 +1072,17 @@ A.f = h  # type: ignore[assignment]  # E: Unused "type: ignore" comment, use nar
 [case testUnusedIgnoreEnableCode]
 # flags: --enable-error-code=unused-ignore
 x = 1  # type: ignore  # E: Unused "type: ignore" comment  [unused-ignore]
+
+[case testErrorCodeUnsafeOverloadError]
+from typing import overload, Union
+
+@overload
+def unsafe_func(x: int) -> int: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types  [unsafe-overload]
+@overload
+def unsafe_func(x: object) -> str: ...
+def unsafe_func(x: object) -> Union[int, str]:
+    if isinstance(x, int):
+        return 42
+    else:
+        return "some string"
+[builtins fixtures/isinstancelist.pyi]

From 816ba3b33dd157def6b7d8c0b0fcca65ff2cbc05 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Thu, 7 Sep 2023 21:21:46 +0100
Subject: [PATCH 115/288] Build the docs in CI for all PRs touching the `mypy/`
 directory (#16068)

1. #16061 added a new error code, but didn't add any docs for the new
error code
2. Because nothing in the `docs/` directory was modified, the docs CI
job didn't run on that PR
3. Now the docs build is failing on `master` because we have an error
code without any documentation:
https://github.com/python/mypy/actions/runs/6112378542/job/16589719563
---
 .github/workflows/docs.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 6c53afb9aa7c..ad6b57c53fd9 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -8,6 +8,10 @@ on:
   pull_request:
     paths:
     - 'docs/**'
+    # We now have a docs check that fails if any error codes don't have documentation,
+    # so it's important to do the docs build on all PRs touching mypy/errorcodes.py
+    # in case somebody's adding a new error code without any docs
+    - 'mypy/errorcodes.py'
     - 'mypyc/doc/**'
     - '**/*.rst'
     - '**/*.md'

From 8b73cc22c6a251682f777b104677fa0e1ed5fd67 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 7 Sep 2023 23:23:25 +0100
Subject: [PATCH 116/288] Complete type analysis of variadic types (#15991)

This PR closes the first part of support for `TypeVarTuple`: the
"static" analysis of types (of course everything is static in mypy, but
some parts are more static): `semanal`/`typeanal`, `expand_type()`,
`map_instance_to_supertype()`, `erase_type()` (things that precede
and/or form foundation for type inference and subtyping). This one was
quite tricky, supporting unpacks of forward references required some
thinking.

What is included in this PR:
* Moving argument count validation from `semanal_typeargs` to
`typeanal`. In one of previous PRs I mentioned that `get_proper_type()`
may be called during semantic analysis causing troubles if we have
invalid aliases. So we need to move validation to early stage. For
instances, this is not required, but I strongly prefer keeping instances
and aliases similar. And ideally at some point we can combine the logic,
since it gets more and more similar. At some point we may want to
prohibit using `get_proper_type()` during semantic analysis, but I don't
want to block `TypeVarTuple` support on this, since this may be a
significant refactoring.
* Fixing `map_instance_to_supertype()` and `erase_type()`. These two are
straightforward, we either use `expand_type()` logic directly (by
calling it), or following the same logic.
* Few simplifications in `expandtype` and `typeops` following previous
normalizations of representation, unless there is a flaw in my logic,
removed branches should be all dead code.
* Allow (only fixed) unpacks in argument lists for non-variadic types.
They were prohibited for no good reason.
* (Somewhat limited) support for forward references in unpacks. As I
mentioned this one is tricky because of how forward references are
represented. Usually they follow either a life cycle like: `Any` ->
`<known type>`, or `<Any>` -> `<placeholder>` -> `<known type>` (second
one is relatively rare and usually only appears for potentially
recursive things like base classes or type alias targets). It looks like
`<placeholder>` can never appear as a _valid_ unpack target, I don't
have a proof for this, but I was not able to trigger this, so I am not
handling it (possible downside is that there may be extra errors about
invalid argument count for invalid unpack targets). If I am wrong and
this can happen in some valid cases, we can add handling for unpacks of
placeholders later. Currently, the handling for `Any` stage of forward
references is following: if we detect it, we simply create a dummy valid
alias or instance. This logic should work for the same reason having
plain `Any` worked in the first place (and why all tests pass if we
delete `visit_placeholder_type()`): because (almost) each time we
analyze a type, it is either already complete, or we analyze it _from
scratch_, i.e. we call `expr_to_unanalyzed_type()`, then
`visit_unbound_type()` etc. We almost never store "partially analyzed"
types (there are guards against incomplete references and placeholders
in annotations), and when we do, it is done in a controlled way that
guarantees a type will be re-analyzed again. Since this is such a tricky
subject, I didn't add any complex logic to support more tricky use cases
(like multiple forward references to fixed unpacks in single list). I
propose that we release this, and then see what kind of bug reports we
will get.
* Additional validation for type arguments position to ensure that
`TypeVarTuple`s are never split. Total count is not enough to ban case
where we have type variables `[T, *Ts, S, U]` and arguments `[int, int,
*Us, int]`. We need to explicitly ensure that actual suffix and prefix
are longer or equal to formal ones. Such splitting would be very hard to
support, and is explicitly banned by the PEP.
* Few minor cleanups.

Some random comments:
* It is tricky to preserve valid parts of type arguments, if there is an
argument count error involving an unpack. So after such error I simply
set all arguments to `Any` (or `*tuple[Any, ...]` when needed).
* I know there is some code duplication. I tried to factor it away, but
it turned out non-trivial. I may do some de-duplication pass after
everything is done, and it is easier to see the big picture.
* Type applications (i.e. when we have `A[int, int]` in runtime context)
are wild west currently. I decided to postpone variadic support for them
to a separate PR, because there is already some support (we will just
need to handle edge cases and more error conditions) and I wanted
minimize size of this PR.
* Something I wanted to mention in one of previous PRs but forgot: Long
time ago I proposed to normalize away type aliases inside `Unpack`, but
I abandoned this idea, it doesn't really give us any benefits.

As I said, this is the last PR for the "static part", in the next PR I
will work on fixing subtyping and inference for variadic instances. And
then will continue with remaining items I mentioned in my master plan in
https://github.com/python/mypy/pull/15924

Fixes https://github.com/python/mypy/issues/15978

---------

Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
---
 mypy/erasetype.py                       |  34 ++++-
 mypy/expandtype.py                      |  66 ++++-----
 mypy/maptype.py                         |  22 +--
 mypy/semanal_typeargs.py                |  61 ++------
 mypy/test/testtypes.py                  |   2 +-
 mypy/typeanal.py                        | 177 ++++++++++++++++++------
 mypy/typeops.py                         |   4 +-
 test-data/unit/check-typevar-tuple.test | 123 +++++++++++++++-
 8 files changed, 329 insertions(+), 160 deletions(-)

diff --git a/mypy/erasetype.py b/mypy/erasetype.py
index fbbb4f80b578..d1a01fb6c779 100644
--- a/mypy/erasetype.py
+++ b/mypy/erasetype.py
@@ -165,9 +165,41 @@ def visit_type_var(self, t: TypeVarType) -> Type:
             return self.replacement
         return t
 
+    # TODO: below two methods duplicate some logic with expand_type().
+    # In fact, we may want to refactor this whole visitor to use expand_type().
+    def visit_instance(self, t: Instance) -> Type:
+        result = super().visit_instance(t)
+        assert isinstance(result, ProperType) and isinstance(result, Instance)
+        if t.type.fullname == "builtins.tuple":
+            # Normalize Tuple[*Tuple[X, ...], ...] -> Tuple[X, ...]
+            arg = result.args[0]
+            if isinstance(arg, UnpackType):
+                unpacked = get_proper_type(arg.type)
+                if isinstance(unpacked, Instance):
+                    assert unpacked.type.fullname == "builtins.tuple"
+                    return unpacked
+        return result
+
+    def visit_tuple_type(self, t: TupleType) -> Type:
+        result = super().visit_tuple_type(t)
+        assert isinstance(result, ProperType) and isinstance(result, TupleType)
+        if len(result.items) == 1:
+            # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...]
+            item = result.items[0]
+            if isinstance(item, UnpackType):
+                unpacked = get_proper_type(item.type)
+                if isinstance(unpacked, Instance):
+                    assert unpacked.type.fullname == "builtins.tuple"
+                    if result.partial_fallback.type.fullname != "builtins.tuple":
+                        # If it is a subtype (like named tuple) we need to preserve it,
+                        # this essentially mimics the logic in tuple_fallback().
+                        return result.partial_fallback.accept(self)
+                    return unpacked
+        return result
+
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type:
         if self.erase_id(t.id):
-            return self.replacement
+            return t.tuple_fallback.copy_modified(args=[self.replacement])
         return t
 
     def visit_param_spec(self, t: ParamSpecType) -> Type:
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index be8ecb9ccfd9..c29fcb167777 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -212,10 +212,15 @@ def visit_erased_type(self, t: ErasedType) -> Type:
 
     def visit_instance(self, t: Instance) -> Type:
         args = self.expand_types_with_unpack(list(t.args))
-        if isinstance(args, list):
-            return t.copy_modified(args=args)
-        else:
-            return args
+        if t.type.fullname == "builtins.tuple":
+            # Normalize Tuple[*Tuple[X, ...], ...] -> Tuple[X, ...]
+            arg = args[0]
+            if isinstance(arg, UnpackType):
+                unpacked = get_proper_type(arg.type)
+                if isinstance(unpacked, Instance):
+                    assert unpacked.type.fullname == "builtins.tuple"
+                    args = list(unpacked.args)
+        return t.copy_modified(args=args)
 
     def visit_type_var(self, t: TypeVarType) -> Type:
         # Normally upper bounds can't contain other type variables, the only exception is
@@ -285,7 +290,7 @@ def expand_unpack(self, t: UnpackType) -> list[Type]:
         ):
             return [UnpackType(typ=repl)]
         elif isinstance(repl, (AnyType, UninhabitedType)):
-            # Replace *Ts = Any with *Ts = *tuple[Any, ...] and some for Never.
+            # Replace *Ts = Any with *Ts = *tuple[Any, ...] and same for Never.
             # These types may appear here as a result of user error or failed inference.
             return [UnpackType(t.type.tuple_fallback.copy_modified(args=[repl]))]
         else:
@@ -377,15 +382,8 @@ def visit_overloaded(self, t: Overloaded) -> Type:
             items.append(new_item)
         return Overloaded(items)
 
-    def expand_types_with_unpack(
-        self, typs: Sequence[Type]
-    ) -> list[Type] | AnyType | UninhabitedType:
-        """Expands a list of types that has an unpack.
-
-        In corner cases, this can return a type rather than a list, in which case this
-        indicates use of Any or some error occurred earlier. In this case callers should
-        simply propagate the resulting type.
-        """
+    def expand_types_with_unpack(self, typs: Sequence[Type]) -> list[Type]:
+        """Expands a list of types that has an unpack."""
         items: list[Type] = []
         for item in typs:
             if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType):
@@ -396,24 +394,21 @@ def expand_types_with_unpack(
 
     def visit_tuple_type(self, t: TupleType) -> Type:
         items = self.expand_types_with_unpack(t.items)
-        if isinstance(items, list):
-            if len(items) == 1:
-                # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...]
-                item = items[0]
-                if isinstance(item, UnpackType):
-                    unpacked = get_proper_type(item.type)
-                    if isinstance(unpacked, Instance):
-                        assert unpacked.type.fullname == "builtins.tuple"
-                        if t.partial_fallback.type.fullname != "builtins.tuple":
-                            # If it is a subtype (like named tuple) we need to preserve it,
-                            # this essentially mimics the logic in tuple_fallback().
-                            return t.partial_fallback.accept(self)
-                        return unpacked
-            fallback = t.partial_fallback.accept(self)
-            assert isinstance(fallback, ProperType) and isinstance(fallback, Instance)
-            return t.copy_modified(items=items, fallback=fallback)
-        else:
-            return items
+        if len(items) == 1:
+            # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...]
+            item = items[0]
+            if isinstance(item, UnpackType):
+                unpacked = get_proper_type(item.type)
+                if isinstance(unpacked, Instance):
+                    assert unpacked.type.fullname == "builtins.tuple"
+                    if t.partial_fallback.type.fullname != "builtins.tuple":
+                        # If it is a subtype (like named tuple) we need to preserve it,
+                        # this essentially mimics the logic in tuple_fallback().
+                        return t.partial_fallback.accept(self)
+                    return unpacked
+        fallback = t.partial_fallback.accept(self)
+        assert isinstance(fallback, ProperType) and isinstance(fallback, Instance)
+        return t.copy_modified(items=items, fallback=fallback)
 
     def visit_typeddict_type(self, t: TypedDictType) -> Type:
         fallback = t.fallback.accept(self)
@@ -453,11 +448,8 @@ def visit_type_alias_type(self, t: TypeAliasType) -> Type:
         # Target of the type alias cannot contain type variables (not bound by the type
         # alias itself), so we just expand the arguments.
         args = self.expand_types_with_unpack(t.args)
-        if isinstance(args, list):
-            # TODO: normalize if target is Tuple, and args are [*tuple[X, ...]]?
-            return t.copy_modified(args=args)
-        else:
-            return args
+        # TODO: normalize if target is Tuple, and args are [*tuple[X, ...]]?
+        return t.copy_modified(args=args)
 
     def expand_types(self, types: Iterable[Type]) -> list[Type]:
         a: list[Type] = []
diff --git a/mypy/maptype.py b/mypy/maptype.py
index 4951306573c2..0d54a83127df 100644
--- a/mypy/maptype.py
+++ b/mypy/maptype.py
@@ -1,8 +1,8 @@
 from __future__ import annotations
 
-from mypy.expandtype import expand_type
+from mypy.expandtype import expand_type_by_instance
 from mypy.nodes import TypeInfo
-from mypy.types import AnyType, Instance, TupleType, Type, TypeOfAny, TypeVarId, has_type_vars
+from mypy.types import AnyType, Instance, TupleType, TypeOfAny, has_type_vars
 
 
 def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Instance:
@@ -25,8 +25,7 @@ def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Insta
             if not alias._is_recursive:
                 # Unfortunately we can't support this for generic recursive tuples.
                 # If we skip this special casing we will fall back to tuple[Any, ...].
-                env = instance_to_type_environment(instance)
-                tuple_type = expand_type(instance.type.tuple_type, env)
+                tuple_type = expand_type_by_instance(instance.type.tuple_type, instance)
                 if isinstance(tuple_type, TupleType):
                     # Make the import here to avoid cyclic imports.
                     import mypy.typeops
@@ -91,8 +90,7 @@ def map_instance_to_direct_supertypes(instance: Instance, supertype: TypeInfo) -
 
     for b in typ.bases:
         if b.type == supertype:
-            env = instance_to_type_environment(instance)
-            t = expand_type(b, env)
+            t = expand_type_by_instance(b, instance)
             assert isinstance(t, Instance)
             result.append(t)
 
@@ -103,15 +101,3 @@ def map_instance_to_direct_supertypes(instance: Instance, supertype: TypeInfo) -
         # type arguments implicitly.
         any_type = AnyType(TypeOfAny.unannotated)
         return [Instance(supertype, [any_type] * len(supertype.type_vars))]
-
-
-def instance_to_type_environment(instance: Instance) -> dict[TypeVarId, Type]:
-    """Given an Instance, produce the resulting type environment for type
-    variables bound by the Instance's class definition.
-
-    An Instance is a type application of a class (a TypeInfo) to its
-    required number of type arguments.  So this environment consists
-    of the class's type variables mapped to the Instance's actual
-    arguments.  The type variables are mapped by their `id`.
-    """
-    return {binder.id: arg for binder, arg in zip(instance.type.defn.type_vars, instance.args)}
diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py
index 749b02391e06..3e11951376c9 100644
--- a/mypy/semanal_typeargs.py
+++ b/mypy/semanal_typeargs.py
@@ -18,7 +18,6 @@
 from mypy.options import Options
 from mypy.scope import Scope
 from mypy.subtypes import is_same_type, is_subtype
-from mypy.typeanal import fix_type_var_tuple_argument, set_any_tvars
 from mypy.types import (
     AnyType,
     CallableType,
@@ -88,36 +87,7 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None:
             # types, since errors there have already been reported.
             return
         self.seen_aliases.add(t)
-        # Some recursive aliases may produce spurious args. In principle this is not very
-        # important, as we would simply ignore them when expanding, but it is better to keep
-        # correct aliases. Also, variadic aliases are better to check when fully analyzed,
-        # so we do this here.
         assert t.alias is not None, f"Unfixed type alias {t.type_ref}"
-        # TODO: consider moving this validation to typeanal.py, expanding invalid aliases
-        # during semantic analysis may cause crashes.
-        if t.alias.tvar_tuple_index is not None:
-            correct = len(t.args) >= len(t.alias.alias_tvars) - 1
-            if any(
-                isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance)
-                for a in t.args
-            ):
-                correct = True
-        else:
-            correct = len(t.args) == len(t.alias.alias_tvars)
-        if not correct:
-            if t.alias.tvar_tuple_index is not None:
-                exp_len = f"at least {len(t.alias.alias_tvars) - 1}"
-            else:
-                exp_len = f"{len(t.alias.alias_tvars)}"
-            self.fail(
-                "Bad number of arguments for type alias,"
-                f" expected: {exp_len}, given: {len(t.args)}",
-                t,
-                code=codes.TYPE_ARG,
-            )
-            t.args = set_any_tvars(
-                t.alias, t.line, t.column, self.options, from_error=True, fail=self.fail
-            ).args
         is_error = self.validate_args(t.alias.name, t.args, t.alias.alias_tvars, t)
         if not is_error:
             # If there was already an error for the alias itself, there is no point in checking
@@ -144,34 +114,21 @@ def visit_callable_type(self, t: CallableType) -> None:
                     t.arg_types[star_index] = p_type.args[0]
 
     def visit_instance(self, t: Instance) -> None:
+        super().visit_instance(t)
         # Type argument counts were checked in the main semantic analyzer pass. We assume
         # that the counts are correct here.
         info = t.type
         if isinstance(info, FakeInfo):
             return  # https://github.com/python/mypy/issues/11079
-        t.args = tuple(flatten_nested_tuples(t.args))
-        if t.type.has_type_var_tuple_type:
-            # Regular Instances are already validated in typeanal.py.
-            # TODO: do something with partial overlap (probably just reject).
-            # also in other places where split_with_prefix_and_suffix() is used.
-            correct = len(t.args) >= len(t.type.type_vars) - 1
-            if any(
-                isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance)
-                for a in t.args
-            ):
-                correct = True
-            if not correct:
-                exp_len = f"at least {len(t.type.type_vars) - 1}"
-                self.fail(
-                    f"Bad number of arguments, expected: {exp_len}, given: {len(t.args)}",
-                    t,
-                    code=codes.TYPE_ARG,
-                )
-                any_type = AnyType(TypeOfAny.from_error)
-                t.args = (any_type,) * len(t.type.type_vars)
-                fix_type_var_tuple_argument(any_type, t)
         self.validate_args(info.name, t.args, info.defn.type_vars, t)
-        super().visit_instance(t)
+        if t.type.fullname == "builtins.tuple" and len(t.args) == 1:
+            # Normalize Tuple[*Tuple[X, ...], ...] -> Tuple[X, ...]
+            arg = t.args[0]
+            if isinstance(arg, UnpackType):
+                unpacked = get_proper_type(arg.type)
+                if isinstance(unpacked, Instance):
+                    assert unpacked.type.fullname == "builtins.tuple"
+                    t.args = unpacked.args
 
     def validate_args(
         self, name: str, args: Sequence[Type], type_vars: list[TypeVarLikeType], ctx: Context
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
index 12e7b207b00a..59457dfa5d3b 100644
--- a/mypy/test/testtypes.py
+++ b/mypy/test/testtypes.py
@@ -1464,7 +1464,7 @@ def make_call(*items: tuple[str, str | None]) -> CallExpr:
 class TestExpandTypeLimitGetProperType(TestCase):
     # WARNING: do not increase this number unless absolutely necessary,
     # and you understand what you are doing.
-    ALLOWED_GET_PROPER_TYPES = 7
+    ALLOWED_GET_PROPER_TYPES = 8
 
     @skipUnless(mypy.expandtype.__file__.endswith(".py"), "Skip for compiled mypy")
     def test_count_get_proper_type(self) -> None:
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index ed1a8073887b..e297f2bf1631 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -82,6 +82,7 @@
     UnionType,
     UnpackType,
     callable_with_ellipsis,
+    find_unpack_in_list,
     flatten_nested_tuples,
     flatten_nested_unions,
     get_proper_type,
@@ -404,7 +405,7 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool)
                     t.args,
                     allow_param_spec=True,
                     allow_param_spec_literals=node.has_param_spec_type,
-                    allow_unpack=node.tvar_tuple_index is not None,
+                    allow_unpack=True,  # Fixed length unpacks can be used for non-variadic aliases.
                 )
                 if node.has_param_spec_type and len(node.alias_tvars) == 1:
                     an_args = self.pack_paramspec_args(an_args)
@@ -425,9 +426,8 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool)
                 # when it is top-level instance, so no need to recurse.
                 if (
                     isinstance(res, Instance)  # type: ignore[misc]
-                    and len(res.args) != len(res.type.type_vars)
                     and not self.defining_alias
-                    and not res.type.has_type_var_tuple_type
+                    and not validate_instance(res, self.fail)
                 ):
                     fix_instance(
                         res,
@@ -510,9 +510,6 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type:
                 code=codes.VALID_TYPE,
             )
             return AnyType(TypeOfAny.from_error)
-
-        # TODO: this may not work well with aliases, if those worked.
-        #   Those should be special-cased.
         elif isinstance(ps, ParamSpecType) and ps.prefix.arg_types:
             self.api.fail("Nested Concatenates are invalid", t, code=codes.VALID_TYPE)
 
@@ -728,7 +725,7 @@ def analyze_type_with_type_info(
                 args,
                 allow_param_spec=True,
                 allow_param_spec_literals=info.has_param_spec_type,
-                allow_unpack=info.has_type_var_tuple_type,
+                allow_unpack=True,  # Fixed length tuples can be used for non-variadic types.
             ),
             ctx.line,
             ctx.column,
@@ -736,19 +733,9 @@ def analyze_type_with_type_info(
         if len(info.type_vars) == 1 and info.has_param_spec_type:
             instance.args = tuple(self.pack_paramspec_args(instance.args))
 
-        if info.has_type_var_tuple_type:
-            if instance.args:
-                # -1 to account for empty tuple
-                valid_arg_length = len(instance.args) >= len(info.type_vars) - 1
-            # Empty case is special cased and we want to infer a Tuple[Any, ...]
-            # instead of the empty tuple, so no - 1 here.
-            else:
-                valid_arg_length = False
-        else:
-            valid_arg_length = len(instance.args) == len(info.type_vars)
-
         # Check type argument count.
-        if not valid_arg_length and not self.defining_alias:
+        instance.args = tuple(flatten_nested_tuples(instance.args))
+        if not self.defining_alias and not validate_instance(instance, self.fail):
             fix_instance(
                 instance,
                 self.fail,
@@ -1342,9 +1329,7 @@ def analyze_callable_type(self, t: UnboundType) -> Type:
                         callable_args, ret_type, fallback
                     )
                     if isinstance(maybe_ret, CallableType):
-                        maybe_ret = maybe_ret.copy_modified(
-                            ret_type=ret_type.accept(self), variables=variables
-                        )
+                        maybe_ret = maybe_ret.copy_modified(variables=variables)
                 if maybe_ret is None:
                     # Callable[?, RET] (where ? is something invalid)
                     self.fail(
@@ -1736,6 +1721,7 @@ def check_unpacks_in_list(self, items: list[Type]) -> list[Type]:
         num_unpacks = 0
         final_unpack = None
         for item in items:
+            # TODO: handle forward references here, they appear as Unpack[Any].
             if isinstance(item, UnpackType) and not isinstance(
                 get_proper_type(item.type), TupleType
             ):
@@ -1856,25 +1842,13 @@ def fix_instance(
         any_type = get_omitted_any(disallow_any, fail, note, t, options, fullname, unexpanded_type)
         t.args = (any_type,) * len(t.type.type_vars)
         fix_type_var_tuple_argument(any_type, t)
-
         return
-
-    if t.type.has_type_var_tuple_type:
-        # This can be only correctly analyzed when all arguments are fully
-        # analyzed, because there may be a variadic item among them, so we
-        # do this in semanal_typeargs.py.
-        return
-
-    # Invalid number of type parameters.
-    fail(
-        wrong_type_arg_count(len(t.type.type_vars), str(len(t.args)), t.type.name),
-        t,
-        code=codes.TYPE_ARG,
-    )
     # Construct the correct number of type arguments, as
     # otherwise the type checker may crash as it expects
     # things to be right.
-    t.args = tuple(AnyType(TypeOfAny.from_error) for _ in t.type.type_vars)
+    any_type = AnyType(TypeOfAny.from_error)
+    t.args = tuple(any_type for _ in t.type.type_vars)
+    fix_type_var_tuple_argument(any_type, t)
     t.invalid = True
 
 
@@ -1903,6 +1877,15 @@ def instantiate_type_alias(
         ctx: context where expansion happens
         unexpanded_type, disallow_any, use_standard_error: used to customize error messages
     """
+    # Type aliases are special, since they can be expanded during semantic analysis,
+    # so we need to normalize them as soon as possible.
+    # TODO: can this cause an infinite recursion?
+    args = flatten_nested_tuples(args)
+    if any(unknown_unpack(a) for a in args):
+        # This type is not ready to be validated, because of unknown total count.
+        # Note that we keep the kind of Any for consistency.
+        return set_any_tvars(node, ctx.line, ctx.column, options, special_form=True)
+
     exp_len = len(node.alias_tvars)
     act_len = len(args)
     if (
@@ -1937,22 +1920,54 @@ def instantiate_type_alias(
         tp.line = ctx.line
         tp.column = ctx.column
         return tp
-    if act_len != exp_len and node.tvar_tuple_index is None:
+    if node.tvar_tuple_index is None:
+        if any(isinstance(a, UnpackType) for a in args):
+            # A variadic unpack in fixed size alias (fixed unpacks must be flattened by the caller)
+            fail(message_registry.INVALID_UNPACK_POSITION, ctx, code=codes.VALID_TYPE)
+            return set_any_tvars(node, ctx.line, ctx.column, options, from_error=True)
+        correct = act_len == exp_len
+    else:
+        correct = act_len >= exp_len - 1
+        for a in args:
+            if isinstance(a, UnpackType):
+                unpacked = get_proper_type(a.type)
+                if isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple":
+                    # Variadic tuple is always correct.
+                    correct = True
+    if not correct:
         if use_standard_error:
             # This is used if type alias is an internal representation of another type,
             # for example a generic TypedDict or NamedTuple.
             msg = wrong_type_arg_count(exp_len, str(act_len), node.name)
         else:
-            msg = f"Bad number of arguments for type alias, expected: {exp_len}, given: {act_len}"
+            if node.tvar_tuple_index is not None:
+                exp_len_str = f"at least {exp_len - 1}"
+            else:
+                exp_len_str = str(exp_len)
+            msg = (
+                "Bad number of arguments for type alias,"
+                f" expected: {exp_len_str}, given: {act_len}"
+            )
         fail(msg, ctx, code=codes.TYPE_ARG)
         return set_any_tvars(node, ctx.line, ctx.column, options, from_error=True)
+    elif node.tvar_tuple_index is not None:
+        # We also need to check if we are not performing a type variable tuple split.
+        unpack = find_unpack_in_list(args)
+        if unpack is not None:
+            unpack_arg = args[unpack]
+            assert isinstance(unpack_arg, UnpackType)
+            if isinstance(unpack_arg.type, TypeVarTupleType):
+                exp_prefix = node.tvar_tuple_index
+                act_prefix = unpack
+                exp_suffix = len(node.alias_tvars) - node.tvar_tuple_index - 1
+                act_suffix = len(args) - unpack - 1
+                if act_prefix < exp_prefix or act_suffix < exp_suffix:
+                    fail("TypeVarTuple cannot be split", ctx, code=codes.TYPE_ARG)
+                    return set_any_tvars(node, ctx.line, ctx.column, options, from_error=True)
     # TODO: we need to check args validity w.r.t alias.alias_tvars.
     # Otherwise invalid instantiations will be allowed in runtime context.
     # Note: in type context, these will be still caught by semanal_typeargs.
-    # Type aliases are special, since they can be expanded during semantic analysis,
-    # so we need to normalize them as soon as possible.
-    # TODO: can this cause an infinite recursion?
-    typ = TypeAliasType(node, flatten_nested_tuples(args), ctx.line, ctx.column)
+    typ = TypeAliasType(node, args, ctx.line, ctx.column)
     assert typ.alias is not None
     # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here.
     if (
@@ -1973,11 +1988,14 @@ def set_any_tvars(
     *,
     from_error: bool = False,
     disallow_any: bool = False,
+    special_form: bool = False,
     fail: MsgCallback | None = None,
     unexpanded_type: Type | None = None,
 ) -> TypeAliasType:
     if from_error or disallow_any:
         type_of_any = TypeOfAny.from_error
+    elif special_form:
+        type_of_any = TypeOfAny.special_form
     else:
         type_of_any = TypeOfAny.from_omitted_generics
     if disallow_any and node.alias_tvars:
@@ -2227,6 +2245,63 @@ def make_optional_type(t: Type) -> Type:
         return UnionType([t, NoneType()], t.line, t.column)
 
 
+def validate_instance(t: Instance, fail: MsgCallback) -> bool:
+    """Check if this is a well-formed instance with respect to argument count/positions."""
+    # TODO: combine logic with instantiate_type_alias().
+    if any(unknown_unpack(a) for a in t.args):
+        # This type is not ready to be validated, because of unknown total count.
+        # TODO: is it OK to fill with TypeOfAny.from_error instead of special form?
+        return False
+    if t.type.has_type_var_tuple_type:
+        correct = len(t.args) >= len(t.type.type_vars) - 1
+        if any(
+            isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance)
+            for a in t.args
+        ):
+            correct = True
+        if not correct:
+            exp_len = f"at least {len(t.type.type_vars) - 1}"
+            fail(
+                f"Bad number of arguments, expected: {exp_len}, given: {len(t.args)}",
+                t,
+                code=codes.TYPE_ARG,
+            )
+            return False
+        elif not t.args:
+            # The Any arguments should be set by the caller.
+            return False
+        else:
+            # We also need to check if we are not performing a type variable tuple split.
+            unpack = find_unpack_in_list(t.args)
+            if unpack is not None:
+                unpack_arg = t.args[unpack]
+                assert isinstance(unpack_arg, UnpackType)
+                if isinstance(unpack_arg.type, TypeVarTupleType):
+                    assert t.type.type_var_tuple_prefix is not None
+                    assert t.type.type_var_tuple_suffix is not None
+                    exp_prefix = t.type.type_var_tuple_prefix
+                    act_prefix = unpack
+                    exp_suffix = t.type.type_var_tuple_suffix
+                    act_suffix = len(t.args) - unpack - 1
+                    if act_prefix < exp_prefix or act_suffix < exp_suffix:
+                        fail("TypeVarTuple cannot be split", t, code=codes.TYPE_ARG)
+                        return False
+    elif any(isinstance(a, UnpackType) for a in t.args):
+        # A variadic unpack in fixed size instance (fixed unpacks must be flattened by the caller)
+        fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE)
+        return False
+    elif len(t.args) != len(t.type.type_vars):
+        # Invalid number of type parameters.
+        if t.args:
+            fail(
+                wrong_type_arg_count(len(t.type.type_vars), str(len(t.args)), t.type.name),
+                t,
+                code=codes.TYPE_ARG,
+            )
+        return False
+    return True
+
+
 def fix_instance_types(t: Type, fail: MsgCallback, note: MsgCallback, options: Options) -> None:
     """Recursively fix all instance types (type argument count) in a given type.
 
@@ -2244,7 +2319,7 @@ def __init__(self, fail: MsgCallback, note: MsgCallback, options: Options) -> No
 
     def visit_instance(self, typ: Instance) -> None:
         super().visit_instance(typ)
-        if len(typ.args) != len(typ.type.type_vars) and not typ.type.has_type_var_tuple_type:
+        if not validate_instance(typ, self.fail):
             fix_instance(
                 typ,
                 self.fail,
@@ -2269,3 +2344,17 @@ def visit_unbound_type(self, t: UnboundType) -> bool:
         if sym and sym.fullname in SELF_TYPE_NAMES:
             return True
         return super().visit_unbound_type(t)
+
+
+def unknown_unpack(t: Type) -> bool:
+    """Check if a given type is an unpack of an unknown type.
+
+    Unfortunately, there is no robust way to distinguish forward references from
+    genuine undefined names here. But this worked well so far, although it looks
+    quite fragile.
+    """
+    if isinstance(t, UnpackType):
+        unpacked = get_proper_type(t.type)
+        if isinstance(unpacked, AnyType) and unpacked.type_of_any == TypeOfAny.special_form:
+            return True
+    return False
diff --git a/mypy/typeops.py b/mypy/typeops.py
index f9c1914cc9a8..3efa3cc3e965 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -110,10 +110,8 @@ def tuple_fallback(typ: TupleType) -> Instance:
                 and unpacked_type.type.fullname == "builtins.tuple"
             ):
                 items.append(unpacked_type.args[0])
-            elif isinstance(unpacked_type, (AnyType, UninhabitedType)):
-                continue
             else:
-                raise NotImplementedError(unpacked_type)
+                raise NotImplementedError
         else:
             items.append(item)
     # TODO: we should really use a union here, tuple types are special.
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index f7faab4818c9..2b47ff30cdfb 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -619,8 +619,7 @@ T = TypeVar("T")
 Ts = TypeVarTuple("Ts")
 
 A = List[Tuple[T, Unpack[Ts], T]]
-B = A[Unpack[Ts]]
-x: B[int, str, str]
+x: A[int, str, str]
 reveal_type(x)  # N: Revealed type is "builtins.list[Tuple[builtins.int, builtins.str, builtins.str, builtins.int]]"
 [builtins fixtures/tuple.pyi]
 
@@ -1052,8 +1051,7 @@ reveal_type(y.fn)  # N: Revealed type is "def (builtins.int, builtins.str)"
 
 z: A[Unpack[Tuple[int, ...]]]
 reveal_type(z)  # N: Revealed type is "__main__.A[Unpack[builtins.tuple[builtins.int, ...]]]"
-# TODO: this requires fixing map_instance_to_supertype().
-# reveal_type(z[0])
+reveal_type(z[0])  # N: Revealed type is "builtins.int"
 reveal_type(z.fn)  # N: Revealed type is "def (*builtins.int)"
 
 t: A[int, Unpack[Tuple[int, str]], str]
@@ -1118,3 +1116,120 @@ reveal_type(td)  # N: Revealed type is "TypedDict('__main__.A', {'fn': def (buil
 def bad() -> int: ...
 td2 = A({"fn": bad, "val": 42})  # E: Incompatible types (expression has type "Callable[[], int]", TypedDict item "fn" has type "Callable[[], None]")
 [builtins fixtures/tuple.pyi]
+
+[case testFixedUnpackWithRegularInstance]
+from typing import Tuple, Generic, TypeVar
+from typing_extensions import Unpack
+
+T1 = TypeVar("T1")
+T2 = TypeVar("T2")
+T3 = TypeVar("T3")
+T4 = TypeVar("T4")
+
+class C(Generic[T1, T2, T3, T4]): ...
+x: C[int, Unpack[Alias], str]
+Alias = Tuple[int, str]
+reveal_type(x)  # N: Revealed type is "__main__.C[builtins.int, builtins.int, builtins.str, builtins.str]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicUnpackWithRegularInstance]
+from typing import Tuple, Generic, TypeVar
+from typing_extensions import Unpack
+
+T1 = TypeVar("T1")
+T2 = TypeVar("T2")
+T3 = TypeVar("T3")
+T4 = TypeVar("T4")
+
+class C(Generic[T1, T2, T3, T4]): ...
+x: C[int, Unpack[Alias], str, str]  # E: Unpack is only valid in a variadic position
+Alias = Tuple[int, ...]
+reveal_type(x)  # N: Revealed type is "__main__.C[Any, Any, Any, Any]"
+y: C[int, Unpack[Undefined]]  # E: Name "Undefined" is not defined
+reveal_type(y)  # N: Revealed type is "__main__.C[Any, Any, Any, Any]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicAliasInvalidUnpackNoCrash]
+from typing import Tuple, Generic, Union, List
+from typing_extensions import Unpack, TypeVarTuple
+
+Ts = TypeVarTuple("Ts")
+Alias = Tuple[int, Unpack[Ts], str]
+
+A = Union[int, str]
+x: List[Alias[int, Unpack[A], str]]  # E: "Union[int, str]" cannot be unpacked (must be tuple or TypeVarTuple)
+reveal_type(x)  # N: Revealed type is "builtins.list[Tuple[builtins.int, builtins.int, Unpack[builtins.tuple[Any, ...]], builtins.str, builtins.str]]"
+y: List[Alias[int, Unpack[Undefined], str]]  # E: Name "Undefined" is not defined
+reveal_type(y)  # N: Revealed type is "builtins.list[Tuple[builtins.int, Unpack[builtins.tuple[Any, ...]], builtins.str]]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicAliasForwardRefToFixedUnpack]
+from typing import Tuple, Generic, TypeVar
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+Alias = Tuple[T, Unpack[Ts], S]
+x: Alias[int, Unpack[Other]]
+Other = Tuple[int, str]
+reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.str]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicAliasForwardRefToVariadicUnpack]
+from typing import Tuple, Generic, TypeVar
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+Alias = Tuple[T, Unpack[Ts], S]
+x: Alias[int, Unpack[Other]]
+Other = Tuple[int, ...]
+reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]], builtins.int]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicInstanceStrictPrefixSuffixCheck]
+from typing import Tuple, Generic, TypeVar
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+class C(Generic[T, Unpack[Ts], S]): ...
+
+def foo(x: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]:
+    y: C[int, Unpack[Ts]]  # E: TypeVarTuple cannot be split
+    z: C[Unpack[Ts], int]  # E: TypeVarTuple cannot be split
+    return x
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicAliasStrictPrefixSuffixCheck]
+from typing import Tuple, TypeVar
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+Alias = Tuple[T, Unpack[Ts], S]
+
+def foo(x: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]:
+    y: Alias[int, Unpack[Ts]]  # E: TypeVarTuple cannot be split
+    z: Alias[Unpack[Ts], int]  # E: TypeVarTuple cannot be split
+    return x
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleWithIsInstance]
+# flags: --warn-unreachable
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+TP = TypeVarTuple("TP")
+class A(Tuple[Unpack[TP]]): ...
+
+def test(d: A[int, str]) -> None:
+    if isinstance(d, A):
+        reveal_type(d)  # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.A[builtins.int, builtins.str]]"
+    else:
+        reveal_type(d)  # E: Statement is unreachable
+[builtins fixtures/isinstancelist.pyi]

From 49419835045b09c98b545171abb10384b6ecf6a9 Mon Sep 17 00:00:00 2001
From: Matt Bogosian <matt@bogosian.net>
Date: Fri, 8 Sep 2023 01:46:14 -0500
Subject: [PATCH 117/288] Differentiate between venv and tox setups in
 CONTRIBUTING.md (#16067)

---
 CONTRIBUTING.md | 42 ++++++++++++++++++++++++++++++------------
 tox.ini         |  1 +
 2 files changed, 31 insertions(+), 12 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 82e55f437e87..46292c301406 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -62,18 +62,6 @@ like this:
 python3 runtests.py
 ```
 
-You can also use `tox` to run tests (`tox` handles setting up the test environment for you):
-
-```bash
-tox run -e py
-
-# Or some specific python version:
-tox run -e py39
-
-# Or some specific command:
-tox run -e lint
-```
-
 Some useful commands for running specific tests include:
 
 ```bash
@@ -95,6 +83,36 @@ python runtests.py lint
 For an in-depth guide on running and writing tests,
 see [the README in the test-data directory](test-data/unit/README.md).
 
+#### Using `tox`
+
+You can also use [`tox`](https://tox.wiki/en/latest/) to run tests and other commands.
+`tox` handles setting up test environments for you.
+
+```bash
+# Run tests
+tox run -e py
+
+# Run tests using some specific Python version
+tox run -e py311
+
+# Run a specific command
+tox run -e lint
+
+# Run a single test from the test suite
+tox run -e py -- -n0 -k 'test_name'
+
+# Run all test cases in the "test-data/unit/check-dataclasses.test" file using
+# Python 3.11 specifically
+tox run -e py311 -- mypy/test/testcheck.py::TypeCheckSuite::check-dataclasses.test
+
+# Set up a development environment with all the project libraries and run a command
+tox -e dev -- mypy --verbose test_case.py
+tox -e dev --override testenv:dev.allowlist_externals+=env -- env  # inspect the environment
+```
+
+If you don't already have `tox` installed, you can use a virtual environment as
+described above to install `tox` via `pip` (e.g., ``python3 -m pip install tox``).
+
 ## First time contributors
 
 If you're looking for things to help with, browse our [issue tracker](https://github.com/python/mypy/issues)!
diff --git a/tox.ini b/tox.ini
index e07acdc5200d..31aed1a1ef48 100644
--- a/tox.ini
+++ b/tox.ini
@@ -30,6 +30,7 @@ deps =
 commands =
     python -m pip list --format=columns
     python -c 'import sys; print(sys.executable)'
+    {posargs}
 
 [testenv:docs]
 description = invoke sphinx-build to build the HTML docs

From f9dc5610423d368bcf804b6a88a2d8502e62df1c Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Sun, 10 Sep 2023 01:55:57 -0400
Subject: [PATCH 118/288] Fix __post_init__() internal error (#16080)

Fixes #16057.
---
 mypy/checker.py                       | 5 ++++-
 mypy/nodes.py                         | 1 -
 mypy/plugins/dataclasses.py           | 4 ++--
 test-data/unit/check-dataclasses.test | 4 ++++
 4 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index fa7c645873d0..5a74f019dcf4 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -1076,6 +1076,8 @@ def check_func_item(
 
         if name == "__exit__":
             self.check__exit__return_type(defn)
+        # TODO: the following logic should move to the dataclasses plugin
+        #  https://github.com/python/mypy/issues/15515
         if name == "__post_init__":
             if dataclasses_plugin.is_processed_dataclass(defn.info):
                 dataclasses_plugin.check_post_init(self, defn, defn.info)
@@ -2882,7 +2884,8 @@ def check_assignment(
                     typ = self.expr_checker.accept(rvalue)
                     self.check_match_args(inferred, typ, lvalue)
                 if name == "__post_init__":
-                    if dataclasses_plugin.is_processed_dataclass(self.scope.active_class()):
+                    active_class = self.scope.active_class()
+                    if active_class and dataclasses_plugin.is_processed_dataclass(active_class):
                         self.fail(message_registry.DATACLASS_POST_INIT_MUST_BE_A_FUNCTION, rvalue)
 
             # Defer PartialType's super type checking.
diff --git a/mypy/nodes.py b/mypy/nodes.py
index d29e99ccace7..6556cd910b46 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -515,7 +515,6 @@ def __init__(self) -> None:
         # Original, not semantically analyzed type (used for reprocessing)
         self.unanalyzed_type: mypy.types.ProperType | None = None
         # If method, reference to TypeInfo
-        # TODO: Type should be Optional[TypeInfo]
         self.info = FUNC_NO_INFO
         self.is_property = False
         self.is_class = False
diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py
index 8b34c28b6832..99f079705c3f 100644
--- a/mypy/plugins/dataclasses.py
+++ b/mypy/plugins/dataclasses.py
@@ -1070,8 +1070,8 @@ def replace_function_sig_callback(ctx: FunctionSigContext) -> CallableType:
     )
 
 
-def is_processed_dataclass(info: TypeInfo | None) -> bool:
-    return info is not None and "dataclass" in info.metadata
+def is_processed_dataclass(info: TypeInfo) -> bool:
+    return bool(info) and "dataclass" in info.metadata
 
 
 def check_post_init(api: TypeChecker, defn: FuncItem, info: TypeInfo) -> None:
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index 8a50e7124d05..35df84658259 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -2280,6 +2280,10 @@ reveal_type(a2)  # N: Revealed type is "__main__.A[builtins.int]"
 
 [builtins fixtures/tuple.pyi]
 
+[case testPostInitNotMethod]
+def __post_init__() -> None:
+    pass
+
 [case testPostInitCorrectSignature]
 from typing import Any, Generic, TypeVar, Callable, Self
 from dataclasses import dataclass, InitVar

From ed18fea5b17ef3a969b37b4906dd7c237ddb1825 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 9 Sep 2023 23:35:07 -0700
Subject: [PATCH 119/288] Document and rename overload-overlap error code
 (#16074)

A new error code was introduced in
https://github.com/python/mypy/pull/16061

As per https://github.com/python/mypy/pull/16068, we didn't previously
run doc builds on changes to errorcodes.py, causing tests to fail on
master when this was merged.

Renaming the code as per:
https://github.com/python/mypy/pull/16061#issuecomment-1710613890 All
type ignores should be unsafe, so we should save the unsafe adjective
for things that are really unsafe. As it stands, there are many cases
where overloads overlap somewhat benignly.

Fixes #8656
---
 docs/source/error_code_list.rst      | 35 ++++++++++++++++++++++++++++
 docs/source/more_types.rst           |  5 +++-
 mypy/errorcodes.py                   |  4 ++--
 mypy/messages.py                     |  2 +-
 mypy/types.py                        |  2 +-
 test-data/unit/check-errorcodes.test |  2 +-
 6 files changed, 44 insertions(+), 6 deletions(-)

diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst
index a865a4dd1532..4decd37e6e8a 100644
--- a/docs/source/error_code_list.rst
+++ b/docs/source/error_code_list.rst
@@ -1114,6 +1114,41 @@ Warn about cases where a bytes object may be converted to a string in an unexpec
     print(f"The alphabet starts with {b!r}")  # The alphabet starts with b'abc'
     print(f"The alphabet starts with {b.decode('utf-8')}")  # The alphabet starts with abc
 
+.. _code-overload-overlap:
+
+Check that overloaded functions don't overlap [overload-overlap]
+----------------------------------------------------------------
+
+Warn if multiple ``@overload`` variants overlap in potentially unsafe ways.
+This guards against the following situation:
+
+.. code-block:: python
+
+    from typing import overload
+
+    class A: ...
+    class B(A): ...
+
+    @overload
+    def foo(x: B) -> int: ...  # Error: Overloaded function signatures 1 and 2 overlap with incompatible return types  [overload-overlap]
+    @overload
+    def foo(x: A) -> str: ...
+    def foo(x): ...
+
+    def takes_a(a: A) -> str:
+        return foo(a)
+
+    a: A = B()
+    value = takes_a(a)
+    # mypy will think that value is a str, but it could actually be an int
+    reveal_type(value) # Revealed type is "builtins.str"
+
+
+Note that in cases where you ignore this error, mypy will usually still infer the
+types you expect.
+
+See :ref:`overloading <function-overloading>` for more explanation.
+
 .. _code-annotation-unchecked:
 
 Notify about an annotation in an unchecked function [annotation-unchecked]
diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst
index 4e6e9204fdca..b27764a9e87c 100644
--- a/docs/source/more_types.rst
+++ b/docs/source/more_types.rst
@@ -501,7 +501,7 @@ To prevent these kinds of issues, mypy will detect and prohibit inherently unsaf
 overlapping overloads on a best-effort basis. Two variants are considered unsafely
 overlapping when both of the following are true:
 
-1. All of the arguments of the first variant are compatible with the second.
+1. All of the arguments of the first variant are potentially compatible with the second.
 2. The return type of the first variant is *not* compatible with (e.g. is not a
    subtype of) the second.
 
@@ -510,6 +510,9 @@ the ``object`` argument in the second, yet the ``int`` return type is not a subt
 ``str``. Both conditions are true, so mypy will correctly flag ``unsafe_func`` as
 being unsafe.
 
+Note that in cases where you ignore the overlapping overload error, mypy will usually
+still infer the types you expect at callsites.
+
 However, mypy will not detect *all* unsafe uses of overloads. For example,
 suppose we modify the above snippet so it calls ``summarize`` instead of
 ``unsafe_func``:
diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py
index 70b8cffe9053..cd9978c2f31c 100644
--- a/mypy/errorcodes.py
+++ b/mypy/errorcodes.py
@@ -262,8 +262,8 @@ def __hash__(self) -> int:
 # This is a catch-all for remaining uncategorized errors.
 MISC: Final = ErrorCode("misc", "Miscellaneous other checks", "General")
 
-UNSAFE_OVERLOAD: Final[ErrorCode] = ErrorCode(
-    "unsafe-overload",
+OVERLOAD_OVERLAP: Final[ErrorCode] = ErrorCode(
+    "overload-overlap",
     "Warn if multiple @overload variants overlap in unsafe ways",
     "General",
     sub_code_of=MISC,
diff --git a/mypy/messages.py b/mypy/messages.py
index a58c5f91c4b1..b6fdaf06a8e0 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -1604,7 +1604,7 @@ def overloaded_signatures_overlap(self, index1: int, index2: int, context: Conte
             "Overloaded function signatures {} and {} overlap with "
             "incompatible return types".format(index1, index2),
             context,
-            code=codes.UNSAFE_OVERLOAD,
+            code=codes.OVERLOAD_OVERLAP,
         )
 
     def overloaded_signature_will_never_match(
diff --git a/mypy/types.py b/mypy/types.py
index cee4595b67cc..04d90c9dc124 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -3019,7 +3019,7 @@ def get_proper_type(typ: Type | None) -> ProperType | None:
 
 
 @overload
-def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]:  # type: ignore[unsafe-overload]
+def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]:  # type: ignore[overload-overlap]
     ...
 
 
diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test
index 72edf2f22c05..ac7c8b4c9f9d 100644
--- a/test-data/unit/check-errorcodes.test
+++ b/test-data/unit/check-errorcodes.test
@@ -1077,7 +1077,7 @@ x = 1  # type: ignore  # E: Unused "type: ignore" comment  [unused-ignore]
 from typing import overload, Union
 
 @overload
-def unsafe_func(x: int) -> int: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types  [unsafe-overload]
+def unsafe_func(x: int) -> int: ...  # E: Overloaded function signatures 1 and 2 overlap with incompatible return types  [overload-overlap]
 @overload
 def unsafe_func(x: object) -> str: ...
 def unsafe_func(x: object) -> Union[int, str]:

From 9a35360739ced871feb6331a14a7bbacce00c7dc Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sun, 10 Sep 2023 21:11:49 +0300
Subject: [PATCH 120/288] Add `add_overloaded_method_to_class` helper to
 `plugins/common.py` (#16038)

There are several changes:

1. `add_overloaded_method_to_class` itself. It is very useful for plugin
authors, because right now it is quite easy to add a regular method, but
it is very hard to add a method with `@overload`s. I don't think that
a user should have to face all the challenges that I've covered in this method.
Moreover, it is quite easy even for experienced developers to forget
some flags / props / etc. (I am pretty sure that I might have forgotten
something in the implementation)
2. `add_overloaded_method_to_class` and `add_method_to_class` now return
added nodes, it is also helpful if you want to do something with this
node in your plugin after it is created
3. I've refactored how `add_method_to_class` works and reused its parts
in the new method as well
4. `tvar_def` in `add_method_to_class` can now accept a list of type
vars, not just one

Notice that `add_method_to_class` is unchanged from the user's POV, it
should continue to work as before.

Tests are also updated to check that our overloads are correct.

Things to do later (in the next PRs / releases):
1. We can possibly add `is_final` param to methods as well
2. We can also support `@property` in a separate method at some point

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 mypy/plugins/common.py                        | 136 +++++++++++++++---
 test-data/unit/check-custom-plugin.test       |  24 +++-
 test-data/unit/check-incremental.test         |  38 +++++
 test-data/unit/deps.test                      |   6 +-
 .../unit/plugins/add_overloaded_method.py     |  41 ++++++
 5 files changed, 222 insertions(+), 23 deletions(-)
 create mode 100644 test-data/unit/plugins/add_overloaded_method.py

diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py
index 55f2870cadb4..84d50b7086c6 100644
--- a/mypy/plugins/common.py
+++ b/mypy/plugins/common.py
@@ -1,5 +1,7 @@
 from __future__ import annotations
 
+from typing import NamedTuple
+
 from mypy.argmap import map_actuals_to_formals
 from mypy.fixup import TypeFixer
 from mypy.nodes import (
@@ -16,9 +18,11 @@
     JsonDict,
     NameExpr,
     Node,
+    OverloadedFuncDef,
     PassStmt,
     RefExpr,
     SymbolTableNode,
+    TypeInfo,
     Var,
 )
 from mypy.plugin import CheckerPluginInterface, ClassDefContext, SemanticAnalyzerPluginInterface
@@ -209,24 +213,99 @@ def add_method(
     )
 
 
+class MethodSpec(NamedTuple):
+    """Represents a method signature to be added, except for `name`."""
+
+    args: list[Argument]
+    return_type: Type
+    self_type: Type | None = None
+    tvar_defs: list[TypeVarType] | None = None
+
+
 def add_method_to_class(
     api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
     cls: ClassDef,
     name: str,
+    # MethodSpec items kept for backward compatibility:
     args: list[Argument],
     return_type: Type,
     self_type: Type | None = None,
-    tvar_def: TypeVarType | None = None,
+    tvar_def: list[TypeVarType] | TypeVarType | None = None,
     is_classmethod: bool = False,
     is_staticmethod: bool = False,
-) -> None:
+) -> FuncDef | Decorator:
     """Adds a new method to a class definition."""
+    _prepare_class_namespace(cls, name)
 
-    assert not (
-        is_classmethod is True and is_staticmethod is True
-    ), "Can't add a new method that's both staticmethod and classmethod."
+    if tvar_def is not None and not isinstance(tvar_def, list):
+        tvar_def = [tvar_def]
+
+    func, sym = _add_method_by_spec(
+        api,
+        cls.info,
+        name,
+        MethodSpec(args=args, return_type=return_type, self_type=self_type, tvar_defs=tvar_def),
+        is_classmethod=is_classmethod,
+        is_staticmethod=is_staticmethod,
+    )
+    cls.info.names[name] = sym
+    cls.info.defn.defs.body.append(func)
+    return func
 
+
+def add_overloaded_method_to_class(
+    api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
+    cls: ClassDef,
+    name: str,
+    items: list[MethodSpec],
+    is_classmethod: bool = False,
+    is_staticmethod: bool = False,
+) -> OverloadedFuncDef:
+    """Adds a new overloaded method to a class definition."""
+    assert len(items) >= 2, "Overloads must contain at least two cases"
+
+    # Save old definition, if it exists.
+    _prepare_class_namespace(cls, name)
+
+    # Create function bodies for each passed method spec.
+    funcs: list[Decorator | FuncDef] = []
+    for item in items:
+        func, _sym = _add_method_by_spec(
+            api,
+            cls.info,
+            name=name,
+            spec=item,
+            is_classmethod=is_classmethod,
+            is_staticmethod=is_staticmethod,
+        )
+        if isinstance(func, FuncDef):
+            var = Var(func.name, func.type)
+            var.set_line(func.line)
+            func.is_decorated = True
+            func.deco_line = func.line
+
+            deco = Decorator(func, [], var)
+        else:
+            deco = func
+        deco.is_overload = True
+        funcs.append(deco)
+
+    # Create the final OverloadedFuncDef node:
+    overload_def = OverloadedFuncDef(funcs)
+    overload_def.info = cls.info
+    overload_def.is_class = is_classmethod
+    overload_def.is_static = is_staticmethod
+    sym = SymbolTableNode(MDEF, overload_def)
+    sym.plugin_generated = True
+
+    cls.info.names[name] = sym
+    cls.info.defn.defs.body.append(overload_def)
+    return overload_def
+
+
+def _prepare_class_namespace(cls: ClassDef, name: str) -> None:
     info = cls.info
+    assert info
 
     # First remove any previously generated methods with the same name
     # to avoid clashes and problems in the semantic analyzer.
@@ -235,6 +314,29 @@ def add_method_to_class(
         if sym.plugin_generated and isinstance(sym.node, FuncDef):
             cls.defs.body.remove(sym.node)
 
+    # NOTE: we would like the plugin generated node to dominate, but we still
+    # need to keep any existing definitions so they get semantically analyzed.
+    if name in info.names:
+        # Get a nice unique name instead.
+        r_name = get_unique_redefinition_name(name, info.names)
+        info.names[r_name] = info.names[name]
+
+
+def _add_method_by_spec(
+    api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
+    info: TypeInfo,
+    name: str,
+    spec: MethodSpec,
+    *,
+    is_classmethod: bool,
+    is_staticmethod: bool,
+) -> tuple[FuncDef | Decorator, SymbolTableNode]:
+    args, return_type, self_type, tvar_defs = spec
+
+    assert not (
+        is_classmethod is True and is_staticmethod is True
+    ), "Can't add a new method that's both staticmethod and classmethod."
+
     if isinstance(api, SemanticAnalyzerPluginInterface):
         function_type = api.named_type("builtins.function")
     else:
@@ -258,8 +360,8 @@ def add_method_to_class(
         arg_kinds.append(arg.kind)
 
     signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
-    if tvar_def:
-        signature.variables = [tvar_def]
+    if tvar_defs:
+        signature.variables = tvar_defs
 
     func = FuncDef(name, args, Block([PassStmt()]))
     func.info = info
@@ -269,13 +371,6 @@ def add_method_to_class(
     func._fullname = info.fullname + "." + name
     func.line = info.line
 
-    # NOTE: we would like the plugin generated node to dominate, but we still
-    # need to keep any existing definitions so they get semantically analyzed.
-    if name in info.names:
-        # Get a nice unique name instead.
-        r_name = get_unique_redefinition_name(name, info.names)
-        info.names[r_name] = info.names[name]
-
     # Add decorator for is_staticmethod. It's unnecessary for is_classmethod.
     if is_staticmethod:
         func.is_decorated = True
@@ -286,12 +381,12 @@ def add_method_to_class(
         dec = Decorator(func, [], v)
         dec.line = info.line
         sym = SymbolTableNode(MDEF, dec)
-    else:
-        sym = SymbolTableNode(MDEF, func)
-    sym.plugin_generated = True
-    info.names[name] = sym
+        sym.plugin_generated = True
+        return dec, sym
 
-    info.defn.defs.body.append(func)
+    sym = SymbolTableNode(MDEF, func)
+    sym.plugin_generated = True
+    return func, sym
 
 
 def add_attribute_to_class(
@@ -304,7 +399,7 @@ def add_attribute_to_class(
     override_allow_incompatible: bool = False,
     fullname: str | None = None,
     is_classvar: bool = False,
-) -> None:
+) -> Var:
     """
     Adds a new attribute to a class definition.
     This currently only generates the symbol table entry and no corresponding AssignmentStatement
@@ -335,6 +430,7 @@ def add_attribute_to_class(
     info.names[name] = SymbolTableNode(
         MDEF, node, plugin_generated=True, no_serialize=no_serialize
     )
+    return node
 
 
 def deserialize_and_fixup_type(data: str | JsonDict, api: SemanticAnalyzerPluginInterface) -> Type:
diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test
index 9a0668f98c21..22374d09cf9f 100644
--- a/test-data/unit/check-custom-plugin.test
+++ b/test-data/unit/check-custom-plugin.test
@@ -1011,13 +1011,35 @@ class BaseAddMethod: pass
 class MyClass(BaseAddMethod):
     pass
 
-my_class = MyClass()
 reveal_type(MyClass.foo_classmethod)  # N: Revealed type is "def ()"
 reveal_type(MyClass.foo_staticmethod)  # N: Revealed type is "def (builtins.int) -> builtins.str"
+
+my_class = MyClass()
+reveal_type(my_class.foo_classmethod)  # N: Revealed type is "def ()"
+reveal_type(my_class.foo_staticmethod)  # N: Revealed type is "def (builtins.int) -> builtins.str"
 [file mypy.ini]
 \[mypy]
 plugins=<ROOT>/test-data/unit/plugins/add_classmethod.py
 
+[case testAddOverloadedMethodPlugin]
+# flags: --config-file tmp/mypy.ini
+class AddOverloadedMethod: pass
+
+class MyClass(AddOverloadedMethod):
+    pass
+
+reveal_type(MyClass.method)  # N: Revealed type is "Overload(def (self: __main__.MyClass, arg: builtins.int) -> builtins.str, def (self: __main__.MyClass, arg: builtins.str) -> builtins.int)"
+reveal_type(MyClass.clsmethod)  # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+reveal_type(MyClass.stmethod)  # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+
+my_class = MyClass()
+reveal_type(my_class.method)  # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+reveal_type(my_class.clsmethod)  # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+reveal_type(my_class.stmethod)  # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+[file mypy.ini]
+\[mypy]
+plugins=<ROOT>/test-data/unit/plugins/add_overloaded_method.py
+
 [case testCustomErrorCodePlugin]
 # flags: --config-file tmp/mypy.ini  --show-error-codes
 def main() -> int:
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index fcab0545b982..b4cd21aa552c 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -5935,6 +5935,44 @@ tmp/b.py:4: note: Revealed type is "def ()"
 tmp/b.py:5: note: Revealed type is "def (builtins.int) -> builtins.str"
 tmp/b.py:6: note: Revealed type is "def ()"
 tmp/b.py:7: note: Revealed type is "def (builtins.int) -> builtins.str"
+
+[case testIncrementalAddOverloadedMethodPlugin]
+# flags: --config-file tmp/mypy.ini
+import b
+
+[file mypy.ini]
+\[mypy]
+plugins=<ROOT>/test-data/unit/plugins/add_overloaded_method.py
+
+[file a.py]
+class AddOverloadedMethod: pass
+
+class MyClass(AddOverloadedMethod):
+    pass
+
+[file b.py]
+import a
+
+[file b.py.2]
+import a
+
+reveal_type(a.MyClass.method)
+reveal_type(a.MyClass.clsmethod)
+reveal_type(a.MyClass.stmethod)
+
+my_class = a.MyClass()
+reveal_type(my_class.method)
+reveal_type(my_class.clsmethod)
+reveal_type(my_class.stmethod)
+[rechecked b]
+[out2]
+tmp/b.py:3: note: Revealed type is "Overload(def (self: a.MyClass, arg: builtins.int) -> builtins.str, def (self: a.MyClass, arg: builtins.str) -> builtins.int)"
+tmp/b.py:4: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+tmp/b.py:5: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+tmp/b.py:8: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+tmp/b.py:9: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+tmp/b.py:10: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)"
+
 [case testGenericNamedTupleSerialization]
 import b
 [file a.py]
diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test
index c3295b79e4ed..5e77ff1d85e0 100644
--- a/test-data/unit/deps.test
+++ b/test-data/unit/deps.test
@@ -1387,12 +1387,13 @@ class B(A):
 <m.A.(abstract)> -> <m.B.__init__>, m
 <m.A.__dataclass_fields__> -> <m.B.__dataclass_fields__>
 <m.A.__init__> -> <m.B.__init__>, m.B.__init__
-<m.A.__mypy-replace> -> <m.B.__mypy-replace>, m.B.__mypy-replace
+<m.A.__mypy-replace> -> <m.B.__mypy-replace>, m, m.B.__mypy-replace
 <m.A.__new__> -> <m.B.__new__>
 <m.A.x> -> <m.B.x>
 <m.A.y> -> <m.B.y>
 <m.A> -> m, m.A, m.B
 <m.A[wildcard]> -> m
+<m.B.__mypy-replace> -> m
 <m.B.y> -> m
 <m.B> -> m.B
 <m.Z> -> m
@@ -1419,12 +1420,13 @@ class B(A):
 <m.A.__dataclass_fields__> -> <m.B.__dataclass_fields__>
 <m.A.__init__> -> <m.B.__init__>, m.B.__init__
 <m.A.__match_args__> -> <m.B.__match_args__>
-<m.A.__mypy-replace> -> <m.B.__mypy-replace>, m.B.__mypy-replace
+<m.A.__mypy-replace> -> <m.B.__mypy-replace>, m, m.B.__mypy-replace
 <m.A.__new__> -> <m.B.__new__>
 <m.A.x> -> <m.B.x>
 <m.A.y> -> <m.B.y>
 <m.A> -> m, m.A, m.B
 <m.A[wildcard]> -> m
+<m.B.__mypy-replace> -> m
 <m.B.y> -> m
 <m.B> -> m.B
 <m.Z> -> m
diff --git a/test-data/unit/plugins/add_overloaded_method.py b/test-data/unit/plugins/add_overloaded_method.py
new file mode 100644
index 000000000000..efda848f790c
--- /dev/null
+++ b/test-data/unit/plugins/add_overloaded_method.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from typing import Callable
+
+from mypy.nodes import ARG_POS, Argument, Var
+from mypy.plugin import ClassDefContext, Plugin
+from mypy.plugins.common import MethodSpec, add_overloaded_method_to_class
+
+
+class OverloadedMethodPlugin(Plugin):
+    def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
+        if "AddOverloadedMethod" in fullname:
+            return add_overloaded_method_hook
+        return None
+
+
+def add_overloaded_method_hook(ctx: ClassDefContext) -> None:
+    add_overloaded_method_to_class(ctx.api, ctx.cls, "method", _generate_method_specs(ctx))
+    add_overloaded_method_to_class(
+        ctx.api, ctx.cls, "clsmethod", _generate_method_specs(ctx), is_classmethod=True
+    )
+    add_overloaded_method_to_class(
+        ctx.api, ctx.cls, "stmethod", _generate_method_specs(ctx), is_staticmethod=True
+    )
+
+
+def _generate_method_specs(ctx: ClassDefContext) -> list[MethodSpec]:
+    return [
+        MethodSpec(
+            args=[Argument(Var("arg"), ctx.api.named_type("builtins.int"), None, ARG_POS)],
+            return_type=ctx.api.named_type("builtins.str"),
+        ),
+        MethodSpec(
+            args=[Argument(Var("arg"), ctx.api.named_type("builtins.str"), None, ARG_POS)],
+            return_type=ctx.api.named_type("builtins.int"),
+        ),
+    ]
+
+
+def plugin(version: str) -> type[OverloadedMethodPlugin]:
+    return OverloadedMethodPlugin

From 9e520c38777267495642845f070be4383f50342d Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 11 Sep 2023 20:02:31 +0100
Subject: [PATCH 121/288] Allow TypedDict unpacking in Callable types (#16083)

Fixes https://github.com/python/mypy/issues/16082

Currently we only allow `Unpack` of a TypedDict when it appears in a
function definition. This PR also allows this in `Callable` types,
similarly to how we do this for variadic types.

Note this still doesn't allow having both variadic unpack and a
TypedDict unpack in the same `Callable`. Supporting this is tricky, so
let's not do this until people actually ask for it. FWIW we can
always suggest callback protocols for such tricky cases.
---
 mypy/exprtotype.py                |  4 +++-
 mypy/fastparse.py                 |  2 +-
 mypy/semanal_typeargs.py          |  4 +++-
 mypy/typeanal.py                  | 13 ++++++++++++-
 mypy/types.py                     |  7 +++++--
 test-data/unit/check-varargs.test | 15 +++++++++++++++
 6 files changed, 39 insertions(+), 6 deletions(-)

diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py
index b82d35607ef1..5f0ef79acbd7 100644
--- a/mypy/exprtotype.py
+++ b/mypy/exprtotype.py
@@ -196,6 +196,8 @@ def expr_to_unanalyzed_type(
     elif isinstance(expr, EllipsisExpr):
         return EllipsisType(expr.line)
     elif allow_unpack and isinstance(expr, StarExpr):
-        return UnpackType(expr_to_unanalyzed_type(expr.expr, options, allow_new_syntax))
+        return UnpackType(
+            expr_to_unanalyzed_type(expr.expr, options, allow_new_syntax), from_star_syntax=True
+        )
     else:
         raise TypeTranslationError()
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index a96e697d40bf..fe158d468ce8 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -2041,7 +2041,7 @@ def visit_Attribute(self, n: Attribute) -> Type:
 
     # Used for Callable[[X *Ys, Z], R]
     def visit_Starred(self, n: ast3.Starred) -> Type:
-        return UnpackType(self.visit(n.value))
+        return UnpackType(self.visit(n.value), from_star_syntax=True)
 
     # List(expr* elts, expr_context ctx)
     def visit_List(self, n: ast3.List) -> Type:
diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py
index 3e11951376c9..ed04b30e90ba 100644
--- a/mypy/semanal_typeargs.py
+++ b/mypy/semanal_typeargs.py
@@ -214,7 +214,9 @@ def visit_unpack_type(self, typ: UnpackType) -> None:
             # Avoid extra errors if there were some errors already. Also interpret plain Any
             # as tuple[Any, ...] (this is better for the code in type checker).
             self.fail(
-                message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ
+                message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)),
+                typ.type,
+                code=codes.VALID_TYPE,
             )
         typ.type = self.named_type("builtins.tuple", [AnyType(TypeOfAny.from_error)])
 
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index e297f2bf1631..385c5d35d67f 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -961,7 +961,7 @@ def visit_unpack_type(self, t: UnpackType) -> Type:
         if not self.allow_unpack:
             self.fail(message_registry.INVALID_UNPACK_POSITION, t.type, code=codes.VALID_TYPE)
             return AnyType(TypeOfAny.from_error)
-        return UnpackType(self.anal_type(t.type))
+        return UnpackType(self.anal_type(t.type), from_star_syntax=t.from_star_syntax)
 
     def visit_parameters(self, t: Parameters) -> Type:
         raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars")
@@ -969,6 +969,7 @@ def visit_parameters(self, t: Parameters) -> Type:
     def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type:
         # Every Callable can bind its own type variables, if they're not in the outer scope
         with self.tvar_scope_frame():
+            unpacked_kwargs = False
             if self.defining_alias:
                 variables = t.variables
             else:
@@ -996,6 +997,15 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type:
                         )
                         validated_args.append(AnyType(TypeOfAny.from_error))
                     else:
+                        if nested and isinstance(at, UnpackType) and i == star_index:
+                            # TODO: it would be better to avoid this get_proper_type() call.
+                            p_at = get_proper_type(at.type)
+                            if isinstance(p_at, TypedDictType) and not at.from_star_syntax:
+                                # Automatically detect Unpack[Foo] in Callable as backwards
+                                # compatible syntax for **Foo, if Foo is a TypedDict.
+                                at = p_at
+                                arg_kinds[i] = ARG_STAR2
+                                unpacked_kwargs = True
                         validated_args.append(at)
                 arg_types = validated_args
             # If there were multiple (invalid) unpacks, the arg types list will become shorter,
@@ -1013,6 +1023,7 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type:
                 fallback=(t.fallback if t.fallback.type else self.named_type("builtins.function")),
                 variables=self.anal_var_defs(variables),
                 type_guard=special,
+                unpack_kwargs=unpacked_kwargs,
             )
         return ret
 
diff --git a/mypy/types.py b/mypy/types.py
index 04d90c9dc124..22fcd601d6a0 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1053,11 +1053,14 @@ class UnpackType(ProperType):
     wild west, technically anything can be present in the wrapped type.
     """
 
-    __slots__ = ["type"]
+    __slots__ = ["type", "from_star_syntax"]
 
-    def __init__(self, typ: Type, line: int = -1, column: int = -1) -> None:
+    def __init__(
+        self, typ: Type, line: int = -1, column: int = -1, from_star_syntax: bool = False
+    ) -> None:
         super().__init__(line, column)
         self.type = typ
+        self.from_star_syntax = from_star_syntax
 
     def accept(self, visitor: TypeVisitor[T]) -> T:
         return visitor.visit_unpack_type(self)
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
index ef2c3c57fad5..41668e991972 100644
--- a/test-data/unit/check-varargs.test
+++ b/test-data/unit/check-varargs.test
@@ -1079,3 +1079,18 @@ class C:
 class D:
     def __init__(self, **kwds: Unpack[int, str]) -> None: ...  # E: Unpack[...] requires exactly one type argument
 [builtins fixtures/dict.pyi]
+
+[case testUnpackInCallableType]
+from typing import Callable
+from typing_extensions import Unpack, TypedDict
+
+class TD(TypedDict):
+    key: str
+    value: str
+
+foo: Callable[[Unpack[TD]], None]
+foo(key="yes", value=42)  # E: Argument "value" has incompatible type "int"; expected "str"
+foo(key="yes", value="ok")
+
+bad: Callable[[*TD], None]  # E: "TD" cannot be unpacked (must be tuple or TypeVarTuple)
+[builtins fixtures/dict.pyi]

From 66fbf5b526ad8cfa127dd5cca68dcb2f770b1dd7 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Tue, 12 Sep 2023 18:19:53 +0100
Subject: [PATCH 122/288] [mypyc] Make tuple packing and unpacking more
 efficient (#16022)

Previously returning a tuple from a function resulted in redundant
increfs and decrefs
for each item, and similarly unpacking the returned tuple in an
assignment had extra
incref/decref pair per item. This PR introduces these changes to make
this better:
* Creating a tuple steals the items always.
* Accessing a tuple item optionally borrows the item.
* A borrowed reference can be turned into a regular one using the new
`Unborrow` op.
* The no-op `KeepAlive` op can steal the operands to avoid decrefing the
operands.

Assignment from tuple now uses the three final features to avoid increfs
and decrefs
when unpacking a tuple in assignment. The docstrings in this PR contain
additional
explanation of how this works.

In a micro-benchmark this improved performance by about 2-5%. In
realistic examples
the impact is likely small, but every little helps.

Here is an example where this helps:
```
def f() -> tuple[C, C]:
    return C(), C()  # Avoid 2 increfs and 2 decrefs

def g() -> None:
    x, y = f()  # Avoid 2 increfs and 2 decrefs
    ...
```

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 mypyc/analysis/dataflow.py              |  4 ++
 mypyc/analysis/ircheck.py               |  4 ++
 mypyc/analysis/selfleaks.py             |  4 ++
 mypyc/codegen/emitfunc.py               | 11 +++-
 mypyc/ir/ops.py                         | 67 ++++++++++++++++++++++++-
 mypyc/ir/pprint.py                      | 14 +++++-
 mypyc/irbuild/ll_builder.py             |  3 ++
 mypyc/irbuild/statement.py              | 23 +++++++++
 mypyc/test-data/irbuild-statements.test | 29 ++++++-----
 mypyc/test-data/refcount.test           | 60 ++++++++++++++++++++++
 10 files changed, 200 insertions(+), 19 deletions(-)

diff --git a/mypyc/analysis/dataflow.py b/mypyc/analysis/dataflow.py
index ee2ff06b0f03..cade0c823962 100644
--- a/mypyc/analysis/dataflow.py
+++ b/mypyc/analysis/dataflow.py
@@ -46,6 +46,7 @@
     Truncate,
     TupleGet,
     TupleSet,
+    Unborrow,
     Unbox,
     Unreachable,
     Value,
@@ -272,6 +273,9 @@ def visit_load_address(self, op: LoadAddress) -> GenAndKill[T]:
     def visit_keep_alive(self, op: KeepAlive) -> GenAndKill[T]:
         return self.visit_register_op(op)
 
+    def visit_unborrow(self, op: Unborrow) -> GenAndKill[T]:
+        return self.visit_register_op(op)
+
 
 class DefinedVisitor(BaseAnalysisVisitor[Value]):
     """Visitor for finding defined registers.
diff --git a/mypyc/analysis/ircheck.py b/mypyc/analysis/ircheck.py
index 2e6b7320e898..a31b1517b036 100644
--- a/mypyc/analysis/ircheck.py
+++ b/mypyc/analysis/ircheck.py
@@ -44,6 +44,7 @@
     Truncate,
     TupleGet,
     TupleSet,
+    Unborrow,
     Unbox,
     Unreachable,
     Value,
@@ -422,3 +423,6 @@ def visit_load_address(self, op: LoadAddress) -> None:
 
     def visit_keep_alive(self, op: KeepAlive) -> None:
         pass
+
+    def visit_unborrow(self, op: Unborrow) -> None:
+        pass
diff --git a/mypyc/analysis/selfleaks.py b/mypyc/analysis/selfleaks.py
index 288c366e50e5..80c2bc348bc2 100644
--- a/mypyc/analysis/selfleaks.py
+++ b/mypyc/analysis/selfleaks.py
@@ -40,6 +40,7 @@
     Truncate,
     TupleGet,
     TupleSet,
+    Unborrow,
     Unbox,
     Unreachable,
 )
@@ -184,6 +185,9 @@ def visit_load_address(self, op: LoadAddress) -> GenAndKill:
     def visit_keep_alive(self, op: KeepAlive) -> GenAndKill:
         return CLEAN
 
+    def visit_unborrow(self, op: Unborrow) -> GenAndKill:
+        return CLEAN
+
     def check_register_op(self, op: RegisterOp) -> GenAndKill:
         if any(src is self.self_reg for src in op.sources()):
             return DIRTY
diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py
index b4d31544b196..3bce84d3ea59 100644
--- a/mypyc/codegen/emitfunc.py
+++ b/mypyc/codegen/emitfunc.py
@@ -55,6 +55,7 @@
     Truncate,
     TupleGet,
     TupleSet,
+    Unborrow,
     Unbox,
     Unreachable,
     Value,
@@ -260,7 +261,6 @@ def visit_tuple_set(self, op: TupleSet) -> None:
         else:
             for i, item in enumerate(op.items):
                 self.emit_line(f"{dest}.f{i} = {self.reg(item)};")
-        self.emit_inc_ref(dest, tuple_type)
 
     def visit_assign(self, op: Assign) -> None:
         dest = self.reg(op.dest)
@@ -499,7 +499,8 @@ def visit_tuple_get(self, op: TupleGet) -> None:
         dest = self.reg(op)
         src = self.reg(op.src)
         self.emit_line(f"{dest} = {src}.f{op.index};")
-        self.emit_inc_ref(dest, op.type)
+        if not op.is_borrowed:
+            self.emit_inc_ref(dest, op.type)
 
     def get_dest_assign(self, dest: Value) -> str:
         if not dest.is_void:
@@ -746,6 +747,12 @@ def visit_keep_alive(self, op: KeepAlive) -> None:
         # This is a no-op.
         pass
 
+    def visit_unborrow(self, op: Unborrow) -> None:
+        # This is a no-op that propagates the source value.
+        dest = self.reg(op)
+        src = self.reg(op.src)
+        self.emit_line(f"{dest} = {src};")
+
     # Helpers
 
     def label(self, label: BasicBlock) -> str:
diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py
index 2d64cc79d822..04c50d1e2841 100644
--- a/mypyc/ir/ops.py
+++ b/mypyc/ir/ops.py
@@ -792,6 +792,9 @@ def __init__(self, items: list[Value], line: int) -> None:
     def sources(self) -> list[Value]:
         return self.items.copy()
 
+    def stolen(self) -> list[Value]:
+        return self.items.copy()
+
     def accept(self, visitor: OpVisitor[T]) -> T:
         return visitor.visit_tuple_set(self)
 
@@ -801,13 +804,14 @@ class TupleGet(RegisterOp):
 
     error_kind = ERR_NEVER
 
-    def __init__(self, src: Value, index: int, line: int = -1) -> None:
+    def __init__(self, src: Value, index: int, line: int = -1, *, borrow: bool = False) -> None:
         super().__init__(line)
         self.src = src
         self.index = index
         assert isinstance(src.type, RTuple), "TupleGet only operates on tuples"
         assert index >= 0
         self.type = src.type.types[index]
+        self.is_borrowed = borrow
 
     def sources(self) -> list[Value]:
         return [self.src]
@@ -1387,21 +1391,76 @@ class KeepAlive(RegisterOp):
     If we didn't have "keep_alive x", x could be freed immediately
     after taking the address of 'item', resulting in a read after free
     on the second line.
+
+    If 'steal' is true, the value is considered to be stolen at
+    this op, i.e. it won't be decref'd. You need to ensure that
+    the value is freed otherwise, perhaps by using borrowing
+    followed by Unborrow.
+
+    Be careful with steal=True -- this can cause memory leaks.
     """
 
     error_kind = ERR_NEVER
 
-    def __init__(self, src: list[Value]) -> None:
+    def __init__(self, src: list[Value], *, steal: bool = False) -> None:
         assert src
         self.src = src
+        self.steal = steal
 
     def sources(self) -> list[Value]:
         return self.src.copy()
 
+    def stolen(self) -> list[Value]:
+        if self.steal:
+            return self.src.copy()
+        return []
+
     def accept(self, visitor: OpVisitor[T]) -> T:
         return visitor.visit_keep_alive(self)
 
 
+class Unborrow(RegisterOp):
+    """A no-op op to create a regular reference from a borrowed one.
+
+    Borrowed references can only be used temporarily and the reference
+    counts won't be managed. This value will be refcounted normally.
+
+    This is mainly useful if you split an aggregate value, such as
+    a tuple, into components using borrowed values (to avoid increfs),
+    and want to treat the components as sharing the original managed
+    reference. You'll also need to use KeepAlive with steal=True to
+    "consume" the original tuple reference:
+
+      # t is a 2-tuple
+      r0 = borrow t[0]
+      r1 = borrow t[1]
+      r2 = unborrow r0
+      r3 = unborrow r1
+      # now (r2, r3) represent the tuple as separate items, and the
+      # original tuple can be considered dead and available to be
+      # stolen
+      keep_alive steal t
+
+    Be careful with this -- this can easily cause double freeing.
+    """
+
+    error_kind = ERR_NEVER
+
+    def __init__(self, src: Value) -> None:
+        assert src.is_borrowed
+        self.src = src
+        self.type = src.type
+
+    def sources(self) -> list[Value]:
+        return [self.src]
+
+    def stolen(self) -> list[Value]:
+        return []
+
+    def accept(self, visitor: OpVisitor[T]) -> T:
+        return visitor.visit_unborrow(self)
+
+
 @trait
 class OpVisitor(Generic[T]):
     """Generic visitor over ops (uses the visitor design pattern)."""
@@ -1548,6 +1607,10 @@ def visit_load_address(self, op: LoadAddress) -> T:
     def visit_keep_alive(self, op: KeepAlive) -> T:
         raise NotImplementedError
 
+    @abstractmethod
+    def visit_unborrow(self, op: Unborrow) -> T:
+        raise NotImplementedError
+
 
 # TODO: Should the following definition live somewhere else?
 
diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py
index c86060c49594..5578049256f1 100644
--- a/mypyc/ir/pprint.py
+++ b/mypyc/ir/pprint.py
@@ -51,6 +51,7 @@
     Truncate,
     TupleGet,
     TupleSet,
+    Unborrow,
     Unbox,
     Unreachable,
     Value,
@@ -153,7 +154,7 @@ def visit_init_static(self, op: InitStatic) -> str:
         return self.format("%s = %r :: %s", name, op.value, op.namespace)
 
     def visit_tuple_get(self, op: TupleGet) -> str:
-        return self.format("%r = %r[%d]", op, op.src, op.index)
+        return self.format("%r = %s%r[%d]", op, self.borrow_prefix(op), op.src, op.index)
 
     def visit_tuple_set(self, op: TupleSet) -> str:
         item_str = ", ".join(self.format("%r", item) for item in op.items)
@@ -274,7 +275,16 @@ def visit_load_address(self, op: LoadAddress) -> str:
             return self.format("%r = load_address %s", op, op.src)
 
     def visit_keep_alive(self, op: KeepAlive) -> str:
-        return self.format("keep_alive %s" % ", ".join(self.format("%r", v) for v in op.src))
+        if op.steal:
+            steal = "steal "
+        else:
+            steal = ""
+        return self.format(
+            "keep_alive {}{}".format(steal, ", ".join(self.format("%r", v) for v in op.src))
+        )
+
+    def visit_unborrow(self, op: Unborrow) -> str:
+        return self.format("%r = unborrow %r", op, op.src)
 
     # Helpers
 
diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py
index 984b6a4deec0..d1ea91476a66 100644
--- a/mypyc/irbuild/ll_builder.py
+++ b/mypyc/irbuild/ll_builder.py
@@ -266,6 +266,9 @@ def goto_and_activate(self, block: BasicBlock) -> None:
         self.goto(block)
         self.activate_block(block)
 
+    def keep_alive(self, values: list[Value], *, steal: bool = False) -> None:
+        self.add(KeepAlive(values, steal=steal))
+
     def push_error_handler(self, handler: BasicBlock | None) -> None:
         self.error_handlers.append(handler)
 
diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py
index 63297618108c..d7e01456139d 100644
--- a/mypyc/irbuild/statement.py
+++ b/mypyc/irbuild/statement.py
@@ -59,11 +59,13 @@
     Register,
     Return,
     TupleGet,
+    Unborrow,
     Unreachable,
     Value,
 )
 from mypyc.ir.rtypes import (
     RInstance,
+    RTuple,
     c_pyssize_t_rprimitive,
     exc_rtuple,
     is_tagged,
@@ -183,8 +185,29 @@ def transform_assignment_stmt(builder: IRBuilder, stmt: AssignmentStmt) -> None:
 
     line = stmt.rvalue.line
     rvalue_reg = builder.accept(stmt.rvalue)
+
     if builder.non_function_scope() and stmt.is_final_def:
         builder.init_final_static(first_lvalue, rvalue_reg)
+
+    # Special-case multiple assignments like 'x, y = expr' to reduce refcount ops.
+    if (
+        isinstance(first_lvalue, (TupleExpr, ListExpr))
+        and isinstance(rvalue_reg.type, RTuple)
+        and len(rvalue_reg.type.types) == len(first_lvalue.items)
+        and len(lvalues) == 1
+        and all(is_simple_lvalue(item) for item in first_lvalue.items)
+        and any(t.is_refcounted for t in rvalue_reg.type.types)
+    ):
+        n = len(first_lvalue.items)
+        for i in range(n):
+            target = builder.get_assignment_target(first_lvalue.items[i])
+            rvalue_item = builder.add(TupleGet(rvalue_reg, i, borrow=True))
+            rvalue_item = builder.add(Unborrow(rvalue_item))
+            builder.assign(target, rvalue_item, line)
+        builder.builder.keep_alive([rvalue_reg], steal=True)
+        builder.flush_keep_alives()
+        return
+
     for lvalue in lvalues:
         target = builder.get_assignment_target(lvalue)
         builder.assign(target, rvalue_reg, line)
diff --git a/mypyc/test-data/irbuild-statements.test b/mypyc/test-data/irbuild-statements.test
index 062abd47d163..490b41336e88 100644
--- a/mypyc/test-data/irbuild-statements.test
+++ b/mypyc/test-data/irbuild-statements.test
@@ -502,16 +502,16 @@ L0:
 [case testMultipleAssignmentBasicUnpacking]
 from typing import Tuple, Any
 
-def from_tuple(t: Tuple[int, str]) -> None:
+def from_tuple(t: Tuple[bool, None]) -> None:
     x, y = t
 
 def from_any(a: Any) -> None:
     x, y = a
 [out]
 def from_tuple(t):
-    t :: tuple[int, str]
-    r0, x :: int
-    r1, y :: str
+    t :: tuple[bool, None]
+    r0, x :: bool
+    r1, y :: None
 L0:
     r0 = t[0]
     x = r0
@@ -563,16 +563,19 @@ def from_any(a: Any) -> None:
 [out]
 def from_tuple(t):
     t :: tuple[int, object]
-    r0 :: int
-    r1, x, r2 :: object
-    r3, y :: int
+    r0, r1 :: int
+    r2, x, r3, r4 :: object
+    r5, y :: int
 L0:
-    r0 = t[0]
-    r1 = box(int, r0)
-    x = r1
-    r2 = t[1]
-    r3 = unbox(int, r2)
-    y = r3
+    r0 = borrow t[0]
+    r1 = unborrow r0
+    r2 = box(int, r1)
+    x = r2
+    r3 = borrow t[1]
+    r4 = unborrow r3
+    r5 = unbox(int, r4)
+    y = r5
+    keep_alive steal t
     return 1
 def from_any(a):
     a, r0, r1 :: object
diff --git a/mypyc/test-data/refcount.test b/mypyc/test-data/refcount.test
index 3db4caa39566..0f2c134ae21e 100644
--- a/mypyc/test-data/refcount.test
+++ b/mypyc/test-data/refcount.test
@@ -656,6 +656,66 @@ L1:
 L2:
     return 4
 
+[case testReturnTuple]
+from typing import Tuple
+
+class C: pass
+def f() -> Tuple[C, C]:
+    a = C()
+    b = C()
+    return a, b
+[out]
+def f():
+    r0, a, r1, b :: __main__.C
+    r2 :: tuple[__main__.C, __main__.C]
+L0:
+    r0 = C()
+    a = r0
+    r1 = C()
+    b = r1
+    r2 = (a, b)
+    return r2
+
+[case testDecomposeTuple]
+from typing import Tuple
+
+class C:
+    a: int
+
+def f() -> int:
+    x, y = g()
+    return x.a + y.a
+
+def g() -> Tuple[C, C]:
+    return C(), C()
+[out]
+def f():
+    r0 :: tuple[__main__.C, __main__.C]
+    r1, r2, x, r3, r4, y :: __main__.C
+    r5, r6, r7 :: int
+L0:
+    r0 = g()
+    r1 = borrow r0[0]
+    r2 = unborrow r1
+    x = r2
+    r3 = borrow r0[1]
+    r4 = unborrow r3
+    y = r4
+    r5 = borrow x.a
+    r6 = borrow y.a
+    r7 = CPyTagged_Add(r5, r6)
+    dec_ref x
+    dec_ref y
+    return r7
+def g():
+    r0, r1 :: __main__.C
+    r2 :: tuple[__main__.C, __main__.C]
+L0:
+    r0 = C()
+    r1 = C()
+    r2 = (r0, r1)
+    return r2
+
 [case testUnicodeLiteral]
 def f() -> str:
     return "some string"

From b3275572ec9b65d0a1b5157c5f73ad4004a356b4 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 13 Sep 2023 23:41:08 +0100
Subject: [PATCH 123/288] Subtyping and inference of user defined variadic
 types (#16076)

The second part of support for user defined variadic types comes as a
single PR, it was hard to split into smaller parts. This part covers
subtyping and inference (and relies on the first part: type analysis,
normalization, and expansion, concluded by
https://github.com/python/mypy/pull/15991). Note btw that the third (and
last) part that covers actually using all the stuff in `checkexpr.py`
will likely come as several smaller PRs.

Some comments on this PR:
* First good news: it looks like instances subtyping/inference can be
handled in a really simple way, we just need to find correct type
arguments mapping for each type variable, and perform procedures
argument by argument (note this heavily relies on the normalization).
Also callable subtyping inference for variadic items effectively defers
to corresponding tuple types. This way all code paths will ultimately go
through variadic tuple subtyping/inference (there is still a bunch of
boilerplate to do the mapping, but it is quite simple).
* Second some bad news: a lot of edge cases involving `*tuple[X, ...]`
were missing everywhere (even couple cases in the code I touched
before). I added all that were either simple or important. We can handle
more if users ask, since it is quite tricky.
* Note that I handle variadic tuples essentially as infinite unions, the
core of the logic for this (and for most of this PR FWIW) is in
`variadic_tuple_subtype()`.
* Previously `Foo[*tuple[int, ...]]` was considered a subtype of
`Foo[int, int]`. I think this is wrong. I didn't find where this is
required in the PEP (see one case below however), and mypy currently
considers `tuple[int, ...]` not a subtype of `tuple[int, int]` (vice
versa are subtypes), and similarly `(*args: int)` vs `(x: int, y: int)`
for callables. Because of the logic I described in the first comment,
the same logic now uniformly applies to instances as well.
* Note however the PEP requires special casing of `Foo[*tuple[Any,
...]]` (equivalent to bare `Foo`), and I agree we should do this. I
added a minimal special case for this. Note we also do this for
callables as well (`*args: Any` is very different from `*args: object`).
And I think we should special case `tuple[Any, ...] <: tuple[int, int]`
as well. In the future we can even extend the special casing to
`tuple[int, *tuple[Any, ...], int]` in the spirit of
https://github.com/python/mypy/pull/15913
* In this PR I specifically only handle the PEP required item from above
for instances. For plain tuples I left a TODO, @hauntsaninja may
implement it since it is needed for other unrelated PR.
* I make the default upper bound for `TypeVarTupleType` to be
`tuple[object, ...]`. I think it can never be `object` (and this
simplifies some subtyping corner cases).
* TBH I didn't look into callables subtyping/inference very deeply
(unlike instances and tuples), if needed we can improve their handling
later.
* Note I remove some failing unit tests because they test non-normalized
forms that should never appear now. We should probably add some more unit
tests, but TBH I am quite tired now.
---
 mypy/constraints.py                     | 231 +++++++++----------
 mypy/erasetype.py                       |  11 +-
 mypy/expandtype.py                      |   3 +-
 mypy/fixup.py                           |  17 +-
 mypy/join.py                            | 154 ++++++++++++-
 mypy/meet.py                            | 122 +++++++++-
 mypy/semanal.py                         |   3 +-
 mypy/semanal_typeargs.py                |   3 +-
 mypy/solve.py                           |   8 +-
 mypy/subtypes.py                        | 241 ++++++++++++--------
 mypy/test/testconstraints.py            |  42 +---
 mypy/test/testsubtypes.py               |  83 +------
 mypy/test/testtypes.py                  |  77 +++++++
 mypy/test/typefixture.py                |  11 +-
 mypy/typeops.py                         |   7 +-
 mypy/typevartuples.py                   | 134 -----------
 test-data/unit/check-incremental.test   |  19 ++
 test-data/unit/check-typevar-tuple.test | 285 +++++++++++++++++++++++-
 test-data/unit/semanal-types.test       |   7 +-
 19 files changed, 943 insertions(+), 515 deletions(-)

diff --git a/mypy/constraints.py b/mypy/constraints.py
index 0e59b5459fd4..0524e38f9643 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Final, Iterable, List, Sequence, cast
+from typing import TYPE_CHECKING, Final, Iterable, List, Sequence
 
 import mypy.subtypes
 import mypy.typeops
@@ -58,7 +58,6 @@
 )
 from mypy.types_utils import is_union_with_any
 from mypy.typestate import type_state
-from mypy.typevartuples import extract_unpack, split_with_mapped_and_template
 
 if TYPE_CHECKING:
     from mypy.infer import ArgumentInferContext
@@ -745,28 +744,23 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                 tvars = mapped.type.defn.type_vars
 
                 if instance.type.has_type_var_tuple_type:
+                    # Variadic types need special handling to map each type argument to
+                    # the correct corresponding type variable.
                     assert instance.type.type_var_tuple_prefix is not None
                     assert instance.type.type_var_tuple_suffix is not None
-                    assert mapped.type.type_var_tuple_prefix is not None
-                    assert mapped.type.type_var_tuple_suffix is not None
-
-                    unpack_constraints, instance_args, mapped_args = build_constraints_for_unpack(
-                        instance.args,
-                        instance.type.type_var_tuple_prefix,
-                        instance.type.type_var_tuple_suffix,
-                        mapped.args,
-                        mapped.type.type_var_tuple_prefix,
-                        mapped.type.type_var_tuple_suffix,
-                        self.direction,
+                    prefix_len = instance.type.type_var_tuple_prefix
+                    suffix_len = instance.type.type_var_tuple_suffix
+                    tvt = instance.type.defn.type_vars[prefix_len]
+                    assert isinstance(tvt, TypeVarTupleType)
+                    fallback = tvt.tuple_fallback
+                    i_prefix, i_middle, i_suffix = split_with_prefix_and_suffix(
+                        instance.args, prefix_len, suffix_len
                     )
-                    res.extend(unpack_constraints)
-
-                    tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix(
-                        tuple(tvars),
-                        instance.type.type_var_tuple_prefix,
-                        instance.type.type_var_tuple_suffix,
+                    m_prefix, m_middle, m_suffix = split_with_prefix_and_suffix(
+                        mapped.args, prefix_len, suffix_len
                     )
-                    tvars = cast("list[TypeVarLikeType]", list(tvars_prefix + tvars_suffix))
+                    instance_args = i_prefix + (TupleType(list(i_middle), fallback),) + i_suffix
+                    mapped_args = m_prefix + (TupleType(list(m_middle), fallback),) + m_suffix
                 else:
                     mapped_args = mapped.args
                     instance_args = instance.args
@@ -806,44 +800,38 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                             )
                             res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix))
                             res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix))
-                    else:
-                        # This case should have been handled above.
-                        assert not isinstance(tvar, TypeVarTupleType)
+                    elif isinstance(tvar, TypeVarTupleType):
+                        # Handle variadic type variables covariantly for consistency.
+                        res.extend(infer_constraints(mapped_arg, instance_arg, self.direction))
 
                 return res
             elif self.direction == SUPERTYPE_OF and instance.type.has_base(template.type.fullname):
                 mapped = map_instance_to_supertype(instance, template.type)
                 tvars = template.type.defn.type_vars
                 if template.type.has_type_var_tuple_type:
-                    assert mapped.type.type_var_tuple_prefix is not None
-                    assert mapped.type.type_var_tuple_suffix is not None
+                    # Variadic types need special handling to map each type argument to
+                    # the correct corresponding type variable.
                     assert template.type.type_var_tuple_prefix is not None
                     assert template.type.type_var_tuple_suffix is not None
-
-                    unpack_constraints, mapped_args, template_args = build_constraints_for_unpack(
-                        mapped.args,
-                        mapped.type.type_var_tuple_prefix,
-                        mapped.type.type_var_tuple_suffix,
-                        template.args,
-                        template.type.type_var_tuple_prefix,
-                        template.type.type_var_tuple_suffix,
-                        self.direction,
+                    prefix_len = template.type.type_var_tuple_prefix
+                    suffix_len = template.type.type_var_tuple_suffix
+                    tvt = template.type.defn.type_vars[prefix_len]
+                    assert isinstance(tvt, TypeVarTupleType)
+                    fallback = tvt.tuple_fallback
+                    t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix(
+                        template.args, prefix_len, suffix_len
                     )
-                    res.extend(unpack_constraints)
-
-                    tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix(
-                        tuple(tvars),
-                        template.type.type_var_tuple_prefix,
-                        template.type.type_var_tuple_suffix,
+                    m_prefix, m_middle, m_suffix = split_with_prefix_and_suffix(
+                        mapped.args, prefix_len, suffix_len
                     )
-                    tvars = cast("list[TypeVarLikeType]", list(tvars_prefix + tvars_suffix))
+                    template_args = t_prefix + (TupleType(list(t_middle), fallback),) + t_suffix
+                    mapped_args = m_prefix + (TupleType(list(m_middle), fallback),) + m_suffix
                 else:
                     mapped_args = mapped.args
                     template_args = template.args
                 # N.B: We use zip instead of indexing because the lengths might have
                 # mismatches during daemon reprocessing.
                 for tvar, mapped_arg, template_arg in zip(tvars, mapped_args, template_args):
-                    assert not isinstance(tvar, TypeVarTupleType)
                     if isinstance(tvar, TypeVarType):
                         # The constraints for generic type parameters depend on variance.
                         # Include constraints from both directions if invariant.
@@ -878,9 +866,9 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
                             )
                             res.append(Constraint(template_arg, SUBTYPE_OF, suffix))
                             res.append(Constraint(template_arg, SUPERTYPE_OF, suffix))
-                    else:
-                        # This case should have been handled above.
-                        assert not isinstance(tvar, TypeVarTupleType)
+                    elif isinstance(tvar, TypeVarTupleType):
+                        # Handle variadic type variables covariantly for consistency.
+                        res.extend(infer_constraints(template_arg, mapped_arg, self.direction))
                 return res
             if (
                 template.type.is_protocol
@@ -1049,7 +1037,8 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                         )
                         res.extend(unpack_constraints)
                     else:
-                        # Negate direction due to function argument type contravariance.
+                        # TODO: do we need some special-casing when unpack is present in actual
+                        # callable but not in template callable?
                         res.extend(
                             infer_callable_arguments_constraints(template, cactual, self.direction)
                         )
@@ -1170,11 +1159,29 @@ def visit_tuple_type(self, template: TupleType) -> list[Constraint]:
             res: list[Constraint] = []
             if unpack_index is not None:
                 if is_varlength_tuple:
+                    # Variadic tuple can be only a supertype of a tuple type, but even if
+                    # direction is opposite, inferring something may give better error messages.
                     unpack_type = template.items[unpack_index]
                     assert isinstance(unpack_type, UnpackType)
-                    unpacked_type = unpack_type.type
-                    assert isinstance(unpacked_type, TypeVarTupleType)
-                    return [Constraint(type_var=unpacked_type, op=self.direction, target=actual)]
+                    unpacked_type = get_proper_type(unpack_type.type)
+                    if isinstance(unpacked_type, TypeVarTupleType):
+                        res = [
+                            Constraint(type_var=unpacked_type, op=self.direction, target=actual)
+                        ]
+                    else:
+                        assert (
+                            isinstance(unpacked_type, Instance)
+                            and unpacked_type.type.fullname == "builtins.tuple"
+                        )
+                        res = infer_constraints(unpacked_type, actual, self.direction)
+                    assert isinstance(actual, Instance)  # ensured by is_varlength_tuple == True
+                    for i, ti in enumerate(template.items):
+                        if i == unpack_index:
+                            # This one we just handled above.
+                            continue
+                        # For Tuple[T, *Ts, S] <: tuple[X, ...] infer also T <: X and S <: X.
+                        res.extend(infer_constraints(ti, actual.args[0], self.direction))
+                    return res
                 else:
                     assert isinstance(actual, TupleType)
                     unpack_constraints = build_constraints_for_simple_unpack(
@@ -1184,8 +1191,36 @@ def visit_tuple_type(self, template: TupleType) -> list[Constraint]:
                     template_items: tuple[Type, ...] = ()
                     res.extend(unpack_constraints)
             elif isinstance(actual, TupleType):
-                actual_items = tuple(actual.items)
-                template_items = tuple(template.items)
+                a_unpack_index = find_unpack_in_list(actual.items)
+                if a_unpack_index is not None:
+                    # The case where template tuple doesn't have an unpack, but actual tuple
+                    # has an unpack. We can infer something if actual unpack is a variadic tuple.
+                    # Tuple[T, S, U] <: tuple[X, *tuple[Y, ...], Z] => T <: X, S <: Y, U <: Z.
+                    a_unpack = actual.items[a_unpack_index]
+                    assert isinstance(a_unpack, UnpackType)
+                    a_unpacked = get_proper_type(a_unpack.type)
+                    if len(actual.items) + 1 <= len(template.items):
+                        a_prefix_len = a_unpack_index
+                        a_suffix_len = len(actual.items) - a_unpack_index - 1
+                        t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix(
+                            tuple(template.items), a_prefix_len, a_suffix_len
+                        )
+                        actual_items = tuple(actual.items[:a_prefix_len])
+                        if a_suffix_len:
+                            actual_items += tuple(actual.items[-a_suffix_len:])
+                        template_items = t_prefix + t_suffix
+                        if isinstance(a_unpacked, Instance):
+                            assert a_unpacked.type.fullname == "builtins.tuple"
+                            for tm in t_middle:
+                                res.extend(
+                                    infer_constraints(tm, a_unpacked.args[0], self.direction)
+                                )
+                    else:
+                        actual_items = ()
+                        template_items = ()
+                else:
+                    actual_items = tuple(actual.items)
+                    template_items = tuple(template.items)
             else:
                 return res
 
@@ -1236,8 +1271,13 @@ def visit_type_alias_type(self, template: TypeAliasType) -> list[Constraint]:
     def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> list[Constraint]:
         res: list[Constraint] = []
         for t in types:
-            if isinstance(t, UnpackType) and isinstance(t.type, TypeVarTupleType):
-                res.append(Constraint(t.type, self.direction, any_type))
+            if isinstance(t, UnpackType):
+                if isinstance(t.type, TypeVarTupleType):
+                    res.append(Constraint(t.type, self.direction, any_type))
+                else:
+                    unpacked = get_proper_type(t.type)
+                    assert isinstance(unpacked, Instance)
+                    res.extend(infer_constraints(unpacked, any_type, self.direction))
             else:
                 # Note that we ignore variance and simply always use the
                 # original direction. This is because for Any targets direction is
@@ -1374,9 +1414,8 @@ def build_constraints_for_simple_unpack(
         templates: T1, T2, Ts, Ts, Ts, ...
         actuals:   A1, As, As, As, ...
 
-    Note: this function can only be called for builtin variadic constructors: Tuple and Callable,
-    for Instances variance depends on position, and a much more complex function
-    build_constraints_for_unpack() should be used.
+    Note: this function can only be called for builtin variadic constructors: Tuple and Callable.
+    For instances, you should first find correct type argument mapping.
     """
     template_unpack = find_unpack_in_list(template_args)
     assert template_unpack is not None
@@ -1409,7 +1448,8 @@ def build_constraints_for_simple_unpack(
         common_prefix = min(template_prefix, actual_prefix)
         common_suffix = min(template_suffix, actual_suffix)
         if actual_prefix >= template_prefix and actual_suffix >= template_suffix:
-            # This is the only case where we can guarantee there will be no partial overlap.
+            # This is the only case where we can guarantee there will be no partial overlap
+            # (note however partial overlap is OK for variadic tuples, it is handled below).
             t_unpack = template_args[template_unpack]
 
     # Handle constraints from prefixes/suffixes first.
@@ -1439,74 +1479,21 @@ def build_constraints_for_simple_unpack(
                         res.extend(infer_constraints(tp.args[0], a_tp.args[0], direction))
         elif isinstance(tp, TypeVarTupleType):
             res.append(Constraint(tp, direction, TupleType(list(middle), tp.tuple_fallback)))
+    elif actual_unpack is not None:
+        # A special case for a variadic tuple unpack, we simply infer T <: X from
+        # Tuple[..., *tuple[T, ...], ...] <: Tuple[..., *tuple[X, ...], ...].
+        actual_unpack_type = actual_args[actual_unpack]
+        assert isinstance(actual_unpack_type, UnpackType)
+        a_unpacked = get_proper_type(actual_unpack_type.type)
+        if isinstance(a_unpacked, Instance) and a_unpacked.type.fullname == "builtins.tuple":
+            t_unpack = template_args[template_unpack]
+            assert isinstance(t_unpack, UnpackType)
+            tp = get_proper_type(t_unpack.type)
+            if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple":
+                res.extend(infer_constraints(tp.args[0], a_unpacked.args[0], direction))
     return res
 
 
-def build_constraints_for_unpack(
-    # TODO: this naming is misleading, these should be "actual", not "mapped"
-    # both template and actual can be mapped before, depending on direction.
-    # Also the convention is to put template related args first.
-    mapped: tuple[Type, ...],
-    mapped_prefix_len: int | None,
-    mapped_suffix_len: int | None,
-    template: tuple[Type, ...],
-    template_prefix_len: int,
-    template_suffix_len: int,
-    direction: int,
-) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]:
-    # TODO: this function looks broken:
-    # a) it should take into account variances, but it doesn't
-    # b) it looks like both call sites always pass identical values to args (2, 3) and (5, 6)
-    # because after map_instance_to_supertype() both template and actual have same TypeInfo.
-    if mapped_prefix_len is None:
-        mapped_prefix_len = template_prefix_len
-    if mapped_suffix_len is None:
-        mapped_suffix_len = template_suffix_len
-
-    split_result = split_with_mapped_and_template(
-        mapped,
-        mapped_prefix_len,
-        mapped_suffix_len,
-        template,
-        template_prefix_len,
-        template_suffix_len,
-    )
-    assert split_result is not None
-    (
-        mapped_prefix,
-        mapped_middle,
-        mapped_suffix,
-        template_prefix,
-        template_middle,
-        template_suffix,
-    ) = split_result
-
-    template_unpack = extract_unpack(template_middle)
-    res = []
-
-    if template_unpack is not None:
-        if isinstance(template_unpack, TypeVarTupleType):
-            res.append(
-                Constraint(
-                    template_unpack,
-                    direction,
-                    TupleType(list(mapped_middle), template_unpack.tuple_fallback),
-                )
-            )
-        elif (
-            isinstance(template_unpack, Instance)
-            and template_unpack.type.fullname == "builtins.tuple"
-        ):
-            for item in mapped_middle:
-                res.extend(infer_constraints(template_unpack.args[0], item, direction))
-
-        elif isinstance(template_unpack, TupleType):
-            if len(template_unpack.items) == len(mapped_middle):
-                for template_arg, item in zip(template_unpack.items, mapped_middle):
-                    res.extend(infer_constraints(template_arg, item, direction))
-    return res, mapped_prefix + mapped_suffix, template_prefix + template_suffix
-
-
 def infer_directed_arg_constraints(left: Type, right: Type, direction: int) -> list[Constraint]:
     """Infer constraints between two arguments using direction between original callables."""
     if isinstance(left, (ParamSpecType, UnpackType)) or isinstance(
diff --git a/mypy/erasetype.py b/mypy/erasetype.py
index d1a01fb6c779..24471f918319 100644
--- a/mypy/erasetype.py
+++ b/mypy/erasetype.py
@@ -77,7 +77,16 @@ def visit_deleted_type(self, t: DeletedType) -> ProperType:
         return t
 
     def visit_instance(self, t: Instance) -> ProperType:
-        return Instance(t.type, [AnyType(TypeOfAny.special_form)] * len(t.args), t.line)
+        args: list[Type] = []
+        for tv in t.type.defn.type_vars:
+            # Valid erasure for *Ts is *tuple[Any, ...], not just Any.
+            if isinstance(tv, TypeVarTupleType):
+                args.append(
+                    tv.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)])
+                )
+            else:
+                args.append(AnyType(TypeOfAny.special_form))
+        return Instance(t.type, args, t.line)
 
     def visit_type_var(self, t: TypeVarType) -> ProperType:
         return AnyType(TypeOfAny.special_form)
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index c29fcb167777..b233561e19c2 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -255,7 +255,8 @@ def visit_param_spec(self, t: ParamSpecType) -> Type:
                 variables=[*t.prefix.variables, *repl.variables],
             )
         else:
-            # TODO: replace this with "assert False"
+            # We could encode Any as trivial parameters etc., but it would be too verbose.
+            # TODO: assert this is a trivial type, like Any, Never, or object.
             return repl
 
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type:
diff --git a/mypy/fixup.py b/mypy/fixup.py
index 2b2e1210ee4e..5ffc47120734 100644
--- a/mypy/fixup.py
+++ b/mypy/fixup.py
@@ -81,11 +81,17 @@ def visit_type_info(self, info: TypeInfo) -> None:
                 info.update_tuple_type(info.tuple_type)
                 if info.special_alias:
                     info.special_alias.alias_tvars = list(info.defn.type_vars)
+                    for i, t in enumerate(info.defn.type_vars):
+                        if isinstance(t, TypeVarTupleType):
+                            info.special_alias.tvar_tuple_index = i
             if info.typeddict_type:
                 info.typeddict_type.accept(self.type_fixer)
                 info.update_typeddict_type(info.typeddict_type)
                 if info.special_alias:
                     info.special_alias.alias_tvars = list(info.defn.type_vars)
+                    for i, t in enumerate(info.defn.type_vars):
+                        if isinstance(t, TypeVarTupleType):
+                            info.special_alias.tvar_tuple_index = i
             if info.declared_metaclass:
                 info.declared_metaclass.accept(self.type_fixer)
             if info.metaclass_type:
@@ -166,11 +172,7 @@ def visit_decorator(self, d: Decorator) -> None:
 
     def visit_class_def(self, c: ClassDef) -> None:
         for v in c.type_vars:
-            if isinstance(v, TypeVarType):
-                for value in v.values:
-                    value.accept(self.type_fixer)
-            v.upper_bound.accept(self.type_fixer)
-            v.default.accept(self.type_fixer)
+            v.accept(self.type_fixer)
 
     def visit_type_var_expr(self, tv: TypeVarExpr) -> None:
         for value in tv.values:
@@ -184,6 +186,7 @@ def visit_paramspec_expr(self, p: ParamSpecExpr) -> None:
 
     def visit_type_var_tuple_expr(self, tv: TypeVarTupleExpr) -> None:
         tv.upper_bound.accept(self.type_fixer)
+        tv.tuple_fallback.accept(self.type_fixer)
         tv.default.accept(self.type_fixer)
 
     def visit_var(self, v: Var) -> None:
@@ -314,6 +317,7 @@ def visit_param_spec(self, p: ParamSpecType) -> None:
         p.default.accept(self)
 
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> None:
+        t.tuple_fallback.accept(self)
         t.upper_bound.accept(self)
         t.default.accept(self)
 
@@ -336,9 +340,6 @@ def visit_union_type(self, ut: UnionType) -> None:
             for it in ut.items:
                 it.accept(self)
 
-    def visit_void(self, o: Any) -> None:
-        pass  # Nothing to descend into.
-
     def visit_type_type(self, t: TypeType) -> None:
         t.item.accept(self)
 
diff --git a/mypy/join.py b/mypy/join.py
index 806c644a680c..e4429425d98a 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -43,8 +43,10 @@
     UninhabitedType,
     UnionType,
     UnpackType,
+    find_unpack_in_list,
     get_proper_type,
     get_proper_types,
+    split_with_prefix_and_suffix,
 )
 
 
@@ -67,7 +69,25 @@ def join_instances(self, t: Instance, s: Instance) -> ProperType:
             args: list[Type] = []
             # N.B: We use zip instead of indexing because the lengths might have
             # mismatches during daemon reprocessing.
-            for ta, sa, type_var in zip(t.args, s.args, t.type.defn.type_vars):
+            if t.type.has_type_var_tuple_type:
+                # We handle joins of variadic instances by simply creating correct mapping
+                # for type arguments and compute the individual joins same as for regular
+                # instances. All the heavy lifting is done in the join of tuple types.
+                assert s.type.type_var_tuple_prefix is not None
+                assert s.type.type_var_tuple_suffix is not None
+                prefix = s.type.type_var_tuple_prefix
+                suffix = s.type.type_var_tuple_suffix
+                tvt = s.type.defn.type_vars[prefix]
+                assert isinstance(tvt, TypeVarTupleType)
+                fallback = tvt.tuple_fallback
+                s_prefix, s_middle, s_suffix = split_with_prefix_and_suffix(s.args, prefix, suffix)
+                t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix(t.args, prefix, suffix)
+                s_args = s_prefix + (TupleType(list(s_middle), fallback),) + s_suffix
+                t_args = t_prefix + (TupleType(list(t_middle), fallback),) + t_suffix
+            else:
+                t_args = t.args
+                s_args = s.args
+            for ta, sa, type_var in zip(t_args, s_args, t.type.defn.type_vars):
                 ta_proper = get_proper_type(ta)
                 sa_proper = get_proper_type(sa)
                 new_type: Type | None = None
@@ -93,6 +113,18 @@ def join_instances(self, t: Instance, s: Instance) -> ProperType:
                         # If the types are different but equivalent, then an Any is involved
                         # so using a join in the contravariant case is also OK.
                         new_type = join_types(ta, sa, self)
+                elif isinstance(type_var, TypeVarTupleType):
+                    new_type = get_proper_type(join_types(ta, sa, self))
+                    # Put the joined arguments back into instance in the normal form:
+                    #   a) Tuple[X, Y, Z] -> [X, Y, Z]
+                    #   b) tuple[X, ...] -> [*tuple[X, ...]]
+                    if isinstance(new_type, Instance):
+                        assert new_type.type.fullname == "builtins.tuple"
+                        new_type = UnpackType(new_type)
+                    else:
+                        assert isinstance(new_type, TupleType)
+                        args.extend(new_type.items)
+                        continue
                 else:
                     # ParamSpec type variables behave the same, independent of variance
                     if not is_equivalent(ta, sa):
@@ -440,6 +472,113 @@ def visit_overloaded(self, t: Overloaded) -> ProperType:
                 return join_types(t, call)
         return join_types(t.fallback, s)
 
+    def join_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None:
+        """Join two tuple types while handling variadic entries.
+
+        This is surprisingly tricky, and we don't handle some tricky corner cases.
+        Most of the trickiness comes from the variadic tuple items like *tuple[X, ...]
+        since they can have arbitrary partial overlaps (while *Ts can't be split).
+        """
+        s_unpack_index = find_unpack_in_list(s.items)
+        t_unpack_index = find_unpack_in_list(t.items)
+        if s_unpack_index is None and t_unpack_index is None:
+            if s.length() == t.length():
+                items: list[Type] = []
+                for i in range(t.length()):
+                    items.append(join_types(t.items[i], s.items[i]))
+                return items
+            return None
+        if s_unpack_index is not None and t_unpack_index is not None:
+            # The most complex case: both tuples have an unpack item.
+            s_unpack = s.items[s_unpack_index]
+            assert isinstance(s_unpack, UnpackType)
+            s_unpacked = get_proper_type(s_unpack.type)
+            t_unpack = t.items[t_unpack_index]
+            assert isinstance(t_unpack, UnpackType)
+            t_unpacked = get_proper_type(t_unpack.type)
+            if s.length() == t.length() and s_unpack_index == t_unpack_index:
+                # We can handle a case where arity is perfectly aligned, e.g.
+                # join(Tuple[X1, *tuple[Y1, ...], Z1], Tuple[X2, *tuple[Y2, ...], Z2]).
+                # We can essentially perform the join elementwise.
+                prefix_len = t_unpack_index
+                suffix_len = t.length() - t_unpack_index - 1
+                items = []
+                for si, ti in zip(s.items[:prefix_len], t.items[:prefix_len]):
+                    items.append(join_types(si, ti))
+                joined = join_types(s_unpacked, t_unpacked)
+                if isinstance(joined, TypeVarTupleType):
+                    items.append(UnpackType(joined))
+                elif isinstance(joined, Instance) and joined.type.fullname == "builtins.tuple":
+                    items.append(UnpackType(joined))
+                else:
+                    if isinstance(t_unpacked, Instance):
+                        assert t_unpacked.type.fullname == "builtins.tuple"
+                        tuple_instance = t_unpacked
+                    else:
+                        assert isinstance(t_unpacked, TypeVarTupleType)
+                        tuple_instance = t_unpacked.tuple_fallback
+                    items.append(
+                        UnpackType(
+                            tuple_instance.copy_modified(
+                                args=[object_from_instance(tuple_instance)]
+                            )
+                        )
+                    )
+                if suffix_len:
+                    for si, ti in zip(s.items[-suffix_len:], t.items[-suffix_len:]):
+                        items.append(join_types(si, ti))
+                return items
+            if s.length() == 1 or t.length() == 1:
+                # Another case we can handle is when one of the tuples is purely variadic
+                # (i.e. a non-normalized form of tuple[X, ...]), in this case the join
+                # will be again purely variadic.
+                if not (isinstance(s_unpacked, Instance) and isinstance(t_unpacked, Instance)):
+                    return None
+                assert s_unpacked.type.fullname == "builtins.tuple"
+                assert t_unpacked.type.fullname == "builtins.tuple"
+                mid_joined = join_types(s_unpacked.args[0], t_unpacked.args[0])
+                t_other = [a for i, a in enumerate(t.items) if i != t_unpack_index]
+                s_other = [a for i, a in enumerate(s.items) if i != s_unpack_index]
+                other_joined = join_type_list(s_other + t_other)
+                mid_joined = join_types(mid_joined, other_joined)
+                return [UnpackType(s_unpacked.copy_modified(args=[mid_joined]))]
+            # TODO: are there other cases we can handle (e.g. both prefix/suffix are shorter)?
+            return None
+        if s_unpack_index is not None:
+            variadic = s
+            unpack_index = s_unpack_index
+            fixed = t
+        else:
+            assert t_unpack_index is not None
+            variadic = t
+            unpack_index = t_unpack_index
+            fixed = s
+        # Case where one tuple has a variadic item and the other one doesn't. The join will
+        # be variadic, since a fixed tuple is a subtype of a variadic one, but not vice versa.
+        unpack = variadic.items[unpack_index]
+        assert isinstance(unpack, UnpackType)
+        unpacked = get_proper_type(unpack.type)
+        if not isinstance(unpacked, Instance):
+            return None
+        if fixed.length() < variadic.length() - 1:
+            # There are no non-trivial types that are supertype of both.
+            return None
+        prefix_len = unpack_index
+        suffix_len = variadic.length() - prefix_len - 1
+        prefix, middle, suffix = split_with_prefix_and_suffix(
+            tuple(fixed.items), prefix_len, suffix_len
+        )
+        items = []
+        for fi, vi in zip(prefix, variadic.items[:prefix_len]):
+            items.append(join_types(fi, vi))
+        mid_joined = join_type_list(list(middle))
+        mid_joined = join_types(mid_joined, unpacked.args[0])
+        items.append(UnpackType(unpacked.copy_modified(args=[mid_joined])))
+        if suffix_len:
+            for fi, vi in zip(suffix, variadic.items[-suffix_len:]):
+                items.append(join_types(fi, vi))
+        return items
+
     def visit_tuple_type(self, t: TupleType) -> ProperType:
         # When given two fixed-length tuples:
         # * If they have the same length, join their subtypes item-wise:
@@ -452,19 +591,22 @@ def visit_tuple_type(self, t: TupleType) -> ProperType:
         #   Tuple[int, bool] + Tuple[bool, ...] becomes Tuple[int, ...]
         # * Joining with any Sequence also returns a Sequence:
         #   Tuple[int, bool] + List[bool] becomes Sequence[int]
-        if isinstance(self.s, TupleType) and self.s.length() == t.length():
+        if isinstance(self.s, TupleType):
             if self.instance_joiner is None:
                 self.instance_joiner = InstanceJoiner()
             fallback = self.instance_joiner.join_instances(
                 mypy.typeops.tuple_fallback(self.s), mypy.typeops.tuple_fallback(t)
             )
             assert isinstance(fallback, Instance)
-            if self.s.length() == t.length():
-                items: list[Type] = []
-                for i in range(t.length()):
-                    items.append(join_types(t.items[i], self.s.items[i]))
+            items = self.join_tuples(self.s, t)
+            if items is not None:
                 return TupleType(items, fallback)
             else:
+                # TODO: should this be a default fallback behaviour like for meet?
+                if is_proper_subtype(self.s, t):
+                    return t
+                if is_proper_subtype(t, self.s):
+                    return self.s
                 return fallback
         else:
             return join_types(self.s, mypy.typeops.tuple_fallback(t))
diff --git a/mypy/meet.py b/mypy/meet.py
index 2efde4ac7588..0fa500d32c30 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -44,8 +44,10 @@
     UninhabitedType,
     UnionType,
     UnpackType,
+    find_unpack_in_list,
     get_proper_type,
     get_proper_types,
+    split_with_prefix_and_suffix,
 )
 
 # TODO Describe this module.
@@ -721,8 +723,41 @@ def visit_instance(self, t: Instance) -> ProperType:
                     args: list[Type] = []
                     # N.B: We use zip instead of indexing because the lengths might have
                     # mismatches during daemon reprocessing.
-                    for ta, sia in zip(t.args, self.s.args):
-                        args.append(self.meet(ta, sia))
+                    if t.type.has_type_var_tuple_type:
+                        # We handle meet of variadic instances by simply creating correct mapping
+                        # for type arguments and compute the individual meets same as for regular
+                        # instances. All the heavy lifting is done in the meet of tuple types.
+                        s = self.s
+                        assert s.type.type_var_tuple_prefix is not None
+                        assert s.type.type_var_tuple_suffix is not None
+                        prefix = s.type.type_var_tuple_prefix
+                        suffix = s.type.type_var_tuple_suffix
+                        tvt = s.type.defn.type_vars[prefix]
+                        assert isinstance(tvt, TypeVarTupleType)
+                        fallback = tvt.tuple_fallback
+                        s_prefix, s_middle, s_suffix = split_with_prefix_and_suffix(
+                            s.args, prefix, suffix
+                        )
+                        t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix(
+                            t.args, prefix, suffix
+                        )
+                        s_args = s_prefix + (TupleType(list(s_middle), fallback),) + s_suffix
+                        t_args = t_prefix + (TupleType(list(t_middle), fallback),) + t_suffix
+                    else:
+                        t_args = t.args
+                        s_args = self.s.args
+                    for ta, sa, tv in zip(t_args, s_args, t.type.defn.type_vars):
+                        meet = self.meet(ta, sa)
+                        if isinstance(tv, TypeVarTupleType):
+                            # Correctly unpack possible outcomes of meets of tuples: it can be
+                            # either another tuple type or Never (normalized as *tuple[Never, ...])
+                            if isinstance(meet, TupleType):
+                                args.extend(meet.items)
+                                continue
+                            else:
+                                assert isinstance(meet, UninhabitedType)
+                                meet = UnpackType(tv.tuple_fallback.copy_modified(args=[meet]))
+                        args.append(meet)
                     return Instance(t.type, args)
                 else:
                     if state.strict_optional:
@@ -811,11 +846,82 @@ def visit_overloaded(self, t: Overloaded) -> ProperType:
                 return meet_types(t, call)
         return meet_types(t.fallback, s)
 
+    def meet_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None:
+        """Meet two tuple types while handling variadic entries.
+
+        This is surprisingly tricky, and we don't handle some tricky corner cases.
+        Most of the trickiness comes from the variadic tuple items like *tuple[X, ...]
+        since they can have arbitrary partial overlaps (while *Ts can't be split). This
+        function is roughly a mirror of join_tuples() w.r.t. the fact that fixed
+        tuples are subtypes of variadic ones but not vice versa.
+        """
+        s_unpack_index = find_unpack_in_list(s.items)
+        t_unpack_index = find_unpack_in_list(t.items)
+        if s_unpack_index is None and t_unpack_index is None:
+            if s.length() == t.length():
+                items: list[Type] = []
+                for i in range(t.length()):
+                    items.append(self.meet(t.items[i], s.items[i]))
+                return items
+            return None
+        if s_unpack_index is not None and t_unpack_index is not None:
+            # The only simple case we can handle if both tuples are variadic
+            # is when they are purely variadic. Other cases are tricky because
+            # a variadic item is effectively a union of tuples of all length, thus
+            # potentially causing overlap between a suffix in `s` and a prefix
+            # in `t` (see how this is handled in is_subtype() for details).
+            # TODO: handle more cases (like when both prefix/suffix are shorter in s or t).
+            if s.length() == 1 and t.length() == 1:
+                s_unpack = s.items[0]
+                assert isinstance(s_unpack, UnpackType)
+                s_unpacked = get_proper_type(s_unpack.type)
+                t_unpack = t.items[0]
+                assert isinstance(t_unpack, UnpackType)
+                t_unpacked = get_proper_type(t_unpack.type)
+                if not (isinstance(s_unpacked, Instance) and isinstance(t_unpacked, Instance)):
+                    return None
+                meet = self.meet(s_unpacked, t_unpacked)
+                if not isinstance(meet, Instance):
+                    return None
+                return [UnpackType(meet)]
+            return None
+        if s_unpack_index is not None:
+            variadic = s
+            unpack_index = s_unpack_index
+            fixed = t
+        else:
+            assert t_unpack_index is not None
+            variadic = t
+            unpack_index = t_unpack_index
+            fixed = s
+        # If one tuple is variadic, and the other one is fixed, the meet will be fixed.
+        unpack = variadic.items[unpack_index]
+        assert isinstance(unpack, UnpackType)
+        unpacked = get_proper_type(unpack.type)
+        if not isinstance(unpacked, Instance):
+            return None
+        if fixed.length() < variadic.length() - 1:
+            return None
+        prefix_len = unpack_index
+        suffix_len = variadic.length() - prefix_len - 1
+        prefix, middle, suffix = split_with_prefix_and_suffix(
+            tuple(fixed.items), prefix_len, suffix_len
+        )
+        items = []
+        for fi, vi in zip(prefix, variadic.items[:prefix_len]):
+            items.append(self.meet(fi, vi))
+        for mi in middle:
+            items.append(self.meet(mi, unpacked.args[0]))
+        if suffix_len:
+            for fi, vi in zip(suffix, variadic.items[-suffix_len:]):
+                items.append(self.meet(fi, vi))
+        return items
+
     def visit_tuple_type(self, t: TupleType) -> ProperType:
-        if isinstance(self.s, TupleType) and self.s.length() == t.length():
-            items: list[Type] = []
-            for i in range(t.length()):
-                items.append(self.meet(t.items[i], self.s.items[i]))
+        if isinstance(self.s, TupleType):
+            items = self.meet_tuples(self.s, t)
+            if items is None:
+                return self.default(self.s)
             # TODO: What if the fallbacks are different?
             return TupleType(items, tuple_fallback(t))
         elif isinstance(self.s, Instance):
@@ -825,6 +931,10 @@ def visit_tuple_type(self, t: TupleType) -> ProperType:
             elif is_proper_subtype(t, self.s):
                 # A named tuple that inherits from a normal class
                 return t
+            elif self.s.type.has_type_var_tuple_type and is_subtype(t, self.s):
+                # This is a bit ad-hoc but more principled handling is tricky, and this
+                # special case is important for type narrowing in binder to work.
+                return t
         return self.default(self.s)
 
     def visit_typeddict_type(self, t: TypedDictType) -> ProperType:
diff --git a/mypy/semanal.py b/mypy/semanal.py
index ec4d32aefeb9..70403eed57ae 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -4414,7 +4414,8 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool:
             typevartuple_var = TypeVarTupleExpr(
                 name,
                 self.qualified_name(name),
-                self.object_type(),
+                # Upper bound for *Ts is *tuple[object, ...], it can never be object.
+                tuple_fallback.copy_modified(),
                 tuple_fallback,
                 default,
                 INVARIANT,
diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py
index ed04b30e90ba..a25bab8de054 100644
--- a/mypy/semanal_typeargs.py
+++ b/mypy/semanal_typeargs.py
@@ -207,7 +207,8 @@ def visit_unpack_type(self, typ: UnpackType) -> None:
             return
         if isinstance(proper_type, TypeVarTupleType):
             return
-        # TODO: this should probably be .has_base("builtins.tuple"), also elsewhere.
+        # TODO: this should probably be .has_base("builtins.tuple"), also elsewhere. This is
+        # tricky however, since this needs map_instance_to_supertype() available in many places.
         if isinstance(proper_type, Instance) and proper_type.type.fullname == "builtins.tuple":
             return
         if not isinstance(proper_type, (UnboundType, AnyType)):
diff --git a/mypy/solve.py b/mypy/solve.py
index 17e1ca047818..7cdf1c10c9b5 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -144,6 +144,8 @@ def solve_with_dependent(
         if all(not lowers[tv] and not uppers[tv] for tv in scc):
             best_free = choose_free([originals[tv] for tv in scc], original_vars)
             if best_free:
+                # TODO: failing to choose may cause leaking type variables,
+                # we need to fail gracefully instead.
                 free_vars.append(best_free.id)
                 free_solutions[best_free.id] = best_free
 
@@ -323,13 +325,15 @@ def test(x: U) -> U: ...
     best = sorted(scc, key=lambda x: (x.id not in original_vars, x.id.raw_id))[0]
     if isinstance(best, TypeVarType):
         return best.copy_modified(values=values, upper_bound=common_upper_bound)
-    if is_trivial_bound(common_upper_bound_p):
+    if is_trivial_bound(common_upper_bound_p, allow_tuple=True):
         # TODO: support more cases for ParamSpecs/TypeVarTuples
         return best
     return None
 
 
-def is_trivial_bound(tp: ProperType) -> bool:
+def is_trivial_bound(tp: ProperType, allow_tuple: bool = False) -> bool:
+    if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple":
+        return allow_tuple and is_trivial_bound(get_proper_type(tp.args[0]))
     return isinstance(tp, Instance) and tp.type.fullname == "builtins.object"
 
 
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 58ae4efdf582..fdde1c24670e 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -58,13 +58,14 @@
     UninhabitedType,
     UnionType,
     UnpackType,
+    find_unpack_in_list,
     get_proper_type,
     is_named_instance,
+    split_with_prefix_and_suffix,
 )
 from mypy.types_utils import flatten_types
 from mypy.typestate import SubtypeKind, type_state
 from mypy.typevars import fill_typevars_with_any
-from mypy.typevartuples import extract_unpack, fully_split_with_mapped_and_template
 
 # Flags for detected protocol members
 IS_SETTABLE: Final = 1
@@ -278,7 +279,13 @@ def _is_subtype(
     left = get_proper_type(left)
     right = get_proper_type(right)
 
-    if not proper_subtype and isinstance(right, (AnyType, UnboundType, ErasedType)):
+    # Note: Unpack type should not be a subtype of Any, since it may represent
+    # multiple types. This should always go through the visitor, to check arity.
+    if (
+        not proper_subtype
+        and isinstance(right, (AnyType, UnboundType, ErasedType))
+        and not isinstance(left, UnpackType)
+    ):
         # TODO: should we consider all types proper subtypes of UnboundType and/or
         # ErasedType as we do for non-proper subtyping.
         return True
@@ -437,6 +444,34 @@ def visit_instance(self, left: Instance) -> bool:
         right = self.right
         if isinstance(right, TupleType) and right.partial_fallback.type.is_enum:
             return self._is_subtype(left, mypy.typeops.tuple_fallback(right))
+        if isinstance(right, TupleType):
+            if len(right.items) == 1:
+                # Non-normalized Tuple type (may be left after semantic analysis
+                # because semanal_typearg visitor is not a type translator).
+                item = right.items[0]
+                if isinstance(item, UnpackType):
+                    unpacked = get_proper_type(item.type)
+                    if isinstance(unpacked, Instance):
+                        return self._is_subtype(left, unpacked)
+            if left.type.has_base(right.partial_fallback.type.fullname):
+                # Special case to consider Foo[*tuple[Any, ...]] (i.e. bare Foo) a
+                # subtype of Foo[<whatever>], when Foo is user defined variadic tuple type.
+                mapped = map_instance_to_supertype(left, right.partial_fallback.type)
+                if len(mapped.args) == 1 and isinstance(mapped.args[0], UnpackType):
+                    unpacked = get_proper_type(mapped.args[0].type)
+                    if isinstance(unpacked, Instance):
+                        assert unpacked.type.fullname == "builtins.tuple"
+                        if isinstance(get_proper_type(unpacked.args[0]), AnyType):
+                            return not self.proper_subtype
+            # TODO: we need a special case similar to above to consider (something that maps to)
+            # tuple[Any, ...] a subtype of Tuple[<whatever>].
+            return False
+        if isinstance(right, TypeVarTupleType):
+            # tuple[Any, ...] is like Any in the world of tuples (see special case above).
+            if left.type.has_base("builtins.tuple"):
+                mapped = map_instance_to_supertype(left, right.tuple_fallback.type)
+                if isinstance(get_proper_type(mapped.args[0]), AnyType):
+                    return not self.proper_subtype
         if isinstance(right, Instance):
             if type_state.is_cached_subtype_check(self._subtype_kind, left, right):
                 return True
@@ -476,106 +511,37 @@ def visit_instance(self, left: Instance) -> bool:
                     t = erased
                 nominal = True
                 if right.type.has_type_var_tuple_type:
-                    assert left.type.type_var_tuple_prefix is not None
-                    assert left.type.type_var_tuple_suffix is not None
+                    # For variadic instances we simply find the correct type argument mappings,
+                    # all the heavy lifting is done by the tuple subtyping.
                     assert right.type.type_var_tuple_prefix is not None
                     assert right.type.type_var_tuple_suffix is not None
-                    split_result = fully_split_with_mapped_and_template(
-                        left.args,
-                        left.type.type_var_tuple_prefix,
-                        left.type.type_var_tuple_suffix,
-                        right.args,
-                        right.type.type_var_tuple_prefix,
-                        right.type.type_var_tuple_suffix,
+                    prefix = right.type.type_var_tuple_prefix
+                    suffix = right.type.type_var_tuple_suffix
+                    tvt = right.type.defn.type_vars[prefix]
+                    assert isinstance(tvt, TypeVarTupleType)
+                    fallback = tvt.tuple_fallback
+                    left_prefix, left_middle, left_suffix = split_with_prefix_and_suffix(
+                        t.args, prefix, suffix
                     )
-                    if split_result is None:
-                        return False
-
-                    (
-                        left_prefix,
-                        left_mprefix,
-                        left_middle,
-                        left_msuffix,
-                        left_suffix,
-                        right_prefix,
-                        right_mprefix,
-                        right_middle,
-                        right_msuffix,
-                        right_suffix,
-                    ) = split_result
-
-                    left_unpacked = extract_unpack(left_middle)
-                    right_unpacked = extract_unpack(right_middle)
-
-                    # Helper for case 2 below so we can treat them the same.
-                    def check_mixed(
-                        unpacked_type: ProperType, compare_to: tuple[Type, ...]
-                    ) -> bool:
-                        if (
-                            isinstance(unpacked_type, Instance)
-                            and unpacked_type.type.fullname == "builtins.tuple"
-                        ):
-                            return all(is_equivalent(l, unpacked_type.args[0]) for l in compare_to)
-                        if isinstance(unpacked_type, TypeVarTupleType):
-                            return False
-                        if isinstance(unpacked_type, AnyType):
-                            return True
-                        if isinstance(unpacked_type, TupleType):
-                            if len(unpacked_type.items) != len(compare_to):
-                                return False
-                            for t1, t2 in zip(unpacked_type.items, compare_to):
-                                if not is_equivalent(t1, t2):
-                                    return False
-                            return True
-                        return False
-
-                    # Case 1: Both are unpacks, in this case we check what is being
-                    # unpacked is the same.
-                    if left_unpacked is not None and right_unpacked is not None:
-                        if not is_equivalent(left_unpacked, right_unpacked):
-                            return False
-
-                    # Case 2: Only one of the types is an unpack. The equivalence
-                    # case is mostly the same but we check some additional
-                    # things when unpacking on the right.
-                    elif left_unpacked is not None and right_unpacked is None:
-                        if not check_mixed(left_unpacked, right_middle):
-                            return False
-                    elif left_unpacked is None and right_unpacked is not None:
-                        if not check_mixed(right_unpacked, left_middle):
-                            return False
-
-                    # Case 3: Neither type is an unpack. In this case we just compare
-                    # the items themselves.
-                    else:
-                        if len(left_middle) != len(right_middle):
-                            return False
-                        for left_t, right_t in zip(left_middle, right_middle):
-                            if not is_equivalent(left_t, right_t):
-                                return False
-
-                    assert len(left_mprefix) == len(right_mprefix)
-                    assert len(left_msuffix) == len(right_msuffix)
-
-                    for left_item, right_item in zip(
-                        left_mprefix + left_msuffix, right_mprefix + right_msuffix
-                    ):
-                        if not is_equivalent(left_item, right_item):
-                            return False
-
-                    left_items = t.args[: right.type.type_var_tuple_prefix]
-                    right_items = right.args[: right.type.type_var_tuple_prefix]
-                    if right.type.type_var_tuple_suffix:
-                        left_items += t.args[-right.type.type_var_tuple_suffix :]
-                        right_items += right.args[-right.type.type_var_tuple_suffix :]
-                    unpack_index = right.type.type_var_tuple_prefix
-                    assert unpack_index is not None
-                    type_params = zip(
-                        left_prefix + left_suffix,
-                        right_prefix + right_suffix,
-                        right.type.defn.type_vars[:unpack_index]
-                        + right.type.defn.type_vars[unpack_index + 1 :],
+                    right_prefix, right_middle, right_suffix = split_with_prefix_and_suffix(
+                        right.args, prefix, suffix
+                    )
+                    left_args = (
+                        left_prefix + (TupleType(list(left_middle), fallback),) + left_suffix
                     )
+                    right_args = (
+                        right_prefix + (TupleType(list(right_middle), fallback),) + right_suffix
+                    )
+                    if len(t.args) == 1 and isinstance(t.args[0], UnpackType):
+                        unpacked = get_proper_type(t.args[0].type)
+                        if isinstance(unpacked, Instance):
+                            assert unpacked.type.fullname == "builtins.tuple"
+                            if (
+                                isinstance(get_proper_type(unpacked.args[0]), AnyType)
+                                and not self.proper_subtype
+                            ):
+                                return True
+                    type_params = zip(left_args, right_args, right.type.defn.type_vars)
                 else:
                     type_params = zip(t.args, right.args, right.type.defn.type_vars)
                 if not self.subtype_context.ignore_type_params:
@@ -761,8 +727,12 @@ def visit_tuple_type(self, left: TupleType) -> bool:
                 return True
             return False
         elif isinstance(right, TupleType):
+            # If right has a variadic unpack this needs special handling. If there is a TypeVarTuple
+            # unpack, item count must coincide. If the left has variadic unpack but right
+            # doesn't have one, we will fall through to False down the line.
+            if self.variadic_tuple_subtype(left, right):
+                return True
             if len(left.items) != len(right.items):
-                # TODO: handle tuple with variadic items better.
                 return False
             if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)):
                 return False
@@ -778,6 +748,79 @@ def visit_tuple_type(self, left: TupleType) -> bool:
         else:
             return False
 
+    def variadic_tuple_subtype(self, left: TupleType, right: TupleType) -> bool:
+        """Check subtyping between two potentially variadic tuples.
+
+        Most non-trivial cases here are due to variadic unpacks like *tuple[X, ...],
+        we handle such unpacks as infinite unions Tuple[()] | Tuple[X] | Tuple[X, X] | ...
+
+        Note: the cases where right is fixed or has *Ts unpack should be handled
+        by the caller.
+        """
+        right_unpack_index = find_unpack_in_list(right.items)
+        if right_unpack_index is None:
+            # This case should be handled by the caller.
+            return False
+        right_unpack = right.items[right_unpack_index]
+        assert isinstance(right_unpack, UnpackType)
+        right_unpacked = get_proper_type(right_unpack.type)
+        if not isinstance(right_unpacked, Instance):
+            # This case should be handled by the caller.
+            return False
+        assert right_unpacked.type.fullname == "builtins.tuple"
+        right_item = right_unpacked.args[0]
+        right_prefix = right_unpack_index
+        right_suffix = len(right.items) - right_prefix - 1
+        left_unpack_index = find_unpack_in_list(left.items)
+        if left_unpack_index is None:
+            # Simple case: left is fixed, simply find correct mapping to the right
+            # (effectively selecting item with matching length from an infinite union).
+            if len(left.items) < right_prefix + right_suffix:
+                return False
+            prefix, middle, suffix = split_with_prefix_and_suffix(
+                tuple(left.items), right_prefix, right_suffix
+            )
+            if not all(
+                self._is_subtype(li, ri) for li, ri in zip(prefix, right.items[:right_prefix])
+            ):
+                return False
+            if right_suffix and not all(
+                self._is_subtype(li, ri) for li, ri in zip(suffix, right.items[-right_suffix:])
+            ):
+                return False
+            return all(self._is_subtype(li, right_item) for li in middle)
+        else:
+            if len(left.items) < len(right.items):
+                # There are some items on the left that will never have a matching length
+                # on the right.
+                return False
+            left_unpack = left.items[left_unpack_index]
+            assert isinstance(left_unpack, UnpackType)
+            left_unpacked = get_proper_type(left_unpack.type)
+            if not isinstance(left_unpacked, Instance):
+                # *Ts unpacks can't be split.
+                return False
+            assert left_unpacked.type.fullname == "builtins.tuple"
+            left_item = left_unpacked.args[0]
+
+            # The most tricky case with two variadic unpacks we handle similar to union
+            # subtyping: *each* item on the left, must be a subtype of *some* item on the right.
+            # For this we first check the "asymptotic case", i.e. that both unpacks are subtypes,
+            # and then check subtyping for all finite overlaps.
+            if not self._is_subtype(left_item, right_item):
+                return False
+            left_prefix = left_unpack_index
+            left_suffix = len(left.items) - left_prefix - 1
+            max_overlap = max(0, right_prefix - left_prefix, right_suffix - left_suffix)
+            for overlap in range(max_overlap + 1):
+                repr_items = left.items[:left_prefix] + [left_item] * overlap
+                if left_suffix:
+                    repr_items += left.items[-left_suffix:]
+                left_repr = left.copy_modified(items=repr_items)
+                if not self._is_subtype(left_repr, right):
+                    return False
+            return True
+
     def visit_typeddict_type(self, left: TypedDictType) -> bool:
         right = self.right
         if isinstance(right, Instance):
diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py
index f40996145cba..5ec292f07056 100644
--- a/mypy/test/testconstraints.py
+++ b/mypy/test/testconstraints.py
@@ -82,40 +82,11 @@ def test_unpack_homogenous_tuple_with_prefix_and_suffix(self) -> None:
             Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d),
         }
 
-    def test_unpack_tuple(self) -> None:
-        fx = self.fx
-        assert set(
-            infer_constraints(
-                Instance(
-                    fx.gvi,
-                    [
-                        UnpackType(
-                            TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o]))
-                        )
-                    ],
-                ),
-                Instance(fx.gvi, [fx.a, fx.b]),
-                SUPERTYPE_OF,
-            )
-        ) == {
-            Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a),
-            Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.b),
-        }
-
     def test_unpack_with_prefix_and_suffix(self) -> None:
         fx = self.fx
         assert set(
             infer_constraints(
-                Instance(
-                    fx.gv2i,
-                    [
-                        fx.u,
-                        UnpackType(
-                            TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o]))
-                        ),
-                        fx.u,
-                    ],
-                ),
+                Instance(fx.gv2i, [fx.u, fx.t, fx.s, fx.u]),
                 Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]),
                 SUPERTYPE_OF,
             )
@@ -130,16 +101,7 @@ def test_unpack_tuple_length_non_match(self) -> None:
         fx = self.fx
         assert set(
             infer_constraints(
-                Instance(
-                    fx.gv2i,
-                    [
-                        fx.u,
-                        UnpackType(
-                            TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o]))
-                        ),
-                        fx.u,
-                    ],
-                ),
+                Instance(fx.gv2i, [fx.u, fx.t, fx.s, fx.u]),
                 Instance(fx.gv2i, [fx.a, fx.b, fx.d]),
                 SUPERTYPE_OF,
             )
diff --git a/mypy/test/testsubtypes.py b/mypy/test/testsubtypes.py
index 464f64d2b846..480fe38a90a7 100644
--- a/mypy/test/testsubtypes.py
+++ b/mypy/test/testsubtypes.py
@@ -4,7 +4,7 @@
 from mypy.subtypes import is_subtype
 from mypy.test.helpers import Suite
 from mypy.test.typefixture import InterfaceTypeFixture, TypeFixture
-from mypy.types import Instance, TupleType, Type, UnpackType
+from mypy.types import Instance, Type, UnpackType
 
 
 class SubtypingSuite(Suite):
@@ -221,10 +221,6 @@ def test_type_var_tuple(self) -> None:
             Instance(self.fx.gvi, [UnpackType(self.fx.us)]),
         )
 
-        self.assert_subtype(
-            Instance(self.fx.gvi, [UnpackType(self.fx.anyt)]),
-            Instance(self.fx.gvi, [self.fx.anyt]),
-        )
         self.assert_not_subtype(
             Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), Instance(self.fx.gvi, [])
         )
@@ -272,83 +268,8 @@ def test_type_var_tuple_with_prefix_suffix(self) -> None:
             Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss), self.fx.b, self.fx.c]),
         )
 
-    def test_type_var_tuple_unpacked_varlength_tuple(self) -> None:
-        self.assert_subtype(
-            Instance(
-                self.fx.gvi,
-                [
-                    UnpackType(
-                        TupleType(
-                            [self.fx.a, self.fx.b],
-                            fallback=Instance(self.fx.std_tuplei, [self.fx.o]),
-                        )
-                    )
-                ],
-            ),
-            Instance(self.fx.gvi, [self.fx.a, self.fx.b]),
-        )
-
-    def test_type_var_tuple_unpacked_tuple(self) -> None:
-        self.assert_subtype(
-            Instance(
-                self.fx.gvi,
-                [
-                    UnpackType(
-                        TupleType(
-                            [self.fx.a, self.fx.b],
-                            fallback=Instance(self.fx.std_tuplei, [self.fx.o]),
-                        )
-                    )
-                ],
-            ),
-            Instance(self.fx.gvi, [self.fx.a, self.fx.b]),
-        )
-        self.assert_subtype(
-            Instance(
-                self.fx.gvi,
-                [
-                    UnpackType(
-                        TupleType(
-                            [self.fx.a, self.fx.b],
-                            fallback=Instance(self.fx.std_tuplei, [self.fx.o]),
-                        )
-                    )
-                ],
-            ),
-            Instance(self.fx.gvi, [self.fx.anyt, self.fx.anyt]),
-        )
-        self.assert_not_subtype(
-            Instance(
-                self.fx.gvi,
-                [
-                    UnpackType(
-                        TupleType(
-                            [self.fx.a, self.fx.b],
-                            fallback=Instance(self.fx.std_tuplei, [self.fx.o]),
-                        )
-                    )
-                ],
-            ),
-            Instance(self.fx.gvi, [self.fx.a]),
-        )
-        self.assert_not_subtype(
-            Instance(
-                self.fx.gvi,
-                [
-                    UnpackType(
-                        TupleType(
-                            [self.fx.a, self.fx.b],
-                            fallback=Instance(self.fx.std_tuplei, [self.fx.o]),
-                        )
-                    )
-                ],
-            ),
-            # Order flipped here.
-            Instance(self.fx.gvi, [self.fx.b, self.fx.a]),
-        )
-
     def test_type_var_tuple_unpacked_variable_length_tuple(self) -> None:
-        self.assert_equivalent(
+        self.assert_subtype(
             Instance(self.fx.gvi, [self.fx.a, self.fx.a]),
             Instance(self.fx.gvi, [UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))]),
         )
diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py
index 59457dfa5d3b..e8dd623bec53 100644
--- a/mypy/test/testtypes.py
+++ b/mypy/test/testtypes.py
@@ -47,6 +47,7 @@
     UnboundType,
     UninhabitedType,
     UnionType,
+    UnpackType,
     get_proper_type,
     has_recursive_types,
 )
@@ -986,6 +987,54 @@ def test_literal_type(self) -> None:
             UnionType([lit2, lit3]), UnionType([lit1, lit2]), UnionType([lit2, lit3, lit1])
         )
 
+    def test_variadic_tuple_joins(self) -> None:
+        # These tests really test just the "arity", to be sure it is handled correctly.
+        self.assert_join(
+            self.tuple(self.fx.a, self.fx.a),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+        )
+        self.assert_join(
+            self.tuple(self.fx.a, self.fx.a),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a),
+        )
+        self.assert_join(
+            self.tuple(self.fx.a, self.fx.a),
+            self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+            self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+        )
+        self.assert_join(
+            self.tuple(
+                self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a
+            ),
+            self.tuple(
+                self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a
+            ),
+            self.tuple(
+                self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a
+            ),
+        )
+        self.assert_join(
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+            self.tuple(
+                self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a
+            ),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+        )
+        self.assert_join(
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+        )
+        self.assert_join(
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a),
+            self.tuple(
+                self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b])), self.fx.b
+            ),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a),
+        )
+
     # There are additional test cases in check-inference.test.
 
     # TODO: Function types + varargs and default args.
@@ -1221,6 +1270,34 @@ def assert_meet_uninhabited(self, s: Type, t: Type) -> None:
         with state.strict_optional_set(True):
             self.assert_meet(s, t, self.fx.uninhabited)
 
+    def test_variadic_tuple_meets(self) -> None:
+        # These tests really test just the "arity", to be sure it is handled correctly.
+        self.assert_meet(
+            self.tuple(self.fx.a, self.fx.a),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+            self.tuple(self.fx.a, self.fx.a),
+        )
+        self.assert_meet(
+            self.tuple(self.fx.a, self.fx.a),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a),
+            self.tuple(self.fx.a, self.fx.a),
+        )
+        self.assert_meet(
+            self.tuple(self.fx.a, self.fx.a),
+            self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+            self.tuple(self.fx.a, self.fx.a),
+        )
+        self.assert_meet(
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))),
+        )
+        self.assert_meet(
+            self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a),
+            self.tuple(self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b]))),
+            self.tuple(self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b]))),
+        )
+
     def assert_meet(self, s: Type, t: Type, meet: Type) -> None:
         self.assert_simple_meet(s, t, meet)
         self.assert_simple_meet(t, s, meet)
diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py
index 81af765f8585..b7bde16e6be2 100644
--- a/mypy/test/typefixture.py
+++ b/mypy/test/typefixture.py
@@ -233,9 +233,10 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy
                 AnyType(TypeOfAny.from_omitted_generics),
             )
 
-        self.ts = make_type_var_tuple("Ts", 1, self.o)  # Ts`1 (type var tuple)
-        self.ss = make_type_var_tuple("Ss", 2, self.o)  # Ss`2 (type var tuple)
-        self.us = make_type_var_tuple("Us", 3, self.o)  # Us`3 (type var tuple)
+        obj_tuple = self.std_tuple.copy_modified(args=[self.o])
+        self.ts = make_type_var_tuple("Ts", 1, obj_tuple)  # Ts`1 (type var tuple)
+        self.ss = make_type_var_tuple("Ss", 2, obj_tuple)  # Ss`2 (type var tuple)
+        self.us = make_type_var_tuple("Us", 3, obj_tuple)  # Us`3 (type var tuple)
 
         self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0)
         self.gv2i = self.make_type_info(
@@ -325,8 +326,8 @@ def make_type_info(
                             n,
                             n,
                             id,
-                            self.o,
-                            self.std_tuple,
+                            self.std_tuple.copy_modified(args=[self.o]),
+                            self.std_tuple.copy_modified(args=[self.o]),
                             AnyType(TypeOfAny.from_omitted_generics),
                         )
                     )
diff --git a/mypy/typeops.py b/mypy/typeops.py
index 3efa3cc3e965..3f50232f04c1 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -104,8 +104,8 @@ def tuple_fallback(typ: TupleType) -> Instance:
         if isinstance(item, UnpackType):
             unpacked_type = get_proper_type(item.type)
             if isinstance(unpacked_type, TypeVarTupleType):
-                items.append(unpacked_type.upper_bound)
-            elif (
+                unpacked_type = get_proper_type(unpacked_type.upper_bound)
+            if (
                 isinstance(unpacked_type, Instance)
                 and unpacked_type.type.fullname == "builtins.tuple"
             ):
@@ -654,8 +654,7 @@ def erase_def_to_union_or_bound(tdef: TypeVarLikeType) -> Type:
     # TODO(PEP612): fix for ParamSpecType
     if isinstance(tdef, ParamSpecType):
         return AnyType(TypeOfAny.from_error)
-    assert isinstance(tdef, TypeVarType)
-    if tdef.values:
+    if isinstance(tdef, TypeVarType) and tdef.values:
         return make_simplified_union(tdef.values)
     else:
         return tdef.upper_bound
diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py
index bcb5e96b615c..af2effbd4035 100644
--- a/mypy/typevartuples.py
+++ b/mypy/typevartuples.py
@@ -9,7 +9,6 @@
     ProperType,
     Type,
     UnpackType,
-    find_unpack_in_list,
     get_proper_type,
     split_with_prefix_and_suffix,
 )
@@ -25,139 +24,6 @@ def split_with_instance(
     )
 
 
-def split_with_mapped_and_template(
-    mapped: tuple[Type, ...],
-    mapped_prefix_len: int | None,
-    mapped_suffix_len: int | None,
-    template: tuple[Type, ...],
-    template_prefix_len: int,
-    template_suffix_len: int,
-) -> (
-    tuple[
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-    ]
-    | None
-):
-    split_result = fully_split_with_mapped_and_template(
-        mapped,
-        mapped_prefix_len,
-        mapped_suffix_len,
-        template,
-        template_prefix_len,
-        template_suffix_len,
-    )
-    if split_result is None:
-        return None
-
-    (
-        mapped_prefix,
-        mapped_middle_prefix,
-        mapped_middle_middle,
-        mapped_middle_suffix,
-        mapped_suffix,
-        template_prefix,
-        template_middle_prefix,
-        template_middle_middle,
-        template_middle_suffix,
-        template_suffix,
-    ) = split_result
-
-    return (
-        mapped_prefix + mapped_middle_prefix,
-        mapped_middle_middle,
-        mapped_middle_suffix + mapped_suffix,
-        template_prefix + template_middle_prefix,
-        template_middle_middle,
-        template_middle_suffix + template_suffix,
-    )
-
-
-def fully_split_with_mapped_and_template(
-    mapped: tuple[Type, ...],
-    mapped_prefix_len: int | None,
-    mapped_suffix_len: int | None,
-    template: tuple[Type, ...],
-    template_prefix_len: int,
-    template_suffix_len: int,
-) -> (
-    tuple[
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-        tuple[Type, ...],
-    ]
-    | None
-):
-    if mapped_prefix_len is not None:
-        assert mapped_suffix_len is not None
-        mapped_prefix, mapped_middle, mapped_suffix = split_with_prefix_and_suffix(
-            tuple(mapped), mapped_prefix_len, mapped_suffix_len
-        )
-    else:
-        mapped_prefix = tuple()
-        mapped_suffix = tuple()
-        mapped_middle = mapped
-
-    template_prefix, template_middle, template_suffix = split_with_prefix_and_suffix(
-        tuple(template), template_prefix_len, template_suffix_len
-    )
-
-    unpack_prefix = find_unpack_in_list(template_middle)
-    if unpack_prefix is None:
-        return (
-            mapped_prefix,
-            (),
-            mapped_middle,
-            (),
-            mapped_suffix,
-            template_prefix,
-            (),
-            template_middle,
-            (),
-            template_suffix,
-        )
-
-    unpack_suffix = len(template_middle) - unpack_prefix - 1
-    # mapped_middle is too short to do the unpack
-    if unpack_prefix + unpack_suffix > len(mapped_middle):
-        return None
-
-    (
-        mapped_middle_prefix,
-        mapped_middle_middle,
-        mapped_middle_suffix,
-    ) = split_with_prefix_and_suffix(mapped_middle, unpack_prefix, unpack_suffix)
-    (
-        template_middle_prefix,
-        template_middle_middle,
-        template_middle_suffix,
-    ) = split_with_prefix_and_suffix(template_middle, unpack_prefix, unpack_suffix)
-
-    return (
-        mapped_prefix,
-        mapped_middle_prefix,
-        mapped_middle_middle,
-        mapped_middle_suffix,
-        mapped_suffix,
-        template_prefix,
-        template_middle_prefix,
-        template_middle_middle,
-        template_middle_suffix,
-        template_suffix,
-    )
-
-
 def extract_unpack(types: Sequence[Type]) -> ProperType | None:
     """Given a list of types, extracts either a single type from an unpack, or returns None."""
     if len(types) == 1:
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index b4cd21aa552c..06f87a26e7a1 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -6460,3 +6460,22 @@ P = ParamSpec("P")
 class C(Generic[P]):
     def __init__(self, fn: Callable[P, int]) -> None: ...
 [builtins fixtures/dict.pyi]
+
+[case testVariadicTupleIncrementalUpdateNoCrash]
+import m
+[file m.py]
+from typing import Any
+from lib import C
+
+x: C[Any]
+[file m.py.2]
+from lib import C
+
+x: C[int]
+[file lib.py]
+from typing import Generic, Tuple, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Tuple[Unpack[Ts]]): ...
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 2b47ff30cdfb..d38d492fe9b2 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -1221,7 +1221,7 @@ def foo(x: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]:
 
 [case testTypeVarTupleWithIsInstance]
 # flags: --warn-unreachable
-from typing import Tuple
+from typing import Generic, Tuple
 from typing_extensions import TypeVarTuple, Unpack
 
 TP = TypeVarTuple("TP")
@@ -1232,4 +1232,287 @@ def test(d: A[int, str]) -> None:
         reveal_type(d)  # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.A[builtins.int, builtins.str]]"
     else:
         reveal_type(d)  # E: Statement is unreachable
+
+class B(Generic[Unpack[TP]]): ...
+
+def test2(d: B[int, str]) -> None:
+    if isinstance(d, B):
+        reveal_type(d)  # N: Revealed type is "__main__.B[builtins.int, builtins.str]"
+    else:
+        reveal_type(d)  # E: Statement is unreachable
 [builtins fixtures/isinstancelist.pyi]
+
+[case testVariadicTupleSubtyping]
+from typing import Tuple
+from typing_extensions import Unpack
+
+def f1(x: Tuple[float, ...]) -> None: ...
+def f2(x: Tuple[float, Unpack[Tuple[float, ...]]]) -> None: ...
+def f3(x: Tuple[Unpack[Tuple[float, ...]], float]) -> None: ...
+def f4(x: Tuple[float, Unpack[Tuple[float, ...]], float]) -> None: ...
+
+t1: Tuple[int, int]
+t2: Tuple[int, Unpack[Tuple[int, ...]]]
+t3: Tuple[Unpack[Tuple[int, ...]], int]
+t4: Tuple[int, Unpack[Tuple[int, ...]], int]
+t5: Tuple[int, ...]
+
+tl: Tuple[int, int, Unpack[Tuple[int, ...]]]
+tr: Tuple[Unpack[Tuple[int, ...]], int, int]
+
+f1(t1)
+f1(t2)
+f1(t3)
+f1(t4)
+f1(t5)
+
+f1(tl)
+f1(tr)
+
+f2(t1)
+f2(t2)
+f2(t3)
+f2(t4)
+f2(t5)  # E: Argument 1 to "f2" has incompatible type "Tuple[int, ...]"; expected "Tuple[float, Unpack[Tuple[float, ...]]]"
+
+f2(tl)
+f2(tr)
+
+f3(t1)
+f3(t2)
+f3(t3)
+f3(t4)
+f3(t5)  # E: Argument 1 to "f3" has incompatible type "Tuple[int, ...]"; expected "Tuple[Unpack[Tuple[float, ...]], float]"
+
+f3(tl)
+f3(tr)
+
+f4(t1)
+f4(t2)  # E: Argument 1 to "f4" has incompatible type "Tuple[int, Unpack[Tuple[int, ...]]]"; expected "Tuple[float, Unpack[Tuple[float, ...]], float]"
+f4(t3)  # E: Argument 1 to "f4" has incompatible type "Tuple[Unpack[Tuple[int, ...]], int]"; expected "Tuple[float, Unpack[Tuple[float, ...]], float]"
+f4(t4)
+f4(t5)  # E: Argument 1 to "f4" has incompatible type "Tuple[int, ...]"; expected "Tuple[float, Unpack[Tuple[float, ...]], float]"
+
+f4(tl)
+f4(tr)
+
+t5_verbose: Tuple[Unpack[Tuple[int, ...]]]
+t5 = t5_verbose  # OK
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicTupleInference]
+from typing import List, Tuple, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+T = TypeVar("T")
+def f(x: Tuple[int, Unpack[Tuple[T, ...]]]) -> T: ...
+
+vt0: Tuple[int, ...]
+f(vt0)  # E: Argument 1 to "f" has incompatible type "Tuple[int, ...]"; expected "Tuple[int, Unpack[Tuple[int, ...]]]"
+
+vt1: Tuple[Unpack[Tuple[int, ...]], int]
+reveal_type(f(vt1))  # N: Revealed type is "builtins.int"
+
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+def g(x: Tuple[T, Unpack[Ts], S]) -> Tuple[T, Unpack[Ts], S]: ...
+g(vt0)  # E: Argument 1 to "g" has incompatible type "Tuple[int, ...]"; expected "Tuple[int, Unpack[Tuple[int, ...]], int]"
+
+U = TypeVar("U")
+def h(x: List[Tuple[T, S, U]]) -> Tuple[T, S, U]: ...
+vt2: Tuple[Unpack[Tuple[int, ...]], int]
+vt2 = h(reveal_type([]))  # N: Revealed type is "builtins.list[Tuple[builtins.int, builtins.int, builtins.int]]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicSelfTypeErasure]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class Array(Generic[Unpack[Ts]]):
+    def _close(self) -> None: ...
+
+    def close(self) -> None:
+        self._close()
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicSubclassFixed]
+from typing import Generic, Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class B(Generic[Unpack[Ts]]): ...
+class C(B[int, str]): ...
+class D(B[Unpack[Tuple[int, ...]]]): ...
+
+def fii(x: B[int, int]) -> None: ...
+def fis(x: B[int, str]) -> None: ...
+def fiv(x: B[Unpack[Tuple[int, ...]]]) -> None: ...
+
+fii(C())  # E: Argument 1 to "fii" has incompatible type "C"; expected "B[int, int]"
+fii(D())  # E: Argument 1 to "fii" has incompatible type "D"; expected "B[int, int]"
+fis(C())
+fis(D())  # E: Argument 1 to "fis" has incompatible type "D"; expected "B[int, str]"
+fiv(C())  # E: Argument 1 to "fiv" has incompatible type "C"; expected "B[Unpack[Tuple[int, ...]]]"
+fiv(D())
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicSubclassSame]
+from typing import Generic, Tuple, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class B(Generic[Unpack[Ts]]): ...
+class C(B[Unpack[Ts]]): ...
+
+def fii(x: B[int, int]) -> None: ...
+def fis(x: B[int, str]) -> None: ...
+def fiv(x: B[Unpack[Tuple[int, ...]]]) -> None: ...
+
+cii: C[int, int]
+cis: C[int, str]
+civ: C[Unpack[Tuple[int, ...]]]
+
+fii(cii)
+fii(cis)  # E: Argument 1 to "fii" has incompatible type "C[int, str]"; expected "B[int, int]"
+fii(civ)  # E: Argument 1 to "fii" has incompatible type "C[Unpack[Tuple[int, ...]]]"; expected "B[int, int]"
+
+fis(cii)  # E: Argument 1 to "fis" has incompatible type "C[int, int]"; expected "B[int, str]"
+fis(cis)
+fis(civ)  # E: Argument 1 to "fis" has incompatible type "C[Unpack[Tuple[int, ...]]]"; expected "B[int, str]"
+
+fiv(cii)
+fiv(cis)  # E: Argument 1 to "fiv" has incompatible type "C[int, str]"; expected "B[Unpack[Tuple[int, ...]]]"
+fiv(civ)
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicSubclassExtra]
+from typing import Generic, Tuple, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class B(Generic[Unpack[Ts]]): ...
+
+T = TypeVar("T")
+class C(B[int, Unpack[Ts], T]): ...
+
+def ff(x: B[int, int, int]) -> None: ...
+def fv(x: B[Unpack[Tuple[int, ...]]]) -> None: ...
+
+cii: C[int, int]
+cis: C[int, str]
+civ: C[Unpack[Tuple[int, ...]]]
+
+ff(cii)
+ff(cis)  # E: Argument 1 to "ff" has incompatible type "C[int, str]"; expected "B[int, int, int]"
+ff(civ)  # E: Argument 1 to "ff" has incompatible type "C[Unpack[Tuple[int, ...]]]"; expected "B[int, int, int]"
+
+fv(cii)
+fv(cis)  # E: Argument 1 to "fv" has incompatible type "C[int, str]"; expected "B[Unpack[Tuple[int, ...]]]"
+fv(civ)
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicSubclassVariadic]
+from typing import Generic, Tuple, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class B(Generic[Unpack[Ts]]): ...
+T = TypeVar("T")
+class C(B[Unpack[Tuple[T, ...]]]): ...
+
+def ff(x: B[int, int]) -> None: ...
+def fv(x: B[Unpack[Tuple[int, ...]]]) -> None: ...
+
+ci: C[int]
+ff(ci)  # E: Argument 1 to "ff" has incompatible type "C[int]"; expected "B[int, int]"
+fv(ci)
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicSubclassMethodAccess]
+from typing import Generic, Tuple, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class B(Generic[Unpack[Ts]]):
+    def meth(self) -> Tuple[Unpack[Ts]]: ...
+
+class C1(B[int, str]): ...
+class C2(B[Unpack[Ts]]): ...
+T = TypeVar("T")
+class C3(B[int, Unpack[Ts], T]): ...
+class C4(B[Unpack[Tuple[T, ...]]]): ...
+
+c1: C1
+reveal_type(c1.meth())  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
+
+c2f: C2[int, str]
+c2v: C2[Unpack[Tuple[int, ...]]]
+reveal_type(c2f.meth())  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
+reveal_type(c2v.meth())  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+
+c3f: C3[int, str]
+c3v: C3[Unpack[Tuple[int, ...]]]
+reveal_type(c3f.meth())  # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.str]"
+reveal_type(c3v.meth())  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]], builtins.int]"
+
+c4: C4[int]
+reveal_type(c4.meth())  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicTupleAnySubtype]
+from typing import Any, Generic, Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class B(Generic[Unpack[Ts]]): ...
+class C1(B[Unpack[Tuple[Any, ...]]]): ...
+c1 = C1()
+class C2(B): ...
+c2 = C2()
+x: B[int, str]
+x = c1
+x = c2
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicTupleAnySubtypeTupleType]
+from typing import Any, Generic, Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class B(Tuple[Unpack[Ts]]): ...
+class C1(B[Unpack[Tuple[Any, ...]]]): ...
+c1 = C1()
+class C2(B): ...
+c2 = C2()
+x: B[int, str]
+x = c1
+x = c2
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleAnyOverload]
+from typing import Any, Generic, overload, Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class Array(Generic[Unpack[Ts]]): ...
+
+class A:
+    @overload
+    def f(self, x: Tuple[Unpack[Ts]]) -> Array[Unpack[Ts]]: ...
+    @overload
+    def f(self, x: Any) -> Any: ...
+    def f(self, x: Any) -> Any:
+        ...
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleInferAgainstAny]
+from typing import Any, Tuple, TypeVar
+from typing_extensions import Unpack
+
+T = TypeVar("T")
+
+def test(x: int, t: Tuple[T, ...]) -> Tuple[int, Unpack[Tuple[T, ...]]]:
+    ...
+a: Any = test(42, ())
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test
index 71a5c6dd87b5..5e05d099b958 100644
--- a/test-data/unit/semanal-types.test
+++ b/test-data/unit/semanal-types.test
@@ -1559,8 +1559,8 @@ MypyFile:1(
   ImportFrom:1(typing_extensions, [TypeVarTuple])
   AssignmentStmt:2(
     NameExpr(TV* [__main__.TV])
-    TypeVarTupleExpr:2()))
-
+    TypeVarTupleExpr:2(
+      UpperBound(builtins.tuple[builtins.object, ...]))))
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTupleCallable]
@@ -1576,7 +1576,8 @@ MypyFile:1(
   ImportFrom:2(typing, [Callable])
   AssignmentStmt:3(
     NameExpr(Ts* [__main__.Ts])
-    TypeVarTupleExpr:3())
+    TypeVarTupleExpr:3(
+      UpperBound(builtins.tuple[builtins.object, ...])))
   FuncDef:5(
     foo
     Args(

From f41e24c8b31a110c2f01a753acba458977e41bfc Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 14 Sep 2023 10:42:34 +0100
Subject: [PATCH 124/288] Lenient handling of trivial Callable suffixes
 (#15913)

Fixes https://github.com/python/mypy/issues/15734
Fixes https://github.com/python/mypy/issues/15188
Fixes https://github.com/python/mypy/issues/14321
Fixes https://github.com/python/mypy/issues/13107 (plain Callable was
already working, this fixes the protocol example)
Fixes https://github.com/python/mypy/issues/16058

It looks like treating trivial suffixes (especially for erased
callables) as "whatever works" is the right thing, because it reflects the
whole idea of why we normally check subtyping with respect to, e.g., an
erased type. As you can see, this fixes a bunch of issues. Note that it was
necessary to make a couple more tweaks to make everything work smoothly:
* Adjust self-type erasure level in `checker.py` to match other places.
* Explicitly allow `Callable` as a `self`/`cls` annotation (actually I
am not sure we need to keep this check at all, since we now have good
inference for self-types, and we check they are safe either at
definition site or at call site).
---
 mypy/checker.py                               |   4 +-
 mypy/checkmember.py                           |   2 +
 mypy/messages.py                              |   3 +
 mypy/subtypes.py                              |  19 ++-
 mypy/typeops.py                               |   4 +
 test-data/unit/check-callable.test            |  31 ++++
 test-data/unit/check-modules.test             |  12 +-
 .../unit/check-parameter-specification.test   | 139 +++++++++++++++++-
 test-data/unit/fixtures/paramspec.pyi         |   1 +
 9 files changed, 204 insertions(+), 11 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 5a74f019dcf4..95a65b0a8cd1 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -1208,7 +1208,9 @@ def check_func_def(
                     ):
                         if defn.is_class or defn.name == "__new__":
                             ref_type = mypy.types.TypeType.make_normalized(ref_type)
-                        erased = get_proper_type(erase_to_bound(arg_type))
+                        # This level of erasure matches the one in checkmember.check_self_arg(),
+                        # better keep these two checks consistent.
+                        erased = get_proper_type(erase_typevars(erase_to_bound(arg_type)))
                         if not is_subtype(ref_type, erased, ignore_type_params=True):
                             if (
                                 isinstance(erased, Instance)
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 60430839ff62..59af0d402e14 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -896,6 +896,8 @@ def f(self: S) -> T: ...
             return functype
         else:
             selfarg = get_proper_type(item.arg_types[0])
+            # This level of erasure matches the one in checker.check_func_def(),
+            # better keep these two checks consistent.
             if subtypes.is_subtype(dispatched_arg_type, erase_typevars(erase_to_bound(selfarg))):
                 new_items.append(item)
             elif isinstance(selfarg, ParamSpecType):
diff --git a/mypy/messages.py b/mypy/messages.py
index b6fdaf06a8e0..8bc190b7d66d 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -2132,6 +2132,9 @@ def report_protocol_problems(
             not is_subtype(subtype, erase_type(supertype), options=self.options)
             or not subtype.type.defn.type_vars
             or not supertype.type.defn.type_vars
+            # Always show detailed message for ParamSpec
+            or subtype.type.has_param_spec_type
+            or supertype.type.has_param_spec_type
         ):
             type_name = format_type(subtype, self.options, module_names=True)
             self.note(f"Following member(s) of {type_name} have conflicts:", context, code=code)
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index fdde1c24670e..e8339a8c4d69 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -1519,6 +1519,18 @@ def are_trivial_parameters(param: Parameters | NormalizedCallableType) -> bool:
     )
 
 
+def is_trivial_suffix(param: Parameters | NormalizedCallableType) -> bool:
+    param_star = param.var_arg()
+    param_star2 = param.kw_arg()
+    return (
+        param.arg_kinds[-2:] == [ARG_STAR, ARG_STAR2]
+        and param_star is not None
+        and isinstance(get_proper_type(param_star.typ), AnyType)
+        and param_star2 is not None
+        and isinstance(get_proper_type(param_star2.typ), AnyType)
+    )
+
+
 def are_parameters_compatible(
     left: Parameters | NormalizedCallableType,
     right: Parameters | NormalizedCallableType,
@@ -1540,6 +1552,7 @@ def are_parameters_compatible(
     # Treat "def _(*a: Any, **kw: Any) -> X" similarly to "Callable[..., X]"
     if are_trivial_parameters(right):
         return True
+    trivial_suffix = is_trivial_suffix(right)
 
     # Match up corresponding arguments and check them for compatibility. In
     # every pair (argL, argR) of corresponding arguments from L and R, argL must
@@ -1570,7 +1583,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
         if right_arg is None:
             return False
         if left_arg is None:
-            return not allow_partial_overlap
+            return not allow_partial_overlap and not trivial_suffix
         return not is_compat(right_arg.typ, left_arg.typ)
 
     if _incompatible(left_star, right_star) or _incompatible(left_star2, right_star2):
@@ -1594,7 +1607,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
     #           arguments. Get all further positional args of left, and make sure
     #           they're more general than the corresponding member in right.
     # TODO: are we handling UnpackType correctly here?
-    if right_star is not None:
+    if right_star is not None and not trivial_suffix:
         # Synthesize an anonymous formal argument for the right
         right_by_position = right.try_synthesizing_arg_from_vararg(None)
         assert right_by_position is not None
@@ -1621,7 +1634,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
     # Phase 1d: Check kw args. Right has an infinite series of optional named
     #           arguments. Get all further named args of left, and make sure
     #           they're more general than the corresponding member in right.
-    if right_star2 is not None:
+    if right_star2 is not None and not trivial_suffix:
         right_names = {name for name in right.arg_names if name is not None}
         left_only_names = set()
         for name, kind in zip(left.arg_names, left.arg_kinds):
diff --git a/mypy/typeops.py b/mypy/typeops.py
index 3f50232f04c1..10efa32c4b91 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -251,6 +251,10 @@ def supported_self_type(typ: ProperType) -> bool:
     """
     if isinstance(typ, TypeType):
         return supported_self_type(typ.item)
+    if isinstance(typ, CallableType):
+        # Special case: allow class callable instead of Type[...] as cls annotation,
+        # as well as callable self for callback protocols.
+        return True
     return isinstance(typ, TypeVarType) or (
         isinstance(typ, Instance) and typ != fill_typevars(typ.type)
     )
diff --git a/test-data/unit/check-callable.test b/test-data/unit/check-callable.test
index 07c42de74bb3..8a611a689be5 100644
--- a/test-data/unit/check-callable.test
+++ b/test-data/unit/check-callable.test
@@ -598,3 +598,34 @@ a: A
 a()  # E: Missing positional argument "other" in call to "__call__" of "A"
 a(a)
 a(lambda: None)
+
+[case testCallableSubtypingTrivialSuffix]
+from typing import Any, Protocol
+
+class Call(Protocol):
+    def __call__(self, x: int, *args: Any, **kwargs: Any) -> None: ...
+
+def f1() -> None: ...
+a1: Call = f1  # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "Call") \
+               # N: "Call.__call__" has type "Callable[[Arg(int, 'x'), VarArg(Any), KwArg(Any)], None]"
+def f2(x: str) -> None: ...
+a2: Call = f2  # E: Incompatible types in assignment (expression has type "Callable[[str], None]", variable has type "Call") \
+               # N: "Call.__call__" has type "Callable[[Arg(int, 'x'), VarArg(Any), KwArg(Any)], None]"
+def f3(y: int) -> None: ...
+a3: Call = f3  # E: Incompatible types in assignment (expression has type "Callable[[int], None]", variable has type "Call") \
+               # N: "Call.__call__" has type "Callable[[Arg(int, 'x'), VarArg(Any), KwArg(Any)], None]"
+def f4(x: int) -> None: ...
+a4: Call = f4
+
+def f5(x: int, y: int) -> None: ...
+a5: Call = f5
+
+def f6(x: int, y: int = 0) -> None: ...
+a6: Call = f6
+
+def f7(x: int, *, y: int) -> None: ...
+a7: Call = f7
+
+def f8(x: int, *args: int, **kwargs: str) -> None: ...
+a8: Call = f8
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
index 3da5996ed274..94368f6c1113 100644
--- a/test-data/unit/check-modules.test
+++ b/test-data/unit/check-modules.test
@@ -3193,7 +3193,7 @@ from test1 import aaaa  # E: Module "test1" has no attribute "aaaa"
 import b
 [file a.py]
 class Foo:
-    def frobnicate(self, x, *args, **kwargs): pass
+    def frobnicate(self, x: str, *args, **kwargs): pass
 [file b.py]
 from a import Foo
 class Bar(Foo):
@@ -3201,21 +3201,21 @@ class Bar(Foo):
 [file b.py.2]
 from a import Foo
 class Bar(Foo):
-    def frobnicate(self, *args) -> None: pass
+    def frobnicate(self, *args: int) -> None: pass
 [file b.py.3]
 from a import Foo
 class Bar(Foo):
-    def frobnicate(self, *args) -> None: pass # type: ignore[override] # I know
+    def frobnicate(self, *args: int) -> None: pass # type: ignore[override] # I know
 [builtins fixtures/dict.pyi]
 [out1]
 tmp/b.py:3: error: Signature of "frobnicate" incompatible with supertype "Foo"
 tmp/b.py:3: note:      Superclass:
-tmp/b.py:3: note:          def frobnicate(self, x: Any, *args: Any, **kwargs: Any) -> Any
+tmp/b.py:3: note:          def frobnicate(self, x: str, *args: Any, **kwargs: Any) -> Any
 tmp/b.py:3: note:      Subclass:
 tmp/b.py:3: note:          def frobnicate(self) -> None
 [out2]
 tmp/b.py:3: error: Signature of "frobnicate" incompatible with supertype "Foo"
 tmp/b.py:3: note:      Superclass:
-tmp/b.py:3: note:          def frobnicate(self, x: Any, *args: Any, **kwargs: Any) -> Any
+tmp/b.py:3: note:          def frobnicate(self, x: str, *args: Any, **kwargs: Any) -> Any
 tmp/b.py:3: note:      Subclass:
-tmp/b.py:3: note:          def frobnicate(self, *args: Any) -> None
+tmp/b.py:3: note:          def frobnicate(self, *args: int) -> None
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index d80069644194..da831d29dd43 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1729,7 +1729,12 @@ class A(Protocol[P]):
         ...
 
 def bar(b: A[P]) -> A[Concatenate[int, P]]:
-    return b  # E: Incompatible return value type (got "A[P]", expected "A[[int, **P]]")
+    return b  # E: Incompatible return value type (got "A[P]", expected "A[[int, **P]]") \
+              # N: Following member(s) of "A[P]" have conflicts: \
+              # N:     Expected: \
+              # N:         def foo(self, int, /, *args: P.args, **kwargs: P.kwargs) -> Any \
+              # N:     Got: \
+              # N:         def foo(self, *args: P.args, **kwargs: P.kwargs) -> Any
 [builtins fixtures/paramspec.pyi]
 
 [case testParamSpecPrefixSubtypingValidNonStrict]
@@ -1825,6 +1830,138 @@ c: C[int, [int, str], str]  # E: Nested parameter specifications are not allowed
 reveal_type(c)  # N: Revealed type is "__main__.C[Any]"
 [builtins fixtures/paramspec.pyi]
 
+[case testParamSpecConcatenateSelfType]
+from typing import Callable
+from typing_extensions import ParamSpec, Concatenate
+
+P = ParamSpec("P")
+class A:
+    def __init__(self, a_param_1: str) -> None: ...
+
+    @classmethod
+    def add_params(cls: Callable[P, A]) -> Callable[Concatenate[float, P], A]:
+        def new_constructor(i: float, *args: P.args, **kwargs: P.kwargs) -> A:
+            return cls(*args, **kwargs)
+        return new_constructor
+
+    @classmethod
+    def remove_params(cls: Callable[Concatenate[str, P], A]) -> Callable[P, A]:
+        def new_constructor(*args: P.args, **kwargs: P.kwargs) -> A:
+            return cls("my_special_str", *args, **kwargs)
+        return new_constructor
+
+reveal_type(A.add_params())  # N: Revealed type is "def (builtins.float, a_param_1: builtins.str) -> __main__.A"
+reveal_type(A.remove_params())  # N: Revealed type is "def () -> __main__.A"
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecConcatenateCallbackProtocol]
+from typing import Protocol, TypeVar
+from typing_extensions import ParamSpec, Concatenate
+
+P = ParamSpec("P")
+R = TypeVar("R", covariant=True)
+
+class Path: ...
+
+class Function(Protocol[P, R]):
+    def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: ...
+
+def file_cache(fn: Function[Concatenate[Path, P], R]) -> Function[P, R]:
+    def wrapper(*args: P.args, **kw: P.kwargs) -> R:
+        return fn(Path(), *args, **kw)
+    return wrapper
+
+@file_cache
+def get_thing(path: Path, *, some_arg: int) -> int: ...
+reveal_type(get_thing)  # N: Revealed type is "__main__.Function[[*, some_arg: builtins.int], builtins.int]"
+get_thing(some_arg=1)  # OK
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecConcatenateKeywordOnly]
+from typing import Callable, TypeVar
+from typing_extensions import ParamSpec, Concatenate
+
+P = ParamSpec("P")
+R = TypeVar("R")
+
+class Path: ...
+
+def file_cache(fn: Callable[Concatenate[Path, P], R]) -> Callable[P, R]:
+    def wrapper(*args: P.args, **kw: P.kwargs) -> R:
+        return fn(Path(), *args, **kw)
+    return wrapper
+
+@file_cache
+def get_thing(path: Path, *, some_arg: int) -> int: ...
+reveal_type(get_thing)  # N: Revealed type is "def (*, some_arg: builtins.int) -> builtins.int"
+get_thing(some_arg=1)  # OK
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecConcatenateCallbackApply]
+from typing import Callable, Protocol
+from typing_extensions import ParamSpec, Concatenate
+
+P = ParamSpec("P")
+
+class FuncType(Protocol[P]):
+    def __call__(self, x: int, s: str, *args: P.args, **kw_args: P.kwargs) -> str: ...
+
+def forwarder1(fp: FuncType[P], *args: P.args, **kw_args: P.kwargs) -> str:
+    return fp(0, '', *args, **kw_args)
+
+def forwarder2(fp: Callable[Concatenate[int, str, P], str], *args: P.args, **kw_args: P.kwargs) -> str:
+    return fp(0, '', *args, **kw_args)
+
+def my_f(x: int, s: str, d: bool) -> str: ...
+forwarder1(my_f, True)  # OK
+forwarder2(my_f, True)  # OK
+forwarder1(my_f, 1.0)  # E: Argument 2 to "forwarder1" has incompatible type "float"; expected "bool"
+forwarder2(my_f, 1.0)  # E: Argument 2 to "forwarder2" has incompatible type "float"; expected "bool"
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecCallbackProtocolSelf]
+from typing import Callable, Protocol, TypeVar
+from typing_extensions import ParamSpec, Concatenate
+
+Params = ParamSpec("Params")
+Result = TypeVar("Result", covariant=True)
+
+class FancyMethod(Protocol):
+    def __call__(self, arg1: int, arg2: str) -> bool: ...
+    def return_me(self: Callable[Params, Result]) -> Callable[Params, Result]: ...
+    def return_part(self: Callable[Concatenate[int, Params], Result]) -> Callable[Params, Result]: ...
+
+m: FancyMethod
+reveal_type(m.return_me())  # N: Revealed type is "def (arg1: builtins.int, arg2: builtins.str) -> builtins.bool"
+reveal_type(m.return_part())  # N: Revealed type is "def (arg2: builtins.str) -> builtins.bool"
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecInferenceCallableAgainstAny]
+from typing import Callable, TypeVar, Any
+from typing_extensions import ParamSpec, Concatenate
+
+_P = ParamSpec("_P")
+_R = TypeVar("_R")
+
+class A: ...
+a = A()
+
+def a_func(
+    func: Callable[Concatenate[A, _P], _R],
+) -> Callable[Concatenate[Any, _P], _R]:
+    def wrapper(__a: Any, *args: _P.args, **kwargs: _P.kwargs) -> _R:
+        return func(a, *args, **kwargs)
+    return wrapper
+
+def test(a, *args): ...
+x: Any
+y: object
+
+a_func(test)
+x = a_func(test)
+y = a_func(test)
+[builtins fixtures/paramspec.pyi]
+
 [case testParamSpecInferenceWithCallbackProtocol]
 from typing import Protocol, Callable, ParamSpec
 
diff --git a/test-data/unit/fixtures/paramspec.pyi b/test-data/unit/fixtures/paramspec.pyi
index 9b0089f6a7e9..dfb5e126f242 100644
--- a/test-data/unit/fixtures/paramspec.pyi
+++ b/test-data/unit/fixtures/paramspec.pyi
@@ -16,6 +16,7 @@ class object:
 
 class function: ...
 class ellipsis: ...
+class classmethod: ...
 
 class type:
     def __init__(self, *a: object) -> None: ...

From 2c2d126cc742f2467045d36780c33bb8fb77a614 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Thu, 14 Sep 2023 14:27:54 -0700
Subject: [PATCH 125/288] Fix tuple[Any, ...] subtyping (#16108)

Follow up to #16073 and #16076
Fix needed for
https://github.com/python/mypy/pull/16053/files#r1316481395

I add test cases that would have caught my previous incorrect PR. I add
an explicit case for the new desirable behaviour we see with zip.
---
 mypy/main.py                     |   2 +-
 mypy/subtypes.py                 |   6 +-
 test-data/unit/check-tuples.test | 164 +++++++++++++++++++++++++++++--
 3 files changed, 160 insertions(+), 12 deletions(-)

diff --git a/mypy/main.py b/mypy/main.py
index a4357dca7890..3eb8a76a6de3 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -1359,7 +1359,7 @@ def set_strict_flags() -> None:
             parser.error("Can only find occurrences of class members.")
         if len(_find_occurrences) != 2:
             parser.error("Can only find occurrences of non-nested class members.")
-        state.find_occurrences = _find_occurrences  # type: ignore[assignment]
+        state.find_occurrences = _find_occurrences
 
     # Set reports.
     for flag, val in vars(special_opts).items():
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index e8339a8c4d69..9ed2e4af4051 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -463,8 +463,10 @@ def visit_instance(self, left: Instance) -> bool:
                         assert unpacked.type.fullname == "builtins.tuple"
                         if isinstance(get_proper_type(unpacked.args[0]), AnyType):
                             return not self.proper_subtype
-            # TODO: we need a special case similar to above to consider (something that maps to)
-            # tuple[Any, ...] a subtype of Tuple[<whatever>].
+                if mapped.type.fullname == "builtins.tuple" and isinstance(
+                    get_proper_type(mapped.args[0]), AnyType
+                ):
+                    return not self.proper_subtype
             return False
         if isinstance(right, TypeVarTupleType):
             # tuple[Any, ...] is like Any in the world of tuples (see special case above).
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 0e7c81edc498..391fa20db738 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -107,19 +107,147 @@ class A: pass
 class B(A): pass
 [builtins fixtures/tuple.pyi]
 
-[case testSubtypingWithNamedTupleType]
-from typing import Tuple
-t1: Tuple[A, A]
-t2: tuple
-
-if int():
-    t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[Any, ...]", variable has type "Tuple[A, A]")
-if int():
-    t2 = t1
+[case testSubtypingWithTupleType]
+from __future__ import annotations
+from typing import Any, Tuple
+
+tuple_aa: tuple[A, A]
+Tuple_aa: Tuple[A, A]
+
+tuple_obj: tuple[object, ...]
+Tuple_obj: Tuple[object, ...]
+
+tuple_obj_one: tuple[object]
+Tuple_obj_one: Tuple[object]
+
+tuple_obj_two: tuple[object, object]
+Tuple_obj_two: Tuple[object, object]
+
+tuple_any_implicit: tuple
+Tuple_any_implicit: Tuple
+
+tuple_any: tuple[Any, ...]
+Tuple_any: Tuple[Any, ...]
+
+tuple_any_one: tuple[Any]
+Tuple_any_one: Tuple[Any]
+
+tuple_any_two: tuple[Any, Any]
+Tuple_any_two: Tuple[Any, Any]
+
+def takes_tuple_aa(t: tuple[A, A]): ...
+
+takes_tuple_aa(tuple_aa)
+takes_tuple_aa(Tuple_aa)
+takes_tuple_aa(tuple_obj)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object, ...]"; expected "Tuple[A, A]"
+takes_tuple_aa(Tuple_obj)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object, ...]"; expected "Tuple[A, A]"
+takes_tuple_aa(tuple_obj_one)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object]"; expected "Tuple[A, A]"
+takes_tuple_aa(Tuple_obj_one)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object]"; expected "Tuple[A, A]"
+takes_tuple_aa(tuple_obj_two)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object, object]"; expected "Tuple[A, A]"
+takes_tuple_aa(Tuple_obj_two)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object, object]"; expected "Tuple[A, A]"
+takes_tuple_aa(tuple_any_implicit)
+takes_tuple_aa(Tuple_any_implicit)
+takes_tuple_aa(tuple_any)
+takes_tuple_aa(Tuple_any)
+takes_tuple_aa(tuple_any_one)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[Any]"; expected "Tuple[A, A]"
+takes_tuple_aa(Tuple_any_one)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[Any]"; expected "Tuple[A, A]"
+takes_tuple_aa(tuple_any_two)
+takes_tuple_aa(Tuple_any_two)
+
+def takes_tuple_any_implicit(t: tuple): ...
+
+takes_tuple_any_implicit(tuple_aa)
+takes_tuple_any_implicit(Tuple_aa)
+takes_tuple_any_implicit(tuple_obj)
+takes_tuple_any_implicit(Tuple_obj)
+takes_tuple_any_implicit(tuple_obj_one)
+takes_tuple_any_implicit(Tuple_obj_one)
+takes_tuple_any_implicit(tuple_obj_two)
+takes_tuple_any_implicit(Tuple_obj_two)
+takes_tuple_any_implicit(tuple_any_implicit)
+takes_tuple_any_implicit(Tuple_any_implicit)
+takes_tuple_any_implicit(tuple_any)
+takes_tuple_any_implicit(Tuple_any)
+takes_tuple_any_implicit(tuple_any_one)
+takes_tuple_any_implicit(Tuple_any_one)
+takes_tuple_any_implicit(tuple_any_two)
+takes_tuple_any_implicit(Tuple_any_two)
+
+def takes_tuple_any_one(t: tuple[Any]): ...
+
+takes_tuple_any_one(tuple_aa)  # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[A, A]"; expected "Tuple[Any]"
+takes_tuple_any_one(Tuple_aa)  # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[A, A]"; expected "Tuple[Any]"
+takes_tuple_any_one(tuple_obj)  # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[object, ...]"; expected "Tuple[Any]"
+takes_tuple_any_one(Tuple_obj)  # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[object, ...]"; expected "Tuple[Any]"
+takes_tuple_any_one(tuple_obj_one)
+takes_tuple_any_one(Tuple_obj_one)
+takes_tuple_any_one(tuple_obj_two)  # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[object, object]"; expected "Tuple[Any]"
+takes_tuple_any_one(Tuple_obj_two)  # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[object, object]"; expected "Tuple[Any]"
+takes_tuple_any_one(tuple_any_implicit)
+takes_tuple_any_one(Tuple_any_implicit)
+takes_tuple_any_one(tuple_any)
+takes_tuple_any_one(Tuple_any)
+takes_tuple_any_one(tuple_any_one)
+takes_tuple_any_one(Tuple_any_one)
+takes_tuple_any_one(tuple_any_two)  # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[Any, Any]"; expected "Tuple[Any]"
+takes_tuple_any_one(Tuple_any_two)  # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[Any, Any]"; expected "Tuple[Any]"
 
 class A: pass
 [builtins fixtures/tuple.pyi]
 
+[case testSubtypingWithTupleTypeSubclass]
+from __future__ import annotations
+from typing import Any, Tuple
+
+class A: ...
+
+inst_tuple_aa: Tuple[A, A]
+
+class tuple_aa_subclass(Tuple[A, A]): ...
+inst_tuple_aa_subclass: tuple_aa_subclass
+
+class tuple_any_subclass(Tuple[Any, ...]): ...
+inst_tuple_any_subclass: tuple_any_subclass
+
+class tuple_any_one_subclass(Tuple[Any]): ...
+inst_tuple_any_one_subclass: tuple_any_one_subclass
+
+class tuple_any_two_subclass(Tuple[Any, Any]): ...
+inst_tuple_any_two_subclass: tuple_any_two_subclass
+
+class tuple_obj_subclass(Tuple[object, ...]): ...
+inst_tuple_obj_subclass: tuple_obj_subclass
+
+class tuple_obj_one_subclass(Tuple[object]): ...
+inst_tuple_obj_one_subclass: tuple_obj_one_subclass
+
+class tuple_obj_two_subclass(Tuple[object, object]): ...
+inst_tuple_obj_two_subclass: tuple_obj_two_subclass
+
+def takes_tuple_aa(t: Tuple[A, A]): ...
+
+takes_tuple_aa(inst_tuple_aa)
+takes_tuple_aa(inst_tuple_aa_subclass)
+takes_tuple_aa(inst_tuple_any_subclass)
+takes_tuple_aa(inst_tuple_any_one_subclass)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "tuple_any_one_subclass"; expected "Tuple[A, A]"
+takes_tuple_aa(inst_tuple_any_two_subclass)
+takes_tuple_aa(inst_tuple_obj_subclass)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "tuple_obj_subclass"; expected "Tuple[A, A]"
+takes_tuple_aa(inst_tuple_obj_one_subclass)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "tuple_obj_one_subclass"; expected "Tuple[A, A]"
+takes_tuple_aa(inst_tuple_obj_two_subclass)  # E: Argument 1 to "takes_tuple_aa" has incompatible type "tuple_obj_two_subclass"; expected "Tuple[A, A]"
+
+def takes_tuple_aa_subclass(t: tuple_aa_subclass): ...
+
+takes_tuple_aa_subclass(inst_tuple_aa)  # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "Tuple[A, A]"; expected "tuple_aa_subclass"
+takes_tuple_aa_subclass(inst_tuple_aa_subclass)
+takes_tuple_aa_subclass(inst_tuple_any_subclass)  # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_any_subclass"; expected "tuple_aa_subclass"
+takes_tuple_aa_subclass(inst_tuple_any_one_subclass)  # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_any_one_subclass"; expected "tuple_aa_subclass"
+takes_tuple_aa_subclass(inst_tuple_any_two_subclass)  # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_any_two_subclass"; expected "tuple_aa_subclass"
+takes_tuple_aa_subclass(inst_tuple_obj_subclass)  # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_obj_subclass"; expected "tuple_aa_subclass"
+takes_tuple_aa_subclass(inst_tuple_obj_one_subclass)  # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_obj_one_subclass"; expected "tuple_aa_subclass"
+takes_tuple_aa_subclass(inst_tuple_obj_two_subclass)  # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_obj_two_subclass"; expected "tuple_aa_subclass"
+
+[builtins fixtures/tuple.pyi]
+
 [case testTupleInitializationWithNone]
 # flags: --no-strict-optional
 from typing import Tuple
@@ -1522,3 +1650,21 @@ class Bar(aaaaaaaaaa):  # E: Name "aaaaaaaaaa" is not defined
 class FooBarTuple(Tuple[Foo, Bar]):
     ...
 [builtins fixtures/tuple.pyi]
+
+
+[case testTupleOverloadZipAny]
+from typing import Any, Iterable, Iterator, Tuple, TypeVar, overload
+
+T = TypeVar("T")
+
+@overload
+def zip(__i: Iterable[T]) -> Iterator[Tuple[T]]: ...
+@overload
+def zip(*i: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ...
+def zip(i): ...
+
+def g(t: Tuple):
+    # Ideally, we'd infer that these are iterators of tuples
+    reveal_type(zip(*t))  # N: Revealed type is "typing.Iterator[Any]"
+    reveal_type(zip(t))  # N: Revealed type is "typing.Iterator[Any]"
+[builtins fixtures/tuple.pyi]

From d77310ae61e8e784aae46b2011f35900b9392e15 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Fri, 15 Sep 2023 00:17:32 -0700
Subject: [PATCH 126/288] Better diffs in tests (#16112)

It's annoying that one line change causes everything else to show up as
a diff. Just use difflib instead. I also highlight the changed lines. We
can't use FancyFormatter because it doesn't work well with pytest.
---
 mypy/test/helpers.py | 128 +++++++++++++++++++++++--------------------
 1 file changed, 68 insertions(+), 60 deletions(-)

diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py
index 7447391593d5..a53e16e27dfa 100644
--- a/mypy/test/helpers.py
+++ b/mypy/test/helpers.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import contextlib
+import difflib
 import os
 import pathlib
 import re
@@ -43,64 +44,81 @@ def run_mypy(args: list[str]) -> None:
         pytest.fail(msg="Sample check failed", pytrace=False)
 
 
-def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) -> None:
-    """Assert that two string arrays are equal.
+def diff_ranges(
+    left: list[str], right: list[str]
+) -> tuple[list[tuple[int, int]], list[tuple[int, int]]]:
+    seq = difflib.SequenceMatcher(None, left, right)
+    # note last triple is a dummy, so don't need to worry
+    blocks = seq.get_matching_blocks()
 
-    Display any differences in a human-readable form.
-    """
-    actual = clean_up(actual)
-    if actual != expected:
-        num_skip_start = num_skipped_prefix_lines(expected, actual)
-        num_skip_end = num_skipped_suffix_lines(expected, actual)
+    i = 0
+    j = 0
+    left_ranges = []
+    right_ranges = []
+    for block in blocks:
+        # mismatched range
+        left_ranges.append((i, block.a))
+        right_ranges.append((j, block.b))
 
-        sys.stderr.write("Expected:\n")
+        i = block.a + block.size
+        j = block.b + block.size
 
-        # If omit some lines at the beginning, indicate it by displaying a line
-        # with '...'.
-        if num_skip_start > 0:
-            sys.stderr.write("  ...\n")
+        # matched range
+        left_ranges.append((block.a, i))
+        right_ranges.append((block.b, j))
+    return left_ranges, right_ranges
 
-        # Keep track of the first different line.
-        first_diff = -1
 
-        # Display only this many first characters of identical lines.
-        width = 75
+def render_diff_range(
+    ranges: list[tuple[int, int]], content: list[str], colour: str | None = None
+) -> None:
+    for i, line_range in enumerate(ranges):
+        is_matching = i % 2 == 1
+        lines = content[line_range[0] : line_range[1]]
+        for j, line in enumerate(lines):
+            if (
+                is_matching
+                # elide the middle of matching blocks
+                and j >= 3
+                and j < len(lines) - 3
+            ):
+                if j == 3:
+                    sys.stderr.write("  ...\n")
+                continue
 
-        for i in range(num_skip_start, len(expected) - num_skip_end):
-            if i >= len(actual) or expected[i] != actual[i]:
-                if first_diff < 0:
-                    first_diff = i
-                sys.stderr.write(f"  {expected[i]:<45} (diff)")
-            else:
-                e = expected[i]
-                sys.stderr.write("  " + e[:width])
-                if len(e) > width:
-                    sys.stderr.write("...")
-            sys.stderr.write("\n")
-        if num_skip_end > 0:
-            sys.stderr.write("  ...\n")
+            if not is_matching and colour:
+                sys.stderr.write(colour)
 
-        sys.stderr.write("Actual:\n")
+            sys.stderr.write("  " + line)
 
-        if num_skip_start > 0:
-            sys.stderr.write("  ...\n")
+            if not is_matching:
+                if colour:
+                    sys.stderr.write("\033[0m")
+                sys.stderr.write(" (diff)")
 
-        for j in range(num_skip_start, len(actual) - num_skip_end):
-            if j >= len(expected) or expected[j] != actual[j]:
-                sys.stderr.write(f"  {actual[j]:<45} (diff)")
-            else:
-                a = actual[j]
-                sys.stderr.write("  " + a[:width])
-                if len(a) > width:
-                    sys.stderr.write("...")
             sys.stderr.write("\n")
-        if not actual:
-            sys.stderr.write("  (empty)\n")
-        if num_skip_end > 0:
-            sys.stderr.write("  ...\n")
 
-        sys.stderr.write("\n")
 
+def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) -> None:
+    """Assert that two string arrays are equal.
+
+    Display any differences in a human-readable form.
+    """
+    actual = clean_up(actual)
+    if expected != actual:
+        expected_ranges, actual_ranges = diff_ranges(expected, actual)
+        sys.stderr.write("Expected:\n")
+        red = "\033[31m" if sys.platform != "win32" else None
+        render_diff_range(expected_ranges, expected, colour=red)
+        sys.stderr.write("Actual:\n")
+        green = "\033[32m" if sys.platform != "win32" else None
+        render_diff_range(actual_ranges, actual, colour=green)
+
+        sys.stderr.write("\n")
+        first_diff = next(
+            (i for i, (a, b) in enumerate(zip(expected, actual)) if a != b),
+            max(len(expected), len(actual)),
+        )
         if 0 <= first_diff < len(actual) and (
             len(expected[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT
             or len(actual[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT
@@ -109,6 +127,10 @@ def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str)
             # long lines.
             show_align_message(expected[first_diff], actual[first_diff])
 
+        sys.stderr.write(
+            "Update the test output using --update-data -n0 "
+            "(you can additionally use the -k selector to update only specific tests)"
+        )
         pytest.fail(msg, pytrace=False)
 
 
@@ -226,20 +248,6 @@ def local_sys_path_set() -> Iterator[None]:
         sys.path = old_sys_path
 
 
-def num_skipped_prefix_lines(a1: list[str], a2: list[str]) -> int:
-    num_eq = 0
-    while num_eq < min(len(a1), len(a2)) and a1[num_eq] == a2[num_eq]:
-        num_eq += 1
-    return max(0, num_eq - 4)
-
-
-def num_skipped_suffix_lines(a1: list[str], a2: list[str]) -> int:
-    num_eq = 0
-    while num_eq < min(len(a1), len(a2)) and a1[-num_eq - 1] == a2[-num_eq - 1]:
-        num_eq += 1
-    return max(0, num_eq - 4)
-
-
 def testfile_pyversion(path: str) -> tuple[int, int]:
     if path.endswith("python312.test"):
         return 3, 12

From 402c8ffa821d35a68dfe010a59f1dd9ea3dbb02a Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 15 Sep 2023 09:42:20 +0100
Subject: [PATCH 127/288] Fix crash on malformed TypedDict in incremental mode
 (#16115)

Fixes https://github.com/python/mypy/issues/15557

FWIW I simply copy the logic for handling malformed definitions from
named tuples, that seems to be much more robust.
---
 mypy/semanal_typeddict.py             | 14 ++++++++++----
 test-data/unit/check-incremental.test | 25 +++++++++++++++++++++++++
 2 files changed, 35 insertions(+), 4 deletions(-)

diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py
index fb3fa713e3fb..a9a4cd868f27 100644
--- a/mypy/semanal_typeddict.py
+++ b/mypy/semanal_typeddict.py
@@ -366,7 +366,13 @@ def check_typeddict(
         name, items, types, total, tvar_defs, ok = res
         if not ok:
             # Error. Construct dummy return value.
-            info = self.build_typeddict_typeinfo("TypedDict", [], [], set(), call.line, None)
+            if var_name:
+                name = var_name
+                if is_func_scope:
+                    name += "@" + str(call.line)
+            else:
+                name = var_name = "TypedDict@" + str(call.line)
+            info = self.build_typeddict_typeinfo(name, [], [], set(), call.line, None)
         else:
             if var_name is not None and name != var_name:
                 self.fail(
@@ -395,9 +401,9 @@ def check_typeddict(
                 name, items, types, required_keys, call.line, existing_info
             )
             info.line = node.line
-            # Store generated TypeInfo under both names, see semanal_namedtuple for more details.
-            if name != var_name or is_func_scope:
-                self.api.add_symbol_skip_local(name, info)
+        # Store generated TypeInfo under both names, see semanal_namedtuple for more details.
+        if name != var_name or is_func_scope:
+            self.api.add_symbol_skip_local(name, info)
         if var_name:
             self.api.add_symbol(var_name, info, node)
         call.analyzed = TypedDictExpr(info)
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index 06f87a26e7a1..801bbd4e77b4 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -6479,3 +6479,28 @@ from typing_extensions import TypeVarTuple, Unpack
 Ts = TypeVarTuple("Ts")
 class C(Tuple[Unpack[Ts]]): ...
 [builtins fixtures/tuple.pyi]
+
+[case testNoIncrementalCrashOnInvalidTypedDict]
+import m
+[file m.py]
+import counts
+[file m.py.2]
+import counts
+# touch
+[file counts.py]
+from typing_extensions import TypedDict
+Counts = TypedDict("Counts", {k: int for k in "abc"})  # type: ignore
+[builtins fixtures/dict.pyi]
+
+[case testNoIncrementalCrashOnInvalidTypedDictFunc]
+import m
+[file m.py]
+import counts
+[file m.py.2]
+import counts
+# touch
+[file counts.py]
+from typing_extensions import TypedDict
+def test() -> None:
+    Counts = TypedDict("Counts", {k: int for k in "abc"})  # type: ignore
+[builtins fixtures/dict.pyi]

From 2bbc42f898031d2aa3e26f1272604ce879ff57dd Mon Sep 17 00:00:00 2001
From: Ali Hamdan <ali.hamdan.dev@gmail.com>
Date: Fri, 15 Sep 2023 10:44:31 +0200
Subject: [PATCH 128/288] stubgen: generate valid dataclass stubs (#15625)

Fixes #12441
Fixes #9986
Fixes #15966
---
 mypy/stubgen.py             |  57 +++++++++--
 mypy/test/teststubgen.py    |  11 +++
 test-data/unit/stubgen.test | 182 ++++++++++++++++++++++++++++++++++++
 3 files changed, 244 insertions(+), 6 deletions(-)

diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index aca836c52ce8..ca7249465746 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -657,6 +657,7 @@ def __init__(
         self.defined_names: set[str] = set()
         # Short names of methods defined in the body of the current class
         self.method_names: set[str] = set()
+        self.processing_dataclass = False
 
     def visit_mypy_file(self, o: MypyFile) -> None:
         self.module = o.fullname  # Current module being processed
@@ -706,6 +707,12 @@ def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
                 self.clear_decorators()
 
     def visit_func_def(self, o: FuncDef) -> None:
+        is_dataclass_generated = (
+            self.analyzed and self.processing_dataclass and o.info.names[o.name].plugin_generated
+        )
+        if is_dataclass_generated and o.name != "__init__":
+            # Skip methods generated by the @dataclass decorator (except for __init__)
+            return
         if (
             self.is_private_name(o.name, o.fullname)
             or self.is_not_in_all(o.name)
@@ -771,6 +778,12 @@ def visit_func_def(self, o: FuncDef) -> None:
             else:
                 arg = name + annotation
             args.append(arg)
+        if o.name == "__init__" and is_dataclass_generated and "**" in args:
+            # The dataclass plugin generates invalid nameless "*" and "**" arguments
+            new_name = "".join(a.split(":", 1)[0] for a in args).replace("*", "")
+            args[args.index("*")] = f"*{new_name}_"  # this name is guaranteed to be unique
+            args[args.index("**")] = f"**{new_name}__"  # same here
+
         retname = None
         if o.name != "__init__" and isinstance(o.unanalyzed_type, CallableType):
             if isinstance(get_proper_type(o.unanalyzed_type.ret_type), AnyType):
@@ -899,6 +912,9 @@ def visit_class_def(self, o: ClassDef) -> None:
         if not self._indent and self._state != EMPTY:
             sep = len(self._output)
             self.add("\n")
+        decorators = self.get_class_decorators(o)
+        for d in decorators:
+            self.add(f"{self._indent}@{d}\n")
         self.add(f"{self._indent}class {o.name}")
         self.record_name(o.name)
         base_types = self.get_base_types(o)
@@ -934,6 +950,7 @@ def visit_class_def(self, o: ClassDef) -> None:
         else:
             self._state = CLASS
         self.method_names = set()
+        self.processing_dataclass = False
         self._current_class = None
 
     def get_base_types(self, cdef: ClassDef) -> list[str]:
@@ -979,6 +996,21 @@ def get_base_types(self, cdef: ClassDef) -> list[str]:
             base_types.append(f"{name}={value.accept(p)}")
         return base_types
 
+    def get_class_decorators(self, cdef: ClassDef) -> list[str]:
+        decorators: list[str] = []
+        p = AliasPrinter(self)
+        for d in cdef.decorators:
+            if self.is_dataclass(d):
+                decorators.append(d.accept(p))
+                self.import_tracker.require_name(get_qualified_name(d))
+                self.processing_dataclass = True
+        return decorators
+
+    def is_dataclass(self, expr: Expression) -> bool:
+        if isinstance(expr, CallExpr):
+            expr = expr.callee
+        return self.get_fullname(expr) == "dataclasses.dataclass"
+
     def visit_block(self, o: Block) -> None:
         # Unreachable statements may be partially uninitialized and that may
         # cause trouble.
@@ -1336,6 +1368,9 @@ def get_init(
                 # Final without type argument is invalid in stubs.
                 final_arg = self.get_str_type_of_node(rvalue)
                 typename += f"[{final_arg}]"
+        elif self.processing_dataclass:
+            # attribute without annotation is not a dataclass field, don't add annotation.
+            return f"{self._indent}{lvalue} = ...\n"
         else:
             typename = self.get_str_type_of_node(rvalue)
         initializer = self.get_assign_initializer(rvalue)
@@ -1343,12 +1378,20 @@ def get_init(
 
     def get_assign_initializer(self, rvalue: Expression) -> str:
         """Does this rvalue need some special initializer value?"""
-        if self._current_class and self._current_class.info:
-            # Current rules
-            # 1. Return `...` if we are dealing with `NamedTuple` and it has an existing default value
-            if self._current_class.info.is_named_tuple and not isinstance(rvalue, TempNode):
-                return " = ..."
-            # TODO: support other possible cases, where initializer is important
+        if not self._current_class:
+            return ""
+        # Current rules
+        # 1. Return `...` if we are dealing with `NamedTuple` or `dataclass` field and
+        #    it has an existing default value
+        if (
+            self._current_class.info
+            and self._current_class.info.is_named_tuple
+            and not isinstance(rvalue, TempNode)
+        ):
+            return " = ..."
+        if self.processing_dataclass and not (isinstance(rvalue, TempNode) and rvalue.no_rhs):
+            return " = ..."
+        # TODO: support other possible cases, where initializer is important
 
         # By default, no initializer is required:
         return ""
@@ -1410,6 +1453,8 @@ def is_private_name(self, name: str, fullname: str | None = None) -> bool:
             return False
         if fullname in EXTRA_EXPORTED:
             return False
+        if name == "_":
+            return False
         return name.startswith("_") and (not name.endswith("__") or name in IGNORED_DUNDERS)
 
     def is_private_member(self, fullname: str) -> bool:
diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py
index 79d380785a39..7e30515ac892 100644
--- a/mypy/test/teststubgen.py
+++ b/mypy/test/teststubgen.py
@@ -724,11 +724,22 @@ def run_case_inner(self, testcase: DataDrivenTestCase) -> None:
 
     def parse_flags(self, program_text: str, extra: list[str]) -> Options:
         flags = re.search("# flags: (.*)$", program_text, flags=re.MULTILINE)
+        pyversion = None
         if flags:
             flag_list = flags.group(1).split()
+            for i, flag in enumerate(flag_list):
+                if flag.startswith("--python-version="):
+                    pyversion = flag.split("=", 1)[1]
+                    del flag_list[i]
+                    break
         else:
             flag_list = []
         options = parse_options(flag_list + extra)
+        if pyversion:
+            # A hack to allow testing old python versions with new language constructs
+            # This should be rarely used in general as stubgen output should not be version-specific
+            major, minor = pyversion.split(".", 1)
+            options.pyversion = (int(major), int(minor))
         if "--verbose" not in flag_list:
             options.quiet = True
         else:
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index 774a17b76161..828680fadcf2 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -3512,3 +3512,185 @@ def gen2() -> _Generator[_Incomplete, _Incomplete, _Incomplete]: ...
 
 class X(_Incomplete): ...
 class Y(_Incomplete): ...
+
+[case testDataclass]
+import dataclasses
+import dataclasses as dcs
+from dataclasses import dataclass, InitVar, KW_ONLY
+from dataclasses import dataclass as dc
+from typing import ClassVar
+
+@dataclasses.dataclass
+class X:
+    a: int
+    b: str = "hello"
+    c: ClassVar
+    d: ClassVar = 200
+    f: list[int] = field(init=False, default_factory=list)
+    g: int = field(default=2, kw_only=True)
+    _: KW_ONLY
+    h: int = 1
+    i: InitVar[str]
+    j: InitVar = 100
+    non_field = None
+
+@dcs.dataclass
+class Y: ...
+
+@dataclass
+class Z: ...
+
+@dc
+class W: ...
+
+@dataclass(init=False, repr=False)
+class V: ...
+
+[out]
+import dataclasses
+import dataclasses as dcs
+from dataclasses import InitVar, KW_ONLY, dataclass, dataclass as dc
+from typing import ClassVar
+
+@dataclasses.dataclass
+class X:
+    a: int
+    b: str = ...
+    c: ClassVar
+    d: ClassVar = ...
+    f: list[int] = ...
+    g: int = ...
+    _: KW_ONLY
+    h: int = ...
+    i: InitVar[str]
+    j: InitVar = ...
+    non_field = ...
+
+@dcs.dataclass
+class Y: ...
+@dataclass
+class Z: ...
+@dc
+class W: ...
+@dataclass(init=False, repr=False)
+class V: ...
+
+[case testDataclass_semanal]
+from dataclasses import dataclass, InitVar
+from typing import ClassVar
+
+@dataclass
+class X:
+    a: int
+    b: str = "hello"
+    c: ClassVar
+    d: ClassVar = 200
+    f: list[int] = field(init=False, default_factory=list)
+    g: int = field(default=2, kw_only=True)
+    h: int = 1
+    i: InitVar[str]
+    j: InitVar = 100
+    non_field = None
+
+@dataclass(init=False, repr=False, frozen=True)
+class Y: ...
+
+[out]
+from dataclasses import InitVar, dataclass
+from typing import ClassVar
+
+@dataclass
+class X:
+    a: int
+    b: str = ...
+    c: ClassVar
+    d: ClassVar = ...
+    f: list[int] = ...
+    g: int = ...
+    h: int = ...
+    i: InitVar[str]
+    j: InitVar = ...
+    non_field = ...
+    def __init__(self, a, b, f, g, h, i, j) -> None: ...
+
+@dataclass(init=False, repr=False, frozen=True)
+class Y: ...
+
+[case testDataclassWithKwOnlyField_semanal]
+# flags: --python-version=3.10
+from dataclasses import dataclass, InitVar, KW_ONLY
+from typing import ClassVar
+
+@dataclass
+class X:
+    a: int
+    b: str = "hello"
+    c: ClassVar
+    d: ClassVar = 200
+    f: list[int] = field(init=False, default_factory=list)
+    g: int = field(default=2, kw_only=True)
+    _: KW_ONLY
+    h: int = 1
+    i: InitVar[str]
+    j: InitVar = 100
+    non_field = None
+
+@dataclass(init=False, repr=False, frozen=True)
+class Y: ...
+
+[out]
+from dataclasses import InitVar, KW_ONLY, dataclass
+from typing import ClassVar
+
+@dataclass
+class X:
+    a: int
+    b: str = ...
+    c: ClassVar
+    d: ClassVar = ...
+    f: list[int] = ...
+    g: int = ...
+    _: KW_ONLY
+    h: int = ...
+    i: InitVar[str]
+    j: InitVar = ...
+    non_field = ...
+    def __init__(self, a, b, f, g, *, h, i, j) -> None: ...
+
+@dataclass(init=False, repr=False, frozen=True)
+class Y: ...
+
+[case testDataclassWithExplicitGeneratedMethodsOverrides_semanal]
+from dataclasses import dataclass
+
+@dataclass
+class X:
+    a: int
+    def __init__(self, a: int, b: str = ...) -> None: ...
+    def __post_init__(self) -> None: ...
+
+[out]
+from dataclasses import dataclass
+
+@dataclass
+class X:
+    a: int
+    def __init__(self, a: int, b: str = ...) -> None: ...
+    def __post_init__(self) -> None: ...
+
+[case testDataclassInheritsFromAny_semanal]
+from dataclasses import dataclass
+import missing
+
+@dataclass
+class X(missing.Base):
+    a: int
+
+[out]
+import missing
+from dataclasses import dataclass
+
+@dataclass
+class X(missing.Base):
+    a: int
+    def __init__(self, *selfa_, a, **selfa__) -> None: ...

From 88ae1e4c1541e5b03d695cf63d1265b972e427d9 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 15 Sep 2023 21:53:35 +0100
Subject: [PATCH 129/288] Fix crash on star unpack in TypedDict (#16116)

Fixes https://github.com/python/mypy/issues/16107
Fixes https://github.com/python/mypy/issues/15891

I only vaguely remember why I added those context managers, it seemed to
me giving full TypedDict as context may cause false positives. But since
the current way causes crashes, let's just not do this (we will see if
there will be actual false positives).
---
 mypy/checkexpr.py                   |  3 +--
 test-data/unit/check-typeddict.test | 12 ++++++++++
 test-data/unit/reports.test         | 34 +++++++++++++++++++++++++++++
 3 files changed, 47 insertions(+), 2 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 22a9852545b7..f46c8cb15c6f 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -813,8 +813,7 @@ def validate_star_typeddict_item(
         Note `result` and `always_present_keys` are updated in place. Return true if the
         expression `item_arg` may valid in `callee` TypedDict context.
         """
-        with self.chk.local_type_map(), self.msg.filter_errors():
-            inferred = get_proper_type(self.accept(item_arg, type_context=callee))
+        inferred = get_proper_type(self.accept(item_arg, type_context=callee))
         possible_tds = []
         if isinstance(inferred, TypedDictType):
             possible_tds = [inferred]
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index b8953f05b6a5..7ee9ef0b708b 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -3224,3 +3224,15 @@ t2: Foo = {**y}  # E: Missing key "a" for TypedDict "Foo"
 t3: Foo = {**z}  # E: Missing key "a" for TypedDict "Foo"
 [builtins fixtures/dict.pyi]
 [typing fixtures/typing-typeddict.pyi]
+
+[case testTypedDictUnpackError]
+from typing import TypedDict
+
+class Foo(TypedDict):
+    a: int
+
+def foo(x: int) -> Foo: ...
+
+f: Foo = {**foo("no")}  # E: Argument 1 to "foo" has incompatible type "str"; expected "int"
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-typeddict.pyi]
diff --git a/test-data/unit/reports.test b/test-data/unit/reports.test
index a6cde503ca09..16061d9c32bf 100644
--- a/test-data/unit/reports.test
+++ b/test-data/unit/reports.test
@@ -69,6 +69,40 @@ def untyped_function():
   </packages>
 </coverage>
 
+[case testCoberturaStarUnpacking]
+# cmd: mypy --cobertura-xml-report build a.py
+[file a.py]
+from typing import TypedDict
+
+class MyDict(TypedDict):
+    a: int
+
+def foo(a: int) -> MyDict:
+    return {"a": a}
+md: MyDict = MyDict(**foo(42))
+[outfile build/cobertura.xml]
+<coverage timestamp="$TIMESTAMP" version="$VERSION" line-rate="0.8333" branch-rate="0">
+  <sources>
+    <source>$PWD</source>
+  </sources>
+  <packages>
+    <package complexity="1.0" name="a" branch-rate="0" line-rate="0.8333">
+      <classes>
+        <class complexity="1.0" filename="a.py" name="a.py" branch-rate="0" line-rate="0.8333">
+          <methods/>
+          <lines>
+            <line branch="false" hits="1" number="1" precision="precise"/>
+            <line branch="false" hits="1" number="3" precision="precise"/>
+            <line branch="false" hits="0" number="4" precision="any"/>
+            <line branch="false" hits="1" number="6" precision="precise"/>
+            <line branch="false" hits="1" number="7" precision="precise"/>
+            <line branch="false" hits="1" number="8" precision="precise"/>
+          </lines>
+        </class>
+      </classes>
+    </package>
+  </packages>
+</coverage>
 
 [case testAnyExprReportDivisionByZero]
 # cmd: mypy --any-exprs-report=out -c 'pass'

From 80232b0cd6305b848c0d454bac04a5fb30578766 Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Fri, 15 Sep 2023 20:36:25 +0100
Subject: [PATCH 130/288] Sync typeshed

Source commit:
https://github.com/python/typeshed/commit/0ea043253e70d0304478a6d0b58bcda4cc583d08
---
 mypy/typeshed/stdlib/_ctypes.pyi              |  18 ++-
 mypy/typeshed/stdlib/asyncio/tasks.pyi        | 105 ++++++++++++++----
 mypy/typeshed/stdlib/builtins.pyi             | 105 +++++++++++++++++-
 mypy/typeshed/stdlib/collections/__init__.pyi |  17 ++-
 mypy/typeshed/stdlib/csv.pyi                  |   8 +-
 mypy/typeshed/stdlib/ctypes/wintypes.pyi      |  89 ++++++++-------
 mypy/typeshed/stdlib/enum.pyi                 |  22 +++-
 mypy/typeshed/stdlib/functools.pyi            |  40 ++++---
 mypy/typeshed/stdlib/http/client.pyi          |   3 +
 mypy/typeshed/stdlib/http/cookies.pyi         |   6 +-
 mypy/typeshed/stdlib/imaplib.pyi              |  11 +-
 mypy/typeshed/stdlib/importlib/__init__.pyi   |   6 +-
 mypy/typeshed/stdlib/importlib/abc.pyi        |  60 ++++++----
 mypy/typeshed/stdlib/importlib/machinery.pyi  |  33 ++++--
 .../stdlib/importlib/metadata/__init__.pyi    |  12 +-
 .../stdlib/importlib/metadata/_meta.pyi       |  38 +++++--
 .../stdlib/importlib/resources/__init__.pyi   |  11 +-
 mypy/typeshed/stdlib/importlib/util.pyi       |  11 +-
 mypy/typeshed/stdlib/pathlib.pyi              |  19 +++-
 mypy/typeshed/stdlib/poplib.pyi               |  29 +++--
 mypy/typeshed/stdlib/smtplib.pyi              |  42 +++++--
 mypy/typeshed/stdlib/sqlite3/dbapi2.pyi       |   2 +-
 mypy/typeshed/stdlib/tkinter/__init__.pyi     |   7 +-
 mypy/typeshed/stdlib/turtle.pyi               |   6 +
 mypy/typeshed/stdlib/typing.pyi               |   9 +-
 mypy/typeshed/stdlib/unittest/mock.pyi        |  20 +++-
 mypy/typeshed/stdlib/urllib/request.pyi       |  18 ++-
 mypy/typeshed/stdlib/weakref.pyi              |   8 ++
 mypy/typeshed/stdlib/zipfile.pyi              |  14 ++-
 29 files changed, 574 insertions(+), 195 deletions(-)

diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi
index 165bb5337784..1f15ac057988 100644
--- a/mypy/typeshed/stdlib/_ctypes.pyi
+++ b/mypy/typeshed/stdlib/_ctypes.pyi
@@ -122,15 +122,23 @@ class CFuncPtr(_PointerLike, _CData):
 
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
 
-class _CField:
+_GetT = TypeVar("_GetT")
+_SetT = TypeVar("_SetT")
+
+class _CField(Generic[_CT, _GetT, _SetT]):
     offset: int
     size: int
+    @overload
+    def __get__(self, __instance: None, __owner: type[Any] | None) -> Self: ...
+    @overload
+    def __get__(self, __instance: Any, __owner: type[Any] | None) -> _GetT: ...
+    def __set__(self, __instance: Any, __value: _SetT) -> None: ...
 
 class _StructUnionMeta(_CDataMeta):
     _fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]]
     _pack_: int
     _anonymous_: Sequence[str]
-    def __getattr__(self, name: str) -> _CField: ...
+    def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ...
 
 class _StructUnionBase(_CData, metaclass=_StructUnionMeta):
     def __init__(self, *args: Any, **kw: Any) -> None: ...
@@ -151,7 +159,11 @@ class Array(_CData, Generic[_CT]):
     def _type_(self) -> type[_CT]: ...
     @_type_.setter
     def _type_(self, value: type[_CT]) -> None: ...
-    raw: bytes  # Note: only available if _CT == c_char
+    # Note: only available if _CT == c_char
+    @property
+    def raw(self) -> bytes: ...
+    @raw.setter
+    def raw(self, value: ReadableBuffer) -> None: ...
     value: Any  # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
     # TODO These methods cannot be annotated correctly at the moment.
     # All of these "Any"s stand for the array's element type, but it's not possible to use _CT
diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi
index 3bc65e3703c5..b6929deb0fae 100644
--- a/mypy/typeshed/stdlib/asyncio/tasks.pyi
+++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi
@@ -2,7 +2,7 @@ import concurrent.futures
 import sys
 from collections.abc import Awaitable, Coroutine, Generator, Iterable, Iterator
 from types import FrameType
-from typing import Any, Generic, TextIO, TypeVar, overload
+from typing import Any, Generic, Protocol, TextIO, TypeVar, overload
 from typing_extensions import Literal, TypeAlias
 
 from . import _CoroutineLike
@@ -14,27 +14,52 @@ if sys.version_info >= (3, 9):
 if sys.version_info >= (3, 11):
     from contextvars import Context
 
-__all__ = (
-    "Task",
-    "create_task",
-    "FIRST_COMPLETED",
-    "FIRST_EXCEPTION",
-    "ALL_COMPLETED",
-    "wait",
-    "wait_for",
-    "as_completed",
-    "sleep",
-    "gather",
-    "shield",
-    "ensure_future",
-    "run_coroutine_threadsafe",
-    "current_task",
-    "all_tasks",
-    "_register_task",
-    "_unregister_task",
-    "_enter_task",
-    "_leave_task",
-)
+if sys.version_info >= (3, 12):
+    __all__ = (
+        "Task",
+        "create_task",
+        "FIRST_COMPLETED",
+        "FIRST_EXCEPTION",
+        "ALL_COMPLETED",
+        "wait",
+        "wait_for",
+        "as_completed",
+        "sleep",
+        "gather",
+        "shield",
+        "ensure_future",
+        "run_coroutine_threadsafe",
+        "current_task",
+        "all_tasks",
+        "create_eager_task_factory",
+        "eager_task_factory",
+        "_register_task",
+        "_unregister_task",
+        "_enter_task",
+        "_leave_task",
+    )
+else:
+    __all__ = (
+        "Task",
+        "create_task",
+        "FIRST_COMPLETED",
+        "FIRST_EXCEPTION",
+        "ALL_COMPLETED",
+        "wait",
+        "wait_for",
+        "as_completed",
+        "sleep",
+        "gather",
+        "shield",
+        "ensure_future",
+        "run_coroutine_threadsafe",
+        "current_task",
+        "all_tasks",
+        "_register_task",
+        "_unregister_task",
+        "_enter_task",
+        "_leave_task",
+    )
 
 _T = TypeVar("_T")
 _T_co = TypeVar("_T_co", covariant=True)
@@ -356,5 +381,41 @@ else:
 def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ...
 def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ...
 def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ...
+
+if sys.version_info >= (3, 12):
+    _TaskT_co = TypeVar("_TaskT_co", bound=Task[Any], covariant=True)
+
+    class _CustomTaskConstructor(Protocol[_TaskT_co]):
+        def __call__(
+            self,
+            __coro: _TaskCompatibleCoro[Any],
+            *,
+            loop: AbstractEventLoop,
+            name: str | None,
+            context: Context | None,
+            eager_start: bool,
+        ) -> _TaskT_co: ...
+
+    class _EagerTaskFactoryType(Protocol[_TaskT_co]):
+        def __call__(
+            self,
+            loop: AbstractEventLoop,
+            coro: _TaskCompatibleCoro[Any],
+            *,
+            name: str | None = None,
+            context: Context | None = None,
+        ) -> _TaskT_co: ...
+
+    def create_eager_task_factory(
+        custom_task_constructor: _CustomTaskConstructor[_TaskT_co],
+    ) -> _EagerTaskFactoryType[_TaskT_co]: ...
+    def eager_task_factory(
+        loop: AbstractEventLoop | None,
+        coro: _TaskCompatibleCoro[_T_co],
+        *,
+        name: str | None = None,
+        context: Context | None = None,
+    ) -> Task[_T_co]: ...
+
 def _register_task(task: Task[Any]) -> None: ...
 def _unregister_task(task: Task[Any]) -> None: ...
diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index 66c644d09a4d..cf4f857c5524 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -56,6 +56,7 @@ from typing import (  # noqa: Y022
 from typing_extensions import (
     Concatenate,
     Literal,
+    LiteralString,
     ParamSpec,
     Self,
     SupportsIndex,
@@ -441,8 +442,17 @@ class str(Sequence[str]):
     def __new__(cls, object: object = ...) -> Self: ...
     @overload
     def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ...
+    @overload
+    def capitalize(self: LiteralString) -> LiteralString: ...
+    @overload
     def capitalize(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def casefold(self: LiteralString) -> LiteralString: ...
+    @overload
     def casefold(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
+    @overload
     def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
     def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
@@ -450,11 +460,20 @@ class str(Sequence[str]):
         self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
     if sys.version_info >= (3, 8):
+        @overload
+        def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ...
+        @overload
         def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ...  # type: ignore[misc]
     else:
+        @overload
+        def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ...
+        @overload
         def expandtabs(self, tabsize: int = 8) -> str: ...  # type: ignore[misc]
 
     def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
+    @overload
+    def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ...
+    @overload
     def format(self, *args: object, **kwargs: object) -> str: ...
     def format_map(self, map: _FormatMapMapping) -> str: ...
     def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
@@ -470,32 +489,91 @@ class str(Sequence[str]):
     def isspace(self) -> bool: ...
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
+    @overload
+    def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ...
+    @overload
     def join(self, __iterable: Iterable[str]) -> str: ...  # type: ignore[misc]
+    @overload
+    def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
+    @overload
     def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
+    @overload
+    def lower(self: LiteralString) -> LiteralString: ...
+    @overload
     def lower(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
+    @overload
     def lstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
+    @overload
+    def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
+    @overload
     def partition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
+    @overload
+    def replace(
+        self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1
+    ) -> LiteralString: ...
+    @overload
     def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ...  # type: ignore[misc]
     if sys.version_info >= (3, 9):
+        @overload
+        def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ...
+        @overload
         def removeprefix(self, __prefix: str) -> str: ...  # type: ignore[misc]
+        @overload
+        def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ...
+        @overload
         def removesuffix(self, __suffix: str) -> str: ...  # type: ignore[misc]
 
     def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
+    @overload
+    def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
+    @overload
     def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
+    @overload
+    def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
+    @overload
     def rpartition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
+    @overload
+    def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
+    @overload
     def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
+    @overload
+    def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
+    @overload
     def rstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
+    @overload
+    def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
+    @overload
     def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
+    @overload
+    def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ...
+    @overload
     def splitlines(self, keepends: bool = False) -> list[str]: ...  # type: ignore[misc]
     def startswith(
         self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
+    @overload
+    def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
+    @overload
     def strip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
+    @overload
+    def swapcase(self: LiteralString) -> LiteralString: ...
+    @overload
     def swapcase(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def title(self: LiteralString) -> LiteralString: ...
+    @overload
     def title(self) -> str: ...  # type: ignore[misc]
     def translate(self, __table: _TranslateTable) -> str: ...
+    @overload
+    def upper(self: LiteralString) -> LiteralString: ...
+    @overload
     def upper(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ...
+    @overload
     def zfill(self, __width: SupportsIndex) -> str: ...  # type: ignore[misc]
     @staticmethod
     @overload
@@ -506,6 +584,9 @@ class str(Sequence[str]):
     @staticmethod
     @overload
     def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ...
+    @overload
+    def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ...
+    @overload
     def __add__(self, __value: str) -> str: ...  # type: ignore[misc]
     # Incompatible with Sequence.__contains__
     def __contains__(self, __key: str) -> bool: ...  # type: ignore[override]
@@ -514,13 +595,25 @@ class str(Sequence[str]):
     def __getitem__(self, __key: SupportsIndex | slice) -> str: ...
     def __gt__(self, __value: str) -> bool: ...
     def __hash__(self) -> int: ...
+    @overload
+    def __iter__(self: LiteralString) -> Iterator[LiteralString]: ...
+    @overload
     def __iter__(self) -> Iterator[str]: ...  # type: ignore[misc]
     def __le__(self, __value: str) -> bool: ...
     def __len__(self) -> int: ...
     def __lt__(self, __value: str) -> bool: ...
+    @overload
+    def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ...
+    @overload
     def __mod__(self, __value: Any) -> str: ...
+    @overload
+    def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
+    @overload
     def __mul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __ne__(self, __value: object) -> bool: ...
+    @overload
+    def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
+    @overload
     def __rmul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __getnewargs__(self) -> tuple[str]: ...
 
@@ -1027,13 +1120,13 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
         @overload
-        def __or__(self, __value: Mapping[_KT, _VT]) -> dict[_KT, _VT]: ...
+        def __or__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]: ...
         @overload
-        def __or__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ...
+        def __or__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ...
         @overload
-        def __ror__(self, __value: Mapping[_KT, _VT]) -> dict[_KT, _VT]: ...
+        def __ror__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]: ...
         @overload
-        def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ...
+        def __ror__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ...
         # dict.__ior__ should be kept roughly in line with MutableMapping.update()
         @overload  # type: ignore[misc]
         def __ior__(self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ...
@@ -1698,11 +1791,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit
 # Instead, we special-case the most common examples of this: bool and literal integers.
 if sys.version_info >= (3, 8):
     @overload
-    def sum(__iterable: Iterable[bool], start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ...  # type: ignore[misc]
 
 else:
     @overload
-    def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ...  # type: ignore[misc]
 
 @overload
 def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ...
diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi
index 8ceecd1f354e..3b8d92f78612 100644
--- a/mypy/typeshed/stdlib/collections/__init__.pyi
+++ b/mypy/typeshed/stdlib/collections/__init__.pyi
@@ -96,6 +96,11 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
         def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ...
         @overload
         def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ...
+    if sys.version_info >= (3, 12):
+        @overload
+        def get(self, key: _KT, default: None = None) -> _VT | None: ...
+        @overload
+        def get(self, key: _KT, default: _T) -> _VT | _T: ...
 
 class UserList(MutableSequence[_T]):
     data: list[_T]
@@ -402,13 +407,13 @@ class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]):
     def copy(self) -> Self: ...
     if sys.version_info >= (3, 9):
         @overload
-        def __or__(self, __value: Mapping[_KT, _VT]) -> Self: ...
+        def __or__(self, __value: dict[_KT, _VT]) -> Self: ...
         @overload
-        def __or__(self, __value: Mapping[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ...
+        def __or__(self, __value: dict[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ...
         @overload
-        def __ror__(self, __value: Mapping[_KT, _VT]) -> Self: ...
+        def __ror__(self, __value: dict[_KT, _VT]) -> Self: ...
         @overload
-        def __ror__(self, __value: Mapping[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ...
+        def __ror__(self, __value: dict[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ...  # type: ignore[misc]
 
 class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     maps: list[MutableMapping[_KT, _VT]]
@@ -422,6 +427,10 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
     def __iter__(self) -> Iterator[_KT]: ...
     def __len__(self) -> int: ...
     def __contains__(self, key: object) -> bool: ...
+    @overload
+    def get(self, key: _KT, default: None = None) -> _VT | None: ...
+    @overload
+    def get(self, key: _KT, default: _T) -> _VT | _T: ...
     def __missing__(self, key: _KT) -> _VT: ...  # undocumented
     def __bool__(self) -> bool: ...
     # Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences.
diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi
index a9c7fe0492c8..53425fbcccb1 100644
--- a/mypy/typeshed/stdlib/csv.pyi
+++ b/mypy/typeshed/stdlib/csv.pyi
@@ -71,8 +71,8 @@ class unix_dialect(Dialect): ...
 
 class DictReader(Iterator[_DictReadMapping[_T | Any, str | Any]], Generic[_T]):
     fieldnames: Sequence[_T] | None
-    restkey: str | None
-    restval: str | None
+    restkey: _T | None
+    restval: str | Any | None
     reader: _reader
     dialect: _DialectLike
     line_num: int
@@ -81,8 +81,8 @@ class DictReader(Iterator[_DictReadMapping[_T | Any, str | Any]], Generic[_T]):
         self,
         f: Iterable[str],
         fieldnames: Sequence[_T],
-        restkey: str | None = None,
-        restval: str | None = None,
+        restkey: _T | None = None,
+        restval: str | Any | None = None,
         dialect: _DialectLike = "excel",
         *,
         delimiter: str = ",",
diff --git a/mypy/typeshed/stdlib/ctypes/wintypes.pyi b/mypy/typeshed/stdlib/ctypes/wintypes.pyi
index 3bd27934750a..59c7ae3e599f 100644
--- a/mypy/typeshed/stdlib/ctypes/wintypes.pyi
+++ b/mypy/typeshed/stdlib/ctypes/wintypes.pyi
@@ -1,6 +1,7 @@
 from ctypes import (
     Array,
     Structure,
+    _CField,
     _Pointer,
     _SimpleCData,
     c_byte,
@@ -20,6 +21,7 @@ from ctypes import (
     c_wchar,
     c_wchar_p,
 )
+from typing import TypeVar
 from typing_extensions import TypeAlias
 
 BYTE = c_byte
@@ -101,39 +103,42 @@ HWND = HANDLE
 SC_HANDLE = HANDLE
 SERVICE_STATUS_HANDLE = HANDLE
 
+_CIntLikeT = TypeVar("_CIntLikeT", bound=_SimpleCData[int])
+_CIntLikeField: TypeAlias = _CField[_CIntLikeT, int, _CIntLikeT | int]
+
 class RECT(Structure):
-    left: LONG
-    top: LONG
-    right: LONG
-    bottom: LONG
+    left: _CIntLikeField[LONG]
+    top: _CIntLikeField[LONG]
+    right: _CIntLikeField[LONG]
+    bottom: _CIntLikeField[LONG]
 
 RECTL = RECT
 _RECTL = RECT
 tagRECT = RECT
 
 class _SMALL_RECT(Structure):
-    Left: SHORT
-    Top: SHORT
-    Right: SHORT
-    Bottom: SHORT
+    Left: _CIntLikeField[SHORT]
+    Top: _CIntLikeField[SHORT]
+    Right: _CIntLikeField[SHORT]
+    Bottom: _CIntLikeField[SHORT]
 
 SMALL_RECT = _SMALL_RECT
 
 class _COORD(Structure):
-    X: SHORT
-    Y: SHORT
+    X: _CIntLikeField[SHORT]
+    Y: _CIntLikeField[SHORT]
 
 class POINT(Structure):
-    x: LONG
-    y: LONG
+    x: _CIntLikeField[LONG]
+    y: _CIntLikeField[LONG]
 
 POINTL = POINT
 _POINTL = POINT
 tagPOINT = POINT
 
 class SIZE(Structure):
-    cx: LONG
-    cy: LONG
+    cx: _CIntLikeField[LONG]
+    cy: _CIntLikeField[LONG]
 
 SIZEL = SIZE
 tagSIZE = SIZE
@@ -141,45 +146,45 @@ tagSIZE = SIZE
 def RGB(red: int, green: int, blue: int) -> int: ...
 
 class FILETIME(Structure):
-    dwLowDateTime: DWORD
-    dwHighDateTime: DWORD
+    dwLowDateTime: _CIntLikeField[DWORD]
+    dwHighDateTime: _CIntLikeField[DWORD]
 
 _FILETIME = FILETIME
 
 class MSG(Structure):
-    hWnd: HWND
-    message: UINT
-    wParam: WPARAM
-    lParam: LPARAM
-    time: DWORD
-    pt: POINT
+    hWnd: _CField[HWND, int | None, HWND | int | None]
+    message: _CIntLikeField[UINT]
+    wParam: _CIntLikeField[WPARAM]
+    lParam: _CIntLikeField[LPARAM]
+    time: _CIntLikeField[DWORD]
+    pt: _CField[POINT, POINT, POINT]
 
 tagMSG = MSG
 MAX_PATH: int
 
 class WIN32_FIND_DATAA(Structure):
-    dwFileAttributes: DWORD
-    ftCreationTime: FILETIME
-    ftLastAccessTime: FILETIME
-    ftLastWriteTime: FILETIME
-    nFileSizeHigh: DWORD
-    nFileSizeLow: DWORD
-    dwReserved0: DWORD
-    dwReserved1: DWORD
-    cFileName: Array[CHAR]
-    cAlternateFileName: Array[CHAR]
+    dwFileAttributes: _CIntLikeField[DWORD]
+    ftCreationTime: _CField[FILETIME, FILETIME, FILETIME]
+    ftLastAccessTime: _CField[FILETIME, FILETIME, FILETIME]
+    ftLastWriteTime: _CField[FILETIME, FILETIME, FILETIME]
+    nFileSizeHigh: _CIntLikeField[DWORD]
+    nFileSizeLow: _CIntLikeField[DWORD]
+    dwReserved0: _CIntLikeField[DWORD]
+    dwReserved1: _CIntLikeField[DWORD]
+    cFileName: _CField[Array[CHAR], bytes, bytes]
+    cAlternateFileName: _CField[Array[CHAR], bytes, bytes]
 
 class WIN32_FIND_DATAW(Structure):
-    dwFileAttributes: DWORD
-    ftCreationTime: FILETIME
-    ftLastAccessTime: FILETIME
-    ftLastWriteTime: FILETIME
-    nFileSizeHigh: DWORD
-    nFileSizeLow: DWORD
-    dwReserved0: DWORD
-    dwReserved1: DWORD
-    cFileName: Array[WCHAR]
-    cAlternateFileName: Array[WCHAR]
+    dwFileAttributes: _CIntLikeField[DWORD]
+    ftCreationTime: _CField[FILETIME, FILETIME, FILETIME]
+    ftLastAccessTime: _CField[FILETIME, FILETIME, FILETIME]
+    ftLastWriteTime: _CField[FILETIME, FILETIME, FILETIME]
+    nFileSizeHigh: _CIntLikeField[DWORD]
+    nFileSizeLow: _CIntLikeField[DWORD]
+    dwReserved0: _CIntLikeField[DWORD]
+    dwReserved1: _CIntLikeField[DWORD]
+    cFileName: _CField[Array[WCHAR], str, str]
+    cAlternateFileName: _CField[Array[WCHAR], str, str]
 
 # These pointer type definitions use _Pointer[...] instead of POINTER(...), to allow them
 # to be used in type annotations.
diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi
index e6eaf6c413dc..10ea19257144 100644
--- a/mypy/typeshed/stdlib/enum.pyi
+++ b/mypy/typeshed/stdlib/enum.pyi
@@ -119,10 +119,12 @@ class EnumMeta(type):
     def __len__(self) -> int: ...
     def __bool__(self) -> Literal[True]: ...
     def __dir__(self) -> list[str]: ...
-    # Simple value lookup
+
+    # Overload 1: Value lookup on an already existing enum class (simple case)
     @overload
     def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ...
-    # Functional Enum API
+
+    # Overload 2: Functional API for constructing new enum classes.
     if sys.version_info >= (3, 11):
         @overload
         def __call__(
@@ -148,6 +150,18 @@ class EnumMeta(type):
             type: type | None = None,
             start: int = 1,
         ) -> type[Enum]: ...
+
+    # Overload 3 (py312+ only): Value lookup on an already existing enum class (complex case)
+    #
+    # >>> class Foo(enum.Enum):
+    # ...     X = 1, 2, 3
+    # >>> Foo(1, 2, 3)
+    # <Foo.X: (1, 2, 3)>
+    #
+    if sys.version_info >= (3, 12):
+        @overload
+        def __call__(cls: type[_EnumMemberT], value: Any, *values: Any) -> _EnumMemberT: ...
+
     _member_names_: list[str]  # undocumented
     _member_map_: dict[str, Enum]  # undocumented
     _value2member_map_: dict[Any, Enum]  # undocumented
@@ -160,6 +174,7 @@ if sys.version_info >= (3, 11):
         def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ...
         name: str
         clsname: str
+        member: Enum | None
     _magic_enum_attr = property
 else:
     _magic_enum_attr = types.DynamicClassAttribute
@@ -191,6 +206,9 @@ class Enum(metaclass=EnumMeta):
     if sys.version_info >= (3, 11):
         def __copy__(self) -> Self: ...
         def __deepcopy__(self, memo: Any) -> Self: ...
+    if sys.version_info >= (3, 12):
+        @classmethod
+        def __signature__(cls) -> str: ...
 
 if sys.version_info >= (3, 11):
     class ReprEnum(Enum): ...
diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi
index 1b4e59b7c120..0d08cdb19e3f 100644
--- a/mypy/typeshed/stdlib/functools.pyi
+++ b/mypy/typeshed/stdlib/functools.pyi
@@ -1,9 +1,9 @@
 import sys
 import types
-from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems
+from _typeshed import SupportsAllComparisons, SupportsItems
 from collections.abc import Callable, Hashable, Iterable, Sequence, Sized
 from typing import Any, Generic, NamedTuple, TypeVar, overload
-from typing_extensions import Literal, Self, TypeAlias, TypedDict, final
+from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final
 
 if sys.version_info >= (3, 9):
     from types import GenericAlias
@@ -28,10 +28,12 @@ if sys.version_info >= (3, 8):
 if sys.version_info >= (3, 9):
     __all__ += ["cache"]
 
-_AnyCallable: TypeAlias = Callable[..., object]
-
 _T = TypeVar("_T")
 _S = TypeVar("_S")
+_PWrapped = ParamSpec("_PWrapped")
+_RWrapped = TypeVar("_RWrapped")
+_PWrapper = ParamSpec("_PWrapper")
+_RWrapper = TypeVar("_RWrapper")
 
 @overload
 def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ...
@@ -85,31 +87,41 @@ else:
     ]
 WRAPPER_UPDATES: tuple[Literal["__dict__"]]
 
+class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWrapper]):
+    __wrapped__: Callable[_PWrapped, _RWrapped]
+    def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWrapper: ...
+    # as with ``Callable``, we'll assume that these attributes exist
+    __name__: str
+    __qualname__: str
+
+class _Wrapper(Generic[_PWrapped, _RWrapped]):
+    def __call__(self, f: Callable[_PWrapper, _RWrapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
+
 if sys.version_info >= (3, 12):
     def update_wrapper(
-        wrapper: _T,
-        wrapped: _AnyCallable,
+        wrapper: Callable[_PWrapper, _RWrapper],
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _T: ...
+    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
     def wraps(
-        wrapped: _AnyCallable,
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> IdentityFunction: ...
+    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
 
 else:
     def update_wrapper(
-        wrapper: _T,
-        wrapped: _AnyCallable,
+        wrapper: Callable[_PWrapper, _RWrapper],
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _T: ...
+    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
     def wraps(
-        wrapped: _AnyCallable,
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> IdentityFunction: ...
+    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
 
 def total_ordering(cls: type[_T]) -> type[_T]: ...
 def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ...
diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi
index 4b5ed3d8bda0..3e5e496ab501 100644
--- a/mypy/typeshed/stdlib/http/client.pyi
+++ b/mypy/typeshed/stdlib/http/client.pyi
@@ -169,6 +169,9 @@ class HTTPConnection:
     ) -> None: ...
     def getresponse(self) -> HTTPResponse: ...
     def set_debuglevel(self, level: int) -> None: ...
+    if sys.version_info >= (3, 12):
+        def get_proxy_response_headers(self) -> HTTPMessage | None: ...
+
     def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: ...
     def connect(self) -> None: ...
     def close(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/http/cookies.pyi b/mypy/typeshed/stdlib/http/cookies.pyi
index e24ef9cbdd2e..3d19bb108c2d 100644
--- a/mypy/typeshed/stdlib/http/cookies.pyi
+++ b/mypy/typeshed/stdlib/http/cookies.pyi
@@ -49,12 +49,12 @@ class Morsel(dict[str, Any], Generic[_T]):
 
 class BaseCookie(dict[str, Morsel[_T]], Generic[_T]):
     def __init__(self, input: _DataType | None = None) -> None: ...
-    def value_decode(self, val: str) -> _T: ...
-    def value_encode(self, val: _T) -> str: ...
+    def value_decode(self, val: str) -> tuple[_T, str]: ...
+    def value_encode(self, val: _T) -> tuple[_T, str]: ...
     def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: ...
     __str__ = output
     def js_output(self, attrs: list[str] | None = None) -> str: ...
     def load(self, rawdata: _DataType) -> None: ...
     def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: ...
 
-class SimpleCookie(BaseCookie[_T], Generic[_T]): ...
+class SimpleCookie(BaseCookie[str]): ...
diff --git a/mypy/typeshed/stdlib/imaplib.pyi b/mypy/typeshed/stdlib/imaplib.pyi
index 7781559c3888..a61848c9af13 100644
--- a/mypy/typeshed/stdlib/imaplib.pyi
+++ b/mypy/typeshed/stdlib/imaplib.pyi
@@ -108,9 +108,14 @@ class IMAP4:
     def print_log(self) -> None: ...
 
 class IMAP4_SSL(IMAP4):
-    keyfile: str
-    certfile: str
-    if sys.version_info >= (3, 9):
+    if sys.version_info < (3, 12):
+        keyfile: str
+        certfile: str
+    if sys.version_info >= (3, 12):
+        def __init__(
+            self, host: str = "", port: int = 993, *, ssl_context: SSLContext | None = None, timeout: float | None = None
+        ) -> None: ...
+    elif sys.version_info >= (3, 9):
         def __init__(
             self,
             host: str = "",
diff --git a/mypy/typeshed/stdlib/importlib/__init__.pyi b/mypy/typeshed/stdlib/importlib/__init__.pyi
index 8d73319f8c3d..8506efc01171 100644
--- a/mypy/typeshed/stdlib/importlib/__init__.pyi
+++ b/mypy/typeshed/stdlib/importlib/__init__.pyi
@@ -1,3 +1,4 @@
+import sys
 from collections.abc import Mapping, Sequence
 from importlib.abc import Loader
 from types import ModuleType
@@ -15,6 +16,9 @@ def __import__(
 
 # `importlib.import_module` return type should be kept the same as `builtins.__import__`
 def import_module(name: str, package: str | None = None) -> ModuleType: ...
-def find_loader(name: str, path: str | None = None) -> Loader | None: ...
+
+if sys.version_info < (3, 12):
+    def find_loader(name: str, path: str | None = None) -> Loader | None: ...
+
 def invalidate_caches() -> None: ...
 def reload(module: ModuleType) -> ModuleType: ...
diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi
index 4bf46104ba6d..28c33205a4df 100644
--- a/mypy/typeshed/stdlib/importlib/abc.pyi
+++ b/mypy/typeshed/stdlib/importlib/abc.pyi
@@ -20,7 +20,6 @@ from typing_extensions import Literal
 if sys.version_info >= (3, 11):
     __all__ = [
         "Loader",
-        "Finder",
         "MetaPathFinder",
         "PathEntryFinder",
         "ResourceLoader",
@@ -28,16 +27,19 @@ if sys.version_info >= (3, 11):
         "ExecutionLoader",
         "FileLoader",
         "SourceLoader",
-        "ResourceReader",
-        "Traversable",
-        "TraversableResources",
     ]
 
-class Finder(metaclass=ABCMeta): ...
+    if sys.version_info < (3, 12):
+        __all__ += ["Finder", "ResourceReader", "Traversable", "TraversableResources"]
+
+if sys.version_info < (3, 12):
+    class Finder(metaclass=ABCMeta): ...
 
 class Loader(metaclass=ABCMeta):
     def load_module(self, fullname: str) -> types.ModuleType: ...
-    def module_repr(self, module: types.ModuleType) -> str: ...
+    if sys.version_info < (3, 12):
+        def module_repr(self, module: types.ModuleType) -> str: ...
+
     def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ...
     # Not defined on the actual class for backwards-compatibility reasons,
     # but expected in new code.
@@ -68,21 +70,37 @@ class SourceLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta):
     def get_source(self, fullname: str) -> str | None: ...
     def path_stats(self, path: str) -> Mapping[str, Any]: ...
 
-# Please keep in sync with sys._MetaPathFinder
-class MetaPathFinder(Finder):
-    def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ...
-    def invalidate_caches(self) -> None: ...
-    # Not defined on the actual class, but expected to exist.
-    def find_spec(
-        self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ...
-    ) -> ModuleSpec | None: ...
-
-class PathEntryFinder(Finder):
-    def find_module(self, fullname: str) -> Loader | None: ...
-    def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ...
-    def invalidate_caches(self) -> None: ...
-    # Not defined on the actual class, but expected to exist.
-    def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ...
+# The base classes differ on 3.12:
+if sys.version_info >= (3, 12):
+    # Please keep in sync with sys._MetaPathFinder
+    class MetaPathFinder(metaclass=ABCMeta):
+        def invalidate_caches(self) -> None: ...
+        # Not defined on the actual class, but expected to exist.
+        def find_spec(
+            self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ...
+        ) -> ModuleSpec | None: ...
+
+    class PathEntryFinder(metaclass=ABCMeta):
+        def invalidate_caches(self) -> None: ...
+        # Not defined on the actual class, but expected to exist.
+        def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ...
+
+else:
+    # Please keep in sync with sys._MetaPathFinder
+    class MetaPathFinder(Finder):
+        def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ...
+        def invalidate_caches(self) -> None: ...
+        # Not defined on the actual class, but expected to exist.
+        def find_spec(
+            self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ...
+        ) -> ModuleSpec | None: ...
+
+    class PathEntryFinder(Finder):
+        def find_module(self, fullname: str) -> Loader | None: ...
+        def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ...
+        def invalidate_caches(self) -> None: ...
+        # Not defined on the actual class, but expected to exist.
+        def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ...
 
 class FileLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta):
     name: str
diff --git a/mypy/typeshed/stdlib/importlib/machinery.pyi b/mypy/typeshed/stdlib/importlib/machinery.pyi
index f5037da00d5f..1a9680ab3c46 100644
--- a/mypy/typeshed/stdlib/importlib/machinery.pyi
+++ b/mypy/typeshed/stdlib/importlib/machinery.pyi
@@ -31,8 +31,10 @@ class ModuleSpec:
 
 class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader):
     # MetaPathFinder
-    @classmethod
-    def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ...
+    if sys.version_info < (3, 12):
+        @classmethod
+        def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ...
+
     @classmethod
     def find_spec(
         cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None
@@ -47,8 +49,9 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader)
     @classmethod
     def get_source(cls, fullname: str) -> None: ...
     # Loader
-    @staticmethod
-    def module_repr(module: types.ModuleType) -> str: ...
+    if sys.version_info < (3, 12):
+        @staticmethod
+        def module_repr(module: types.ModuleType) -> str: ...
     if sys.version_info >= (3, 10):
         @staticmethod
         def create_module(spec: ModuleSpec) -> types.ModuleType | None: ...
@@ -62,8 +65,10 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader)
 
 class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader):
     # MetaPathFinder
-    @classmethod
-    def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ...
+    if sys.version_info < (3, 12):
+        @classmethod
+        def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ...
+
     @classmethod
     def find_spec(
         cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None
@@ -78,8 +83,9 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader):
     @classmethod
     def get_source(cls, fullname: str) -> None: ...
     # Loader
-    @staticmethod
-    def module_repr(m: types.ModuleType) -> str: ...
+    if sys.version_info < (3, 12):
+        @staticmethod
+        def module_repr(m: types.ModuleType) -> str: ...
     if sys.version_info >= (3, 10):
         @staticmethod
         def create_module(spec: ModuleSpec) -> types.ModuleType | None: ...
@@ -91,8 +97,10 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader):
     def exec_module(module: types.ModuleType) -> None: ...
 
 class WindowsRegistryFinder(importlib.abc.MetaPathFinder):
-    @classmethod
-    def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ...
+    if sys.version_info < (3, 12):
+        @classmethod
+        def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ...
+
     @classmethod
     def find_spec(
         cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None
@@ -116,8 +124,9 @@ class PathFinder:
     def find_spec(
         cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None
     ) -> ModuleSpec | None: ...
-    @classmethod
-    def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ...
+    if sys.version_info < (3, 12):
+        @classmethod
+        def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ...
 
 SOURCE_SUFFIXES: list[str]
 DEBUG_BYTECODE_SUFFIXES: list[str]
diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
index 0f8a6f56cf88..e52756544e9a 100644
--- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
+++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
@@ -88,6 +88,7 @@ if sys.version_info >= (3, 10):
         @property
         def groups(self) -> set[str]: ...
 
+if sys.version_info >= (3, 10) and sys.version_info < (3, 12):
     class SelectableGroups(dict[str, EntryPoints]):  # use as dict is deprecated since 3.10
         @classmethod
         def load(cls, eps: Iterable[EntryPoint]) -> Self: ...
@@ -195,6 +196,16 @@ def distributions(
 
 if sys.version_info >= (3, 10):
     def metadata(distribution_name: str) -> PackageMetadata: ...
+
+else:
+    def metadata(distribution_name: str) -> Message: ...
+
+if sys.version_info >= (3, 12):
+    def entry_points(
+        *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ...
+    ) -> EntryPoints: ...
+
+elif sys.version_info >= (3, 10):
     @overload
     def entry_points() -> SelectableGroups: ...  # type: ignore[misc]
     @overload
@@ -203,7 +214,6 @@ if sys.version_info >= (3, 10):
     ) -> EntryPoints: ...
 
 else:
-    def metadata(distribution_name: str) -> Message: ...
     def entry_points() -> dict[str, list[EntryPoint]]: ...
 
 def version(distribution_name: str) -> str: ...
diff --git a/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi b/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi
index e3504fe4036a..64fefa9a84e2 100644
--- a/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi
+++ b/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi
@@ -1,5 +1,6 @@
+import sys
 from collections.abc import Iterator
-from typing import Any, Protocol, TypeVar
+from typing import Any, Protocol, TypeVar, overload
 
 _T = TypeVar("_T")
 
@@ -8,15 +9,32 @@ class PackageMetadata(Protocol):
     def __contains__(self, item: str) -> bool: ...
     def __getitem__(self, key: str) -> str: ...
     def __iter__(self) -> Iterator[str]: ...
-    def get_all(self, name: str, failobj: _T = ...) -> list[Any] | _T: ...
     @property
     def json(self) -> dict[str, str | list[str]]: ...
+    @overload
+    def get_all(self, name: str, failobj: None = None) -> list[Any] | None: ...
+    @overload
+    def get_all(self, name: str, failobj: _T) -> list[Any] | _T: ...
+    if sys.version_info >= (3, 12):
+        @overload
+        def get(self, name: str, failobj: None = None) -> str | None: ...
+        @overload
+        def get(self, name: str, failobj: _T) -> _T | str: ...
 
-class SimplePath(Protocol):
-    def joinpath(self) -> SimplePath: ...
-    def parent(self) -> SimplePath: ...
-    def read_text(self) -> str: ...
-    # There was a bug in `SimplePath` definition in cpython, see #8451
-    #  Strictly speaking `__div__` was defined in 3.10, not __truediv__,
-    # but it should have always been `__truediv__`.
-    def __truediv__(self) -> SimplePath: ...
+if sys.version_info >= (3, 12):
+    class SimplePath(Protocol[_T]):
+        def joinpath(self) -> _T: ...
+        @property
+        def parent(self) -> _T: ...
+        def read_text(self) -> str: ...
+        def __truediv__(self, other: _T | str) -> _T: ...
+
+else:
+    class SimplePath(Protocol):
+        def joinpath(self) -> SimplePath: ...
+        def parent(self) -> SimplePath: ...
+        def read_text(self) -> str: ...
+        # There was a bug in `SimplePath` definition in cpython, see #8451
+        #  Strictly speaking `__div__` was defined in 3.10, not __truediv__,
+        # but it should have always been `__truediv__`.
+        def __truediv__(self) -> SimplePath: ...
diff --git a/mypy/typeshed/stdlib/importlib/resources/__init__.pyi b/mypy/typeshed/stdlib/importlib/resources/__init__.pyi
index ba3d9b087754..8d656563772c 100644
--- a/mypy/typeshed/stdlib/importlib/resources/__init__.pyi
+++ b/mypy/typeshed/stdlib/importlib/resources/__init__.pyi
@@ -7,6 +7,9 @@ from types import ModuleType
 from typing import Any, BinaryIO, TextIO
 from typing_extensions import TypeAlias
 
+if sys.version_info >= (3, 9):
+    from importlib.abc import Traversable
+
 __all__ = ["Package", "Resource", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"]
 
 if sys.version_info >= (3, 9):
@@ -31,9 +34,13 @@ def is_resource(package: Package, name: str) -> bool: ...
 def contents(package: Package) -> Iterator[str]: ...
 
 if sys.version_info >= (3, 9):
-    from importlib.abc import Traversable
-    def files(package: Package) -> Traversable: ...
     def as_file(path: Traversable) -> AbstractContextManager[Path]: ...
 
+if sys.version_info >= (3, 12):
+    def files(anchor: Package | None = ...) -> Traversable: ...
+
+elif sys.version_info >= (3, 9):
+    def files(package: Package) -> Traversable: ...
+
 if sys.version_info >= (3, 10):
     from importlib.abc import ResourceReader as ResourceReader
diff --git a/mypy/typeshed/stdlib/importlib/util.pyi b/mypy/typeshed/stdlib/importlib/util.pyi
index f988eb270a26..6608f70d4469 100644
--- a/mypy/typeshed/stdlib/importlib/util.pyi
+++ b/mypy/typeshed/stdlib/importlib/util.pyi
@@ -1,5 +1,6 @@
 import importlib.abc
 import importlib.machinery
+import sys
 import types
 from _typeshed import ReadableBuffer, StrOrBytesPath
 from collections.abc import Callable
@@ -8,9 +9,11 @@ from typing_extensions import ParamSpec
 
 _P = ParamSpec("_P")
 
-def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ...
-def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ...
-def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ...
+if sys.version_info < (3, 12):
+    def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ...
+    def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ...
+    def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ...
+
 def resolve_name(name: str, package: str | None) -> str: ...
 
 MAGIC_NUMBER: bytes
@@ -37,4 +40,4 @@ class LazyLoader(importlib.abc.Loader):
     def factory(cls, loader: importlib.abc.Loader) -> Callable[..., LazyLoader]: ...
     def exec_module(self, module: types.ModuleType) -> None: ...
 
-def source_hash(source_bytes: ReadableBuffer) -> int: ...
+def source_hash(source_bytes: ReadableBuffer) -> bytes: ...
diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi
index a509ec3af9f2..10ffa4a778e8 100644
--- a/mypy/typeshed/stdlib/pathlib.pyi
+++ b/mypy/typeshed/stdlib/pathlib.pyi
@@ -8,6 +8,7 @@ from _typeshed import (
     ReadableBuffer,
     StrOrBytesPath,
     StrPath,
+    Unused,
 )
 from collections.abc import Callable, Generator, Iterator, Sequence
 from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper
@@ -38,9 +39,13 @@ class PurePath(PathLike[str]):
     def suffixes(self) -> list[str]: ...
     @property
     def stem(self) -> str: ...
-    def __new__(cls, *args: StrPath) -> Self: ...
+    if sys.version_info >= (3, 12):
+        def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ...
+        def __init__(self, *args: StrPath) -> None: ...
+    else:
+        def __new__(cls, *args: StrPath) -> Self: ...
+
     def __hash__(self) -> int: ...
-    def __eq__(self, other: object) -> bool: ...
     def __fspath__(self) -> str: ...
     def __lt__(self, other: PurePath) -> bool: ...
     def __le__(self, other: PurePath) -> bool: ...
@@ -53,7 +58,9 @@ class PurePath(PathLike[str]):
     def as_uri(self) -> str: ...
     def is_absolute(self) -> bool: ...
     def is_reserved(self) -> bool: ...
-    if sys.version_info >= (3, 9):
+    if sys.version_info >= (3, 12):
+        def is_relative_to(self, __other: StrPath, *_deprecated: StrPath) -> bool: ...
+    elif sys.version_info >= (3, 9):
         def is_relative_to(self, *other: StrPath) -> bool: ...
 
     if sys.version_info >= (3, 12):
@@ -61,7 +68,11 @@ class PurePath(PathLike[str]):
     else:
         def match(self, path_pattern: str) -> bool: ...
 
-    def relative_to(self, *other: StrPath) -> Self: ...
+    if sys.version_info >= (3, 12):
+        def relative_to(self, __other: StrPath, *_deprecated: StrPath, walk_up: bool = False) -> Self: ...
+    else:
+        def relative_to(self, *other: StrPath) -> Self: ...
+
     def with_name(self, name: str) -> Self: ...
     if sys.version_info >= (3, 9):
         def with_stem(self, stem: str) -> Self: ...
diff --git a/mypy/typeshed/stdlib/poplib.pyi b/mypy/typeshed/stdlib/poplib.pyi
index c64e47e8ef72..808e7e5222af 100644
--- a/mypy/typeshed/stdlib/poplib.pyi
+++ b/mypy/typeshed/stdlib/poplib.pyi
@@ -1,5 +1,6 @@
 import socket
 import ssl
+import sys
 from builtins import list as _list  # conflicts with a method named "list"
 from re import Pattern
 from typing import Any, BinaryIO, NoReturn, overload
@@ -51,14 +52,20 @@ class POP3:
     def stls(self, context: ssl.SSLContext | None = None) -> bytes: ...
 
 class POP3_SSL(POP3):
-    def __init__(
-        self,
-        host: str,
-        port: int = 995,
-        keyfile: str | None = None,
-        certfile: str | None = None,
-        timeout: float = ...,
-        context: ssl.SSLContext | None = None,
-    ) -> None: ...
-    # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored
-    def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ...
+    if sys.version_info >= (3, 12):
+        def __init__(
+            self, host: str, port: int = 995, *, timeout: float = ..., context: ssl.SSLContext | None = None
+        ) -> None: ...
+        def stls(self, context: Any = None) -> NoReturn: ...
+    else:
+        def __init__(
+            self,
+            host: str,
+            port: int = 995,
+            keyfile: str | None = None,
+            certfile: str | None = None,
+            timeout: float = ...,
+            context: ssl.SSLContext | None = None,
+        ) -> None: ...
+        # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored
+        def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ...
diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi
index 584fa164fec9..e584d7f571a7 100644
--- a/mypy/typeshed/stdlib/smtplib.pyi
+++ b/mypy/typeshed/stdlib/smtplib.pyi
@@ -128,7 +128,13 @@ class SMTP:
     def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: ...
     def auth_login(self, challenge: ReadableBuffer | None = None) -> str: ...
     def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: ...
-    def starttls(self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None) -> _Reply: ...
+    if sys.version_info >= (3, 12):
+        def starttls(self, *, context: SSLContext | None = None) -> _Reply: ...
+    else:
+        def starttls(
+            self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None
+        ) -> _Reply: ...
+
     def sendmail(
         self,
         from_addr: str,
@@ -152,17 +158,29 @@ class SMTP_SSL(SMTP):
     keyfile: str | None
     certfile: str | None
     context: SSLContext
-    def __init__(
-        self,
-        host: str = "",
-        port: int = 0,
-        local_hostname: str | None = None,
-        keyfile: str | None = None,
-        certfile: str | None = None,
-        timeout: float = ...,
-        source_address: _SourceAddress | None = None,
-        context: SSLContext | None = None,
-    ) -> None: ...
+    if sys.version_info >= (3, 12):
+        def __init__(
+            self,
+            host: str = "",
+            port: int = 0,
+            local_hostname: str | None = None,
+            *,
+            timeout: float = ...,
+            source_address: _SourceAddress | None = None,
+            context: SSLContext | None = None,
+        ) -> None: ...
+    else:
+        def __init__(
+            self,
+            host: str = "",
+            port: int = 0,
+            local_hostname: str | None = None,
+            keyfile: str | None = None,
+            certfile: str | None = None,
+            timeout: float = ...,
+            source_address: _SourceAddress | None = None,
+            context: SSLContext | None = None,
+        ) -> None: ...
 
 LMTP_PORT: int
 
diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
index 41f731e21e26..e85f49207763 100644
--- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
+++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
@@ -351,7 +351,7 @@ class Connection:
     @overload
     def cursor(self, cursorClass: None = None) -> Cursor: ...
     @overload
-    def cursor(self, cursorClass: Callable[[], _CursorT]) -> _CursorT: ...
+    def cursor(self, cursorClass: Callable[[Connection], _CursorT]) -> _CursorT: ...
     def execute(self, sql: str, parameters: _Parameters = ...) -> Cursor: ...
     def executemany(self, __sql: str, __parameters: Iterable[_Parameters]) -> Cursor: ...
     def executescript(self, __sql_script: str) -> Cursor: ...
diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi
index a03c48c039dd..a0a88a8ac82e 100644
--- a/mypy/typeshed/stdlib/tkinter/__init__.pyi
+++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi
@@ -6,7 +6,7 @@ from enum import Enum
 from tkinter.constants import *
 from tkinter.font import _FontDescription
 from types import TracebackType
-from typing import Any, Generic, NamedTuple, Protocol, TypeVar, overload, type_check_only
+from typing import Any, Generic, NamedTuple, TypeVar, overload, type_check_only
 from typing_extensions import Literal, TypeAlias, TypedDict
 
 if sys.version_info >= (3, 9):
@@ -720,9 +720,6 @@ class Wm:
     def wm_withdraw(self) -> None: ...
     withdraw = wm_withdraw
 
-class _ExceptionReportingCallback(Protocol):
-    def __call__(self, __exc: type[BaseException], __val: BaseException, __tb: TracebackType | None) -> object: ...
-
 class Tk(Misc, Wm):
     master: None
     def __init__(
@@ -764,7 +761,7 @@ class Tk(Misc, Wm):
     config = configure
     def destroy(self) -> None: ...
     def readprofile(self, baseName: str, className: str) -> None: ...
-    report_callback_exception: _ExceptionReportingCallback
+    report_callback_exception: Callable[[type[BaseException], BaseException, TracebackType | None], object]
     # Tk has __getattr__ so that tk_instance.foo falls back to tk_instance.tk.foo
     # Please keep in sync with _tkinter.TkappType.
     # Some methods are intentionally missing because they are inherited from Misc instead.
diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi
index 80ea40879dee..36cd5f1f6e9d 100644
--- a/mypy/typeshed/stdlib/turtle.pyi
+++ b/mypy/typeshed/stdlib/turtle.pyi
@@ -129,6 +129,9 @@ __all__ = [
     "Terminator",
 ]
 
+if sys.version_info >= (3, 12):
+    __all__ += ["teleport"]
+
 # Note: '_Color' is the alias we use for arguments and _AnyColor is the
 # alias we use for return types. Really, these two aliases should be the
 # same, but as per the "no union returns" typeshed policy, we'll return
@@ -648,6 +651,9 @@ def shape(name: None = None) -> str: ...
 @overload
 def shape(name: str) -> None: ...
 
+if sys.version_info >= (3, 12):
+    def teleport(x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ...
+
 # Unsafely overlaps when no arguments are provided
 @overload
 def shapesize() -> tuple[float, float, float]: ...  # type: ignore[misc]
diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi
index a9bffdf5214f..2c1ebe6d7f95 100644
--- a/mypy/typeshed/stdlib/typing.pyi
+++ b/mypy/typeshed/stdlib/typing.pyi
@@ -126,6 +126,9 @@ if sys.version_info >= (3, 11):
         "reveal_type",
     ]
 
+if sys.version_info >= (3, 12):
+    __all__ += ["TypeAliasType", "override"]
+
 ContextManager = AbstractContextManager
 AsyncContextManager = AbstractAsyncContextManager
 
@@ -323,7 +326,9 @@ AnyStr = TypeVar("AnyStr", str, bytes)  # noqa: Y001
 
 # Technically in 3.7 this inherited from GenericMeta. But let's not reflect that, since
 # type checkers tend to assume that Protocols all have the ABCMeta metaclass.
-class _ProtocolMeta(ABCMeta): ...
+class _ProtocolMeta(ABCMeta):
+    if sys.version_info >= (3, 12):
+        def __init__(cls, *args: Any, **kwargs: Any) -> None: ...
 
 # Abstract base classes.
 
@@ -945,7 +950,7 @@ if sys.version_info >= (3, 10):
 def _type_repr(obj: object) -> str: ...
 
 if sys.version_info >= (3, 12):
-    def override(__arg: _F) -> _F: ...
+    def override(__method: _F) -> _F: ...
     @_final
     class TypeAliasType:
         def __init__(
diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi
index 66120197b269..baf025bdeb5a 100644
--- a/mypy/typeshed/stdlib/unittest/mock.pyi
+++ b/mypy/typeshed/stdlib/unittest/mock.pyi
@@ -106,7 +106,25 @@ class Base:
 # We subclass with "Any" because mocks are explicitly designed to stand in for other types,
 # something that can't be expressed with our static type system.
 class NonCallableMock(Base, Any):
-    def __new__(__cls, *args: Any, **kw: Any) -> Self: ...
+    if sys.version_info >= (3, 12):
+        def __new__(
+            cls,
+            spec: list[str] | object | type[object] | None = None,
+            wraps: Any | None = None,
+            name: str | None = None,
+            spec_set: list[str] | object | type[object] | None = None,
+            parent: NonCallableMock | None = None,
+            _spec_state: Any | None = None,
+            _new_name: str = "",
+            _new_parent: NonCallableMock | None = None,
+            _spec_as_instance: bool = False,
+            _eat_self: bool | None = None,
+            unsafe: bool = False,
+            **kwargs: Any,
+        ) -> Self: ...
+    else:
+        def __new__(__cls, *args: Any, **kw: Any) -> Self: ...
+
     def __init__(
         self,
         spec: list[str] | object | type[object] | None = None,
diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi
index 079c9755528c..a4849dfa2e6e 100644
--- a/mypy/typeshed/stdlib/urllib/request.pyi
+++ b/mypy/typeshed/stdlib/urllib/request.pyi
@@ -235,7 +235,11 @@ class _HTTPConnectionProtocol(Protocol):
     ) -> HTTPConnection: ...
 
 class AbstractHTTPHandler(BaseHandler):  # undocumented
-    def __init__(self, debuglevel: int = 0) -> None: ...
+    if sys.version_info >= (3, 12):
+        def __init__(self, debuglevel: int | None = None) -> None: ...
+    else:
+        def __init__(self, debuglevel: int = 0) -> None: ...
+
     def set_http_debuglevel(self, level: int) -> None: ...
     def do_request_(self, request: Request) -> Request: ...
     def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: ...
@@ -245,9 +249,15 @@ class HTTPHandler(AbstractHTTPHandler):
     def http_request(self, request: Request) -> Request: ...  # undocumented
 
 class HTTPSHandler(AbstractHTTPHandler):
-    def __init__(
-        self, debuglevel: int = 0, context: ssl.SSLContext | None = None, check_hostname: bool | None = None
-    ) -> None: ...
+    if sys.version_info >= (3, 12):
+        def __init__(
+            self, debuglevel: int | None = None, context: ssl.SSLContext | None = None, check_hostname: bool | None = None
+        ) -> None: ...
+    else:
+        def __init__(
+            self, debuglevel: int = 0, context: ssl.SSLContext | None = None, check_hostname: bool | None = None
+        ) -> None: ...
+
     def https_open(self, req: Request) -> HTTPResponse: ...
     def https_request(self, request: Request) -> Request: ...  # undocumented
 
diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi
index ecb98d4269d5..ca5366602ceb 100644
--- a/mypy/typeshed/stdlib/weakref.pyi
+++ b/mypy/typeshed/stdlib/weakref.pyi
@@ -65,6 +65,10 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]):
     def copy(self) -> WeakValueDictionary[_KT, _VT]: ...
     __copy__ = copy
     def __deepcopy__(self, memo: Any) -> Self: ...
+    @overload
+    def get(self, key: _KT, default: None = None) -> _VT | None: ...
+    @overload
+    def get(self, key: _KT, default: _T) -> _VT | _T: ...
     # These are incompatible with Mapping
     def keys(self) -> Iterator[_KT]: ...  # type: ignore[override]
     def values(self) -> Iterator[_VT]: ...  # type: ignore[override]
@@ -107,6 +111,10 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]):
     def copy(self) -> WeakKeyDictionary[_KT, _VT]: ...
     __copy__ = copy
     def __deepcopy__(self, memo: Any) -> Self: ...
+    @overload
+    def get(self, key: _KT, default: None = None) -> _VT | None: ...
+    @overload
+    def get(self, key: _KT, default: _T) -> _VT | _T: ...
     # These are incompatible with Mapping
     def keys(self) -> Iterator[_KT]: ...  # type: ignore[override]
     def values(self) -> Iterator[_VT]: ...  # type: ignore[override]
diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi
index abda7a3b9625..dc07eb3f2a38 100644
--- a/mypy/typeshed/stdlib/zipfile.pyi
+++ b/mypy/typeshed/stdlib/zipfile.pyi
@@ -257,7 +257,11 @@ if sys.version_info >= (3, 8):
             @property
             def open(self) -> _PathOpenProtocol: ...
 
-        def iterdir(self) -> Iterator[Path]: ...
+        if sys.version_info >= (3, 10):
+            def iterdir(self) -> Iterator[Self]: ...
+        else:
+            def iterdir(self) -> Iterator[Path]: ...
+
         def is_dir(self) -> bool: ...
         def is_file(self) -> bool: ...
         def exists(self) -> bool: ...
@@ -274,6 +278,14 @@ if sys.version_info >= (3, 8):
             def joinpath(self, *other: StrPath) -> Path: ...
         else:
             def joinpath(self, add: StrPath) -> Path: ...  # undocumented
+        if sys.version_info >= (3, 12):
+            def glob(self, pattern: str) -> Iterator[Self]: ...
+            def rglob(self, pattern: str) -> Iterator[Self]: ...
+            def is_symlink(self) -> Literal[False]: ...
+            def relative_to(self, other: Path, *extra: StrPath) -> str: ...
+            def match(self, path_pattern: str) -> bool: ...
+            def __eq__(self, other: object) -> bool: ...
+            def __hash__(self) -> int: ...
 
         def __truediv__(self, add: StrPath) -> Path: ...
 

From 9859fe7ba3ae046ed3aa822f869ec4866978eac8 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Mon, 26 Sep 2022 12:55:07 -0700
Subject: [PATCH 131/288] Remove use of LiteralString in builtins (#13743)

---
 mypy/typeshed/stdlib/builtins.pyi | 93 -------------------------------
 1 file changed, 93 deletions(-)

diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index cf4f857c5524..2c7331e87b1a 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -56,7 +56,6 @@ from typing import (  # noqa: Y022
 from typing_extensions import (
     Concatenate,
     Literal,
-    LiteralString,
     ParamSpec,
     Self,
     SupportsIndex,
@@ -442,17 +441,8 @@ class str(Sequence[str]):
     def __new__(cls, object: object = ...) -> Self: ...
     @overload
     def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ...
-    @overload
-    def capitalize(self: LiteralString) -> LiteralString: ...
-    @overload
     def capitalize(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def casefold(self: LiteralString) -> LiteralString: ...
-    @overload
     def casefold(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
-    @overload
     def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
     def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
@@ -460,20 +450,11 @@ class str(Sequence[str]):
         self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
     if sys.version_info >= (3, 8):
-        @overload
-        def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ...
-        @overload
         def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ...  # type: ignore[misc]
     else:
-        @overload
-        def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ...
-        @overload
         def expandtabs(self, tabsize: int = 8) -> str: ...  # type: ignore[misc]
 
     def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
-    @overload
-    def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ...
-    @overload
     def format(self, *args: object, **kwargs: object) -> str: ...
     def format_map(self, map: _FormatMapMapping) -> str: ...
     def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
@@ -489,91 +470,32 @@ class str(Sequence[str]):
     def isspace(self) -> bool: ...
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
-    @overload
-    def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ...
-    @overload
     def join(self, __iterable: Iterable[str]) -> str: ...  # type: ignore[misc]
-    @overload
-    def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
-    @overload
     def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
-    @overload
-    def lower(self: LiteralString) -> LiteralString: ...
-    @overload
     def lower(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
-    @overload
     def lstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
-    @overload
-    def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
-    @overload
     def partition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
-    @overload
-    def replace(
-        self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1
-    ) -> LiteralString: ...
-    @overload
     def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ...  # type: ignore[misc]
     if sys.version_info >= (3, 9):
-        @overload
-        def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ...
-        @overload
         def removeprefix(self, __prefix: str) -> str: ...  # type: ignore[misc]
-        @overload
-        def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ...
-        @overload
         def removesuffix(self, __suffix: str) -> str: ...  # type: ignore[misc]
 
     def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
-    @overload
-    def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
-    @overload
     def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
-    @overload
-    def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
-    @overload
     def rpartition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
-    @overload
-    def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
-    @overload
     def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
-    @overload
-    def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
-    @overload
     def rstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
-    @overload
-    def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
-    @overload
     def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
-    @overload
-    def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ...
-    @overload
     def splitlines(self, keepends: bool = False) -> list[str]: ...  # type: ignore[misc]
     def startswith(
         self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
-    @overload
-    def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
-    @overload
     def strip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
-    @overload
-    def swapcase(self: LiteralString) -> LiteralString: ...
-    @overload
     def swapcase(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def title(self: LiteralString) -> LiteralString: ...
-    @overload
     def title(self) -> str: ...  # type: ignore[misc]
     def translate(self, __table: _TranslateTable) -> str: ...
-    @overload
-    def upper(self: LiteralString) -> LiteralString: ...
-    @overload
     def upper(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ...
-    @overload
     def zfill(self, __width: SupportsIndex) -> str: ...  # type: ignore[misc]
     @staticmethod
     @overload
@@ -584,9 +506,6 @@ class str(Sequence[str]):
     @staticmethod
     @overload
     def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ...
-    @overload
-    def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ...
-    @overload
     def __add__(self, __value: str) -> str: ...  # type: ignore[misc]
     # Incompatible with Sequence.__contains__
     def __contains__(self, __key: str) -> bool: ...  # type: ignore[override]
@@ -595,25 +514,13 @@ class str(Sequence[str]):
     def __getitem__(self, __key: SupportsIndex | slice) -> str: ...
     def __gt__(self, __value: str) -> bool: ...
     def __hash__(self) -> int: ...
-    @overload
-    def __iter__(self: LiteralString) -> Iterator[LiteralString]: ...
-    @overload
     def __iter__(self) -> Iterator[str]: ...  # type: ignore[misc]
     def __le__(self, __value: str) -> bool: ...
     def __len__(self) -> int: ...
     def __lt__(self, __value: str) -> bool: ...
-    @overload
-    def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ...
-    @overload
     def __mod__(self, __value: Any) -> str: ...
-    @overload
-    def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
-    @overload
     def __mul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __ne__(self, __value: object) -> bool: ...
-    @overload
-    def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
-    @overload
     def __rmul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __getnewargs__(self) -> tuple[str]: ...
 

From 378a866e90f534a0f0e9bbbca1324317ba784bbb Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 29 Oct 2022 12:47:21 -0700
Subject: [PATCH 132/288] Revert sum literal integer change (#13961)

This is allegedly causing large performance problems, see 13821

typeshed/8231 had zero hits on mypy_primer, so it's not the worst thing
to undo. Patching this in typeshed also feels weird, since there's a
more general soundness issue. If a typevar has a bound or constraint, we
might not want to solve it to a Literal.

If we can confirm the performance regression or fix the unsoundness
within mypy, I might pursue upstreaming this in typeshed.

(Reminder: add this to the sync_typeshed script once merged)
---
 mypy/typeshed/stdlib/builtins.pyi | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index 2c7331e87b1a..9e413579e0fb 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -1698,11 +1698,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit
 # Instead, we special-case the most common examples of this: bool and literal integers.
 if sys.version_info >= (3, 8):
     @overload
-    def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool], start: int = 0) -> int: ...  # type: ignore[misc]
 
 else:
     @overload
-    def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ...  # type: ignore[misc]
 
 @overload
 def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ...

From 2816b97d5c61355d089b291b861df4c64f4aa96a Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Mon, 1 May 2023 20:34:55 +0100
Subject: [PATCH 133/288] Revert typeshed ctypes change Since the plugin
 provides superior type checking:
 https://github.com/python/mypy/pull/13987#issuecomment-1310863427 A manual
 cherry-pick of e437cdf.

---
 mypy/typeshed/stdlib/_ctypes.pyi | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi
index 1f15ac057988..538c07d54aad 100644
--- a/mypy/typeshed/stdlib/_ctypes.pyi
+++ b/mypy/typeshed/stdlib/_ctypes.pyi
@@ -159,11 +159,7 @@ class Array(_CData, Generic[_CT]):
     def _type_(self) -> type[_CT]: ...
     @_type_.setter
     def _type_(self, value: type[_CT]) -> None: ...
-    # Note: only available if _CT == c_char
-    @property
-    def raw(self) -> bytes: ...
-    @raw.setter
-    def raw(self, value: ReadableBuffer) -> None: ...
+    raw: bytes  # Note: only available if _CT == c_char
     value: Any  # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
     # TODO These methods cannot be annotated correctly at the moment.
     # All of these "Any"s stand for the array's element type, but it's not possible to use _CT

From 7d987a1056e4c6ee6f75aa4841f7e0c73ca9b496 Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Sat, 4 Mar 2023 13:14:11 +0000
Subject: [PATCH 134/288] Revert use of `ParamSpec` for `functools.wraps`

---
 mypy/typeshed/stdlib/functools.pyi | 40 +++++++++++-------------------
 1 file changed, 14 insertions(+), 26 deletions(-)

diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi
index 0d08cdb19e3f..1b4e59b7c120 100644
--- a/mypy/typeshed/stdlib/functools.pyi
+++ b/mypy/typeshed/stdlib/functools.pyi
@@ -1,9 +1,9 @@
 import sys
 import types
-from _typeshed import SupportsAllComparisons, SupportsItems
+from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems
 from collections.abc import Callable, Hashable, Iterable, Sequence, Sized
 from typing import Any, Generic, NamedTuple, TypeVar, overload
-from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final
+from typing_extensions import Literal, Self, TypeAlias, TypedDict, final
 
 if sys.version_info >= (3, 9):
     from types import GenericAlias
@@ -28,12 +28,10 @@ if sys.version_info >= (3, 8):
 if sys.version_info >= (3, 9):
     __all__ += ["cache"]
 
+_AnyCallable: TypeAlias = Callable[..., object]
+
 _T = TypeVar("_T")
 _S = TypeVar("_S")
-_PWrapped = ParamSpec("_PWrapped")
-_RWrapped = TypeVar("_RWrapped")
-_PWrapper = ParamSpec("_PWrapper")
-_RWrapper = TypeVar("_RWrapper")
 
 @overload
 def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ...
@@ -87,41 +85,31 @@ else:
     ]
 WRAPPER_UPDATES: tuple[Literal["__dict__"]]
 
-class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWrapper]):
-    __wrapped__: Callable[_PWrapped, _RWrapped]
-    def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWrapper: ...
-    # as with ``Callable``, we'll assume that these attributes exist
-    __name__: str
-    __qualname__: str
-
-class _Wrapper(Generic[_PWrapped, _RWrapped]):
-    def __call__(self, f: Callable[_PWrapper, _RWrapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
-
 if sys.version_info >= (3, 12):
     def update_wrapper(
-        wrapper: Callable[_PWrapper, _RWrapper],
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapper: _T,
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
+    ) -> _T: ...
     def wraps(
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
+    ) -> IdentityFunction: ...
 
 else:
     def update_wrapper(
-        wrapper: Callable[_PWrapper, _RWrapper],
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapper: _T,
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
+    ) -> _T: ...
     def wraps(
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
+    ) -> IdentityFunction: ...
 
 def total_ordering(cls: type[_T]) -> type[_T]: ...
 def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ...

From ec665cc8b4f59e81ec28ea946bc673cb20028751 Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Fri, 15 Sep 2023 20:50:25 +0100
Subject: [PATCH 135/288] Fix the newly-uncovered stubtest bug

---
 mypy/stubtest.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index a804835a632b..a5028581f7a1 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -1686,7 +1686,7 @@ def get_importable_stdlib_modules() -> set[str]:
                 modules_by_finder[m.module_finder].add(m.name)
         for finder, module_group in modules_by_finder.items():
             if (
-                "site-packages" not in Path(finder.path).parents
+                "site-packages" not in Path(finder.path).parts
                 # if "_queue" is present, it's most likely the module finder
                 # for stdlib extension modules;
                 # if "queue" is present, it's most likely the module finder

From 0222bf492e035ab1062a6d6fc38abc249a8ae211 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Sat, 16 Sep 2023 21:57:54 +0100
Subject: [PATCH 136/288] Update hashes in `sync-typeshed.py` following
 typeshed sync (#16126)

Followup to #16121
---
 misc/sync-typeshed.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py
index 36967f86262e..77f921a89b1b 100644
--- a/misc/sync-typeshed.py
+++ b/misc/sync-typeshed.py
@@ -179,10 +179,10 @@ def main() -> None:
     print("Created typeshed sync commit.")
 
     commits_to_cherry_pick = [
-        "2f6b6e66c",  # LiteralString reverts
-        "120af30e7",  # sum reverts
-        "1866d28f1",  # ctypes reverts
-        "3240da455",  # ParamSpec for functools.wraps
+        "9859fe7ba",  # LiteralString reverts
+        "378a866e9",  # sum reverts
+        "2816b97d5",  # ctypes reverts
+        "7d987a105",  # ParamSpec for functools.wraps
     ]
     for commit in commits_to_cherry_pick:
         try:

From b65cd9ae6e1ae4b25e4af4f0e855646bbe382b29 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Sun, 17 Sep 2023 05:39:48 -0400
Subject: [PATCH 137/288] dataclass.replace: allow transformed classes (#15915)

We [already
synthesize](https://github.com/python/mypy/issues/15843#issuecomment-1685159995)
`__dataclass_fields__` for all classes including
`@dataclass_transform`'d ones, thus assume more than PEP-681 says. We
might as well assume `dataclasses.replace` applies to all such classes.
This way we risk false positives, since it will raise at runtime.

Fixes #15843.
---
 mypy/plugins/dataclasses.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py
index 99f079705c3f..f2ae3fd3d01e 100644
--- a/mypy/plugins/dataclasses.py
+++ b/mypy/plugins/dataclasses.py
@@ -375,9 +375,7 @@ def transform(self) -> bool:
             add_attribute_to_class(self._api, self._cls, "__match_args__", match_args_type)
 
         self._add_dataclass_fields_magic_attribute()
-
-        if self._spec is _TRANSFORM_SPEC_FOR_DATACLASSES:
-            self._add_internal_replace_method(attributes)
+        self._add_internal_replace_method(attributes)
         if "__post_init__" in info.names:
             self._add_internal_post_init_method(attributes)
 

From c99b93646c7edb5ae33a84c9b322b289b97e0117 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sun, 17 Sep 2023 05:22:59 -0700
Subject: [PATCH 138/288] Fix mypyc regression with pretty (#16124)

Fixes #15877

The regression was introduced by #15070. Previously, Errors objects created
in a mypyc build would just use all the default values; now they use the
actual options object involved.
---
 mypy/errors.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/mypy/errors.py b/mypy/errors.py
index a678b790cb8c..4e62a48aeb27 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -909,8 +909,7 @@ def file_messages(self, path: str) -> list[str]:
             return []
         self.flushed_files.add(path)
         source_lines = None
-        if self.options.pretty:
-            assert self.read_source
+        if self.options.pretty and self.read_source:
             source_lines = self.read_source(path)
         return self.format_messages(self.error_info_map[path], source_lines)
 

From bf7eab682a2bc63ec90e868610231a105a9b415f Mon Sep 17 00:00:00 2001
From: Hamir Mahal <hamirmahal@gmail.com>
Date: Sun, 17 Sep 2023 13:30:30 -0700
Subject: [PATCH 139/288] Use comments in issue template (#15742)

Fixes #15741
---
 .github/ISSUE_TEMPLATE/feature.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/ISSUE_TEMPLATE/feature.md b/.github/ISSUE_TEMPLATE/feature.md
index 135bc2bd3b94..984e552e51b1 100644
--- a/.github/ISSUE_TEMPLATE/feature.md
+++ b/.github/ISSUE_TEMPLATE/feature.md
@@ -6,8 +6,8 @@ labels: "feature"
 
 **Feature**
 
-(A clear and concise description of your feature proposal.)
+<!-- Enter a clear and concise description of your feature proposal here. -->
 
 **Pitch**
 
-(Please explain why this feature should be implemented and how it would be used. Add examples, if applicable.)
+<!-- Please explain why this feature should be implemented and how it would be used. Add examples, if applicable. -->

From 9b9152484c6b1ba3934373ca0c7600f71392fb06 Mon Sep 17 00:00:00 2001
From: Hamir Mahal <hamirmahal@gmail.com>
Date: Sun, 17 Sep 2023 13:32:20 -0700
Subject: [PATCH 140/288] Make it easier to copy commands from docs README
 (#16133)

Fixes #16132.
---
 docs/README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/README.md b/docs/README.md
index 0d574c9213a5..e72164c78560 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -15,13 +15,13 @@ Install Sphinx and other dependencies (i.e. theme) needed for the documentation.
 From the `docs` directory, use `pip`:
 
 ```
-$ pip install -r requirements-docs.txt
+pip install -r requirements-docs.txt
 ```
 
 Build the documentation like this:
 
 ```
-$ make html
+make html
 ```
 
 The built documentation will be placed in the `docs/build` directory. Open
@@ -33,13 +33,13 @@ Helpful documentation build commands
 Clean the documentation build:
 
 ```
-$ make clean
+make clean
 ```
 
 Test and check the links found in the documentation:
 
 ```
-$ make linkcheck
+make linkcheck
 ```
 
 Documentation on Read The Docs

From 1dcff0d2235ba6570f290a126f1bdd762f2d4991 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Mon, 18 Sep 2023 20:50:32 -0700
Subject: [PATCH 141/288] Preserve implicitly exported types via attribute
 access (#16129)

Resolves #13965. Follow up to #13967. Unblocks #14086
---
 mypy/checkmember.py               | 15 ++++++++++++++-
 test-data/unit/check-flags.test   | 26 +++++++++++++++++++-------
 test-data/unit/check-modules.test |  3 ++-
 3 files changed, 35 insertions(+), 9 deletions(-)

diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 59af0d402e14..4316a59281c3 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -24,6 +24,7 @@
     FuncDef,
     IndexExpr,
     MypyFile,
+    NameExpr,
     OverloadedFuncDef,
     SymbolNode,
     SymbolTable,
@@ -608,7 +609,19 @@ def analyze_member_var_access(
         mx.msg.undefined_in_superclass(name, mx.context)
         return AnyType(TypeOfAny.from_error)
     else:
-        return report_missing_attribute(mx.original_type, itype, name, mx)
+        ret = report_missing_attribute(mx.original_type, itype, name, mx)
+        # Avoid paying double jeopardy if we can't find the member due to --no-implicit-reexport
+        if (
+            mx.module_symbol_table is not None
+            and name in mx.module_symbol_table
+            and not mx.module_symbol_table[name].module_public
+        ):
+            v = mx.module_symbol_table[name].node
+            e = NameExpr(name)
+            e.set_line(mx.context)
+            e.node = v
+            return mx.chk.expr_checker.analyze_ref_expr(e, lvalue=mx.is_lvalue)
+        return ret
 
 
 def check_final_member(name: str, info: TypeInfo, msg: MessageBuilder, ctx: Context) -> None:
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
index 96f78d81dd16..06b7cab8391b 100644
--- a/test-data/unit/check-flags.test
+++ b/test-data/unit/check-flags.test
@@ -1611,14 +1611,22 @@ from other_module_2 import a  # E: Module "other_module_2" does not explicitly e
 reveal_type(a)  # N: Revealed type is "builtins.int"
 
 import other_module_2
-# TODO: this should also reveal builtins.int, see #13965
-reveal_type(other_module_2.a)  # E: "object" does not explicitly export attribute "a"  [attr-defined] \
-                               # N: Revealed type is "Any"
+reveal_type(other_module_2.a)  # E: Module "other_module_2" does not explicitly export attribute "a"  [attr-defined] \
+                               # N: Revealed type is "builtins.int"
+
+from other_module_2 import b  # E: Module "other_module_2" does not explicitly export attribute "b"  [attr-defined]
+reveal_type(b)  # N: Revealed type is "def (a: builtins.int) -> builtins.str"
+
+import other_module_2
+reveal_type(other_module_2.b)  # E: Module "other_module_2" does not explicitly export attribute "b"  [attr-defined] \
+                               # N: Revealed type is "def (a: builtins.int) -> builtins.str"
 
 [file other_module_1.py]
 a = 5
+def b(a: int) -> str: ...
 [file other_module_2.py]
-from other_module_1 import a
+from other_module_1 import a, b
+[builtins fixtures/module.pyi]
 
 [case testNoImplicitReexportRespectsAll]
 # flags: --no-implicit-reexport
@@ -1649,11 +1657,15 @@ __all__ = ('b',)
 [case testNoImplicitReexportGetAttr]
 # flags: --no-implicit-reexport --python-version 3.7
 from other_module_2 import a  # E: Module "other_module_2" does not explicitly export attribute "a"
+reveal_type(a)  # N: Revealed type is "builtins.int"
+from other_module_2 import b  # E: Module "other_module_2" does not explicitly export attribute "b"
+reveal_type(b)  # N: Revealed type is "builtins.str"
 [file other_module_1.py]
-from typing import Any
-def __getattr__(name: str) -> Any: ...
+b: str = "asdf"
+def __getattr__(name: str) -> int: ...
 [file other_module_2.py]
-from other_module_1 import a
+from other_module_1 import a, b
+def __getattr__(name: str) -> bytes: ...
 [builtins fixtures/tuple.pyi]
 
 [case textNoImplicitReexportSuggestions]
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
index 94368f6c1113..abbdf4987c46 100644
--- a/test-data/unit/check-modules.test
+++ b/test-data/unit/check-modules.test
@@ -1862,7 +1862,8 @@ import stub
 
 reveal_type(stub.y)  # N: Revealed type is "builtins.int"
 reveal_type(stub.z)  # E: Module "stub" does not explicitly export attribute "z" \
-                     # N: Revealed type is "Any"
+                     # N: Revealed type is "builtins.int"
+
 
 [file stub.pyi]
 from substub import y as y

From ba978f461e1f88327f9caa2e83774caaaeee1a6a Mon Sep 17 00:00:00 2001
From: Petter Friberg <petter@5monkeys.se>
Date: Tue, 19 Sep 2023 08:10:31 +0200
Subject: [PATCH 142/288] Call dynamic class hook on generic classes (#16052)

Fixes: #8359

CC @sobolevn

`get_dynamic_class_hook()` will now additionally be called for generic
classes with parameters, e.g.

```python
y = SomeGenericClass[type, ...].method()
```
---
 mypy/semanal.py                               |  7 ++++
 test-data/unit/check-custom-plugin.test       | 12 +++++-
 .../unit/plugins/dyn_class_from_method.py     | 40 ++++++++++++++++++-
 3 files changed, 57 insertions(+), 2 deletions(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index 70403eed57ae..e19cd86d5e89 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -3205,6 +3205,13 @@ def apply_dynamic_class_hook(self, s: AssignmentStmt) -> None:
                 if isinstance(callee_expr, RefExpr) and callee_expr.fullname:
                     method_name = call.callee.name
                     fname = callee_expr.fullname + "." + method_name
+                elif (
+                    isinstance(callee_expr, IndexExpr)
+                    and isinstance(callee_expr.base, RefExpr)
+                    and isinstance(callee_expr.analyzed, TypeApplication)
+                ):
+                    method_name = call.callee.name
+                    fname = callee_expr.base.fullname + "." + method_name
                 elif isinstance(callee_expr, CallExpr):
                     # check if chain call
                     call = callee_expr
diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test
index 22374d09cf9f..63529cf165ce 100644
--- a/test-data/unit/check-custom-plugin.test
+++ b/test-data/unit/check-custom-plugin.test
@@ -684,12 +684,16 @@ plugins=<ROOT>/test-data/unit/plugins/dyn_class.py
 [case testDynamicClassHookFromClassMethod]
 # flags: --config-file tmp/mypy.ini
 
-from mod import QuerySet, Manager
+from mod import QuerySet, Manager, GenericQuerySet
 
 MyManager = Manager.from_queryset(QuerySet)
+ManagerFromGenericQuerySet = GenericQuerySet[int].as_manager()
 
 reveal_type(MyManager())  # N: Revealed type is "__main__.MyManager"
 reveal_type(MyManager().attr)  # N: Revealed type is "builtins.str"
+reveal_type(ManagerFromGenericQuerySet())  # N: Revealed type is "__main__.ManagerFromGenericQuerySet"
+reveal_type(ManagerFromGenericQuerySet().attr)  # N: Revealed type is "builtins.int"
+queryset: GenericQuerySet[int] = ManagerFromGenericQuerySet()
 
 def func(manager: MyManager) -> None:
     reveal_type(manager)   # N: Revealed type is "__main__.MyManager"
@@ -704,6 +708,12 @@ class QuerySet:
 class Manager:
     @classmethod
     def from_queryset(cls, queryset_cls: Type[QuerySet]): ...
+T = TypeVar("T")
+class GenericQuerySet(Generic[T]):
+    attr: T
+
+    @classmethod
+    def as_manager(cls): ...
 
 [builtins fixtures/classmethod.pyi]
 [file mypy.ini]
diff --git a/test-data/unit/plugins/dyn_class_from_method.py b/test-data/unit/plugins/dyn_class_from_method.py
index b84754654084..2630b16be66e 100644
--- a/test-data/unit/plugins/dyn_class_from_method.py
+++ b/test-data/unit/plugins/dyn_class_from_method.py
@@ -2,7 +2,19 @@
 
 from typing import Callable
 
-from mypy.nodes import GDEF, Block, ClassDef, RefExpr, SymbolTable, SymbolTableNode, TypeInfo
+from mypy.nodes import (
+    GDEF,
+    Block,
+    ClassDef,
+    IndexExpr,
+    MemberExpr,
+    NameExpr,
+    RefExpr,
+    SymbolTable,
+    SymbolTableNode,
+    TypeApplication,
+    TypeInfo,
+)
 from mypy.plugin import DynamicClassDefContext, Plugin
 from mypy.types import Instance
 
@@ -13,6 +25,8 @@ def get_dynamic_class_hook(
     ) -> Callable[[DynamicClassDefContext], None] | None:
         if "from_queryset" in fullname:
             return add_info_hook
+        if "as_manager" in fullname:
+            return as_manager_hook
         return None
 
 
@@ -34,5 +48,29 @@ def add_info_hook(ctx: DynamicClassDefContext) -> None:
     ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
 
 
+def as_manager_hook(ctx: DynamicClassDefContext) -> None:
+    class_def = ClassDef(ctx.name, Block([]))
+    class_def.fullname = ctx.api.qualified_name(ctx.name)
+
+    info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
+    class_def.info = info
+    assert isinstance(ctx.call.callee, MemberExpr)
+    assert isinstance(ctx.call.callee.expr, IndexExpr)
+    assert isinstance(ctx.call.callee.expr.analyzed, TypeApplication)
+    assert isinstance(ctx.call.callee.expr.analyzed.expr, NameExpr)
+
+    queryset_type_fullname = ctx.call.callee.expr.analyzed.expr.fullname
+    queryset_node = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname)
+    assert queryset_node is not None
+    queryset_info = queryset_node.node
+    assert isinstance(queryset_info, TypeInfo)
+    parameter_type = ctx.call.callee.expr.analyzed.types[0]
+
+    obj = ctx.api.named_type("builtins.object")
+    info.mro = [info, queryset_info, obj.type]
+    info.bases = [Instance(queryset_info, [parameter_type])]
+    ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info))
+
+
 def plugin(version: str) -> type[DynPlugin]:
     return DynPlugin

From 249f3f8285d9d2a0f77273ace805dac0eef684c6 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Mon, 18 Sep 2023 23:53:31 -0700
Subject: [PATCH 143/288] Fix inference for overloaded __call__ with generic
 self (#16053)

Fixes #8283

Co-authored-by: ilevkivskyi
---
 mypy/checkexpr.py                     |  4 ++-
 mypy/checkmember.py                   | 13 ++++---
 mypy/subtypes.py                      | 51 +++++++++++++++------------
 test-data/unit/check-overloading.test | 24 +++++++++++++
 test-data/unit/check-tuples.test      | 14 ++++++++
 5 files changed, 76 insertions(+), 30 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index f46c8cb15c6f..7b9b84938930 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -1475,6 +1475,7 @@ def check_call(
         callable_node: Expression | None = None,
         callable_name: str | None = None,
         object_type: Type | None = None,
+        original_type: Type | None = None,
     ) -> tuple[Type, Type]:
         """Type check a call.
 
@@ -1537,7 +1538,7 @@ def check_call(
                 is_super=False,
                 is_operator=True,
                 msg=self.msg,
-                original_type=callee,
+                original_type=original_type or callee,
                 chk=self.chk,
                 in_literal_context=self.is_literal_context(),
             )
@@ -1578,6 +1579,7 @@ def check_call(
                 callable_node,
                 callable_name,
                 object_type,
+                original_type=callee,
             )
         else:
             return self.msg.not_callable(callee, context), AnyType(TypeOfAny.from_error)
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 4316a59281c3..1557b62917dc 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -331,13 +331,12 @@ def analyze_instance_member_access(
             signature = method.type
         signature = freshen_all_functions_type_vars(signature)
         if not method.is_static:
-            if name != "__call__":
-                # TODO: use proper treatment of special methods on unions instead
-                #       of this hack here and below (i.e. mx.self_type).
-                dispatched_type = meet.meet_types(mx.original_type, typ)
-                signature = check_self_arg(
-                    signature, dispatched_type, method.is_class, mx.context, name, mx.msg
-                )
+            # TODO: use proper treatment of special methods on unions instead
+            #       of this hack here and below (i.e. mx.self_type).
+            dispatched_type = meet.meet_types(mx.original_type, typ)
+            signature = check_self_arg(
+                signature, dispatched_type, method.is_class, mx.context, name, mx.msg
+            )
             signature = bind_self(signature, mx.self_type, is_classmethod=method.is_class)
         # TODO: should we skip these steps for static methods as well?
         # Since generic static methods should not be allowed.
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 9ed2e4af4051..c5399db0a494 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -454,19 +454,22 @@ def visit_instance(self, left: Instance) -> bool:
                     if isinstance(unpacked, Instance):
                         return self._is_subtype(left, unpacked)
             if left.type.has_base(right.partial_fallback.type.fullname):
-                # Special case to consider Foo[*tuple[Any, ...]] (i.e. bare Foo) a
-                # subtype of Foo[<whatever>], when Foo is user defined variadic tuple type.
-                mapped = map_instance_to_supertype(left, right.partial_fallback.type)
-                if len(mapped.args) == 1 and isinstance(mapped.args[0], UnpackType):
-                    unpacked = get_proper_type(mapped.args[0].type)
-                    if isinstance(unpacked, Instance):
-                        assert unpacked.type.fullname == "builtins.tuple"
-                        if isinstance(get_proper_type(unpacked.args[0]), AnyType):
-                            return not self.proper_subtype
-                if mapped.type.fullname == "builtins.tuple" and isinstance(
-                    get_proper_type(mapped.args[0]), AnyType
-                ):
-                    return not self.proper_subtype
+                if not self.proper_subtype:
+                    # Special case to consider Foo[*tuple[Any, ...]] (i.e. bare Foo) a
+                    # subtype of Foo[<whatever>], when Foo is user defined variadic tuple type.
+                    mapped = map_instance_to_supertype(left, right.partial_fallback.type)
+                    for arg in map(get_proper_type, mapped.args):
+                        if isinstance(arg, UnpackType):
+                            unpacked = get_proper_type(arg.type)
+                            if not isinstance(unpacked, Instance):
+                                break
+                            assert unpacked.type.fullname == "builtins.tuple"
+                            if not isinstance(get_proper_type(unpacked.args[0]), AnyType):
+                                break
+                        elif not isinstance(arg, AnyType):
+                            break
+                    else:
+                        return True
             return False
         if isinstance(right, TypeVarTupleType):
             # tuple[Any, ...] is like Any in the world of tuples (see special case above).
@@ -534,15 +537,19 @@ def visit_instance(self, left: Instance) -> bool:
                     right_args = (
                         right_prefix + (TupleType(list(right_middle), fallback),) + right_suffix
                     )
-                    if len(t.args) == 1 and isinstance(t.args[0], UnpackType):
-                        unpacked = get_proper_type(t.args[0].type)
-                        if isinstance(unpacked, Instance):
-                            assert unpacked.type.fullname == "builtins.tuple"
-                            if (
-                                isinstance(get_proper_type(unpacked.args[0]), AnyType)
-                                and not self.proper_subtype
-                            ):
-                                return True
+                    if not self.proper_subtype:
+                        for arg in map(get_proper_type, t.args):
+                            if isinstance(arg, UnpackType):
+                                unpacked = get_proper_type(arg.type)
+                                if not isinstance(unpacked, Instance):
+                                    break
+                                assert unpacked.type.fullname == "builtins.tuple"
+                                if not isinstance(get_proper_type(unpacked.args[0]), AnyType):
+                                    break
+                            elif not isinstance(arg, AnyType):
+                                break
+                        else:
+                            return True
                     type_params = zip(left_args, right_args, right.type.defn.type_vars)
                 else:
                     type_params = zip(t.args, right.args, right.type.defn.type_vars)
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index 4546c7171856..443a6fb5cb10 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -6650,3 +6650,27 @@ def d(x: int) -> int: ...
 def d(f: int, *, x: int) -> str: ...
 def d(*args, **kwargs): ...
 [builtins fixtures/tuple.pyi]
+
+[case testOverloadCallableGenericSelf]
+from typing import Any, TypeVar, Generic, overload, reveal_type
+
+T = TypeVar("T")
+
+class MyCallable(Generic[T]):
+    def __init__(self, t: T):
+        self.t = t
+
+    @overload
+    def __call__(self: "MyCallable[int]") -> str: ...
+    @overload
+    def __call__(self: "MyCallable[str]") -> int: ...
+    def __call__(self): ...
+
+c = MyCallable(5)
+reveal_type(c)  # N: Revealed type is "__main__.MyCallable[builtins.int]"
+reveal_type(c())  # N: Revealed type is "builtins.str"
+
+c2 = MyCallable("test")
+reveal_type(c2)  # N: Revealed type is "__main__.MyCallable[builtins.str]"
+reveal_type(c2()) # should be int  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 391fa20db738..ed2c3550a04e 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -1434,7 +1434,21 @@ def foo(o: CallableTuple) -> int:
 class CallableTuple(Tuple[str, int]):
     def __call__(self, n: int, m: int) -> int:
         return n
+[builtins fixtures/tuple.pyi]
+
+[case testTypeTupleGenericCall]
+from typing import Generic, Tuple, TypeVar
+
+T = TypeVar('T')
 
+def foo(o: CallableTuple[int]) -> int:
+    reveal_type(o)  # N: Revealed type is "Tuple[builtins.str, builtins.int, fallback=__main__.CallableTuple[builtins.int]]"
+    reveal_type(o.count(3))  # N: Revealed type is "builtins.int"
+    return o(1, 2)
+
+class CallableTuple(Tuple[str, T]):
+    def __call__(self, n: int, m: int) -> int:
+        return n
 [builtins fixtures/tuple.pyi]
 
 [case testTupleCompatibleWithSequence]

From c9929e2c906d377ca7026c4be10f88a1bd7ecff1 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Tue, 19 Sep 2023 18:42:06 +0300
Subject: [PATCH 144/288] Fix crash on dataclass field / property collision
 (#16147)

I think the current error message is enough:
https://github.com/python/mypy/issues/16141

CC @ikonst and @hauntsaninja
---
 mypy/plugins/dataclasses.py           |  5 +++++
 test-data/unit/check-dataclasses.test | 12 ++++++++++++
 2 files changed, 17 insertions(+)

diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py
index f2ae3fd3d01e..a51b393fcbc4 100644
--- a/mypy/plugins/dataclasses.py
+++ b/mypy/plugins/dataclasses.py
@@ -23,6 +23,7 @@
     ClassDef,
     Context,
     DataclassTransformSpec,
+    Decorator,
     Expression,
     FuncDef,
     FuncItem,
@@ -575,6 +576,10 @@ def collect_attributes(self) -> list[DataclassAttribute] | None:
                 # but the only alternative would be to modify the SymbolTable,
                 # and it's a little hairy to do that in a plugin.
                 continue
+            if isinstance(node, Decorator):
+                # This might be a property / field name clash.
+                # We will issue an error later.
+                continue
 
             assert isinstance(node, Var)
 
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index 35df84658259..d37ae569cc5e 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -2519,3 +2519,15 @@ a: MyDataclass
 b = [a, a]  # trigger joining the types
 
 [builtins fixtures/dataclasses.pyi]
+
+[case testPropertyAndFieldRedefinitionNoCrash]
+from dataclasses import dataclass
+
+@dataclass
+class Foo:
+    @property
+    def c(self) -> int:
+        return 0
+
+    c: int  # E: Name "c" already defined on line 5
+[builtins fixtures/dataclasses.pyi]

From 7089a7fe635cfbed2916bb4f67243b317ccf37ea Mon Sep 17 00:00:00 2001
From: Anders Kaseorg <andersk@mit.edu>
Date: Tue, 19 Sep 2023 22:24:36 -0700
Subject: [PATCH 145/288] Do not consider `import a.b as b` an explicit
 reexport (#14086)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The point of the `import a as a` and `from a import b as b` syntax for
explicit reexport is that it indicates an intention to do something
different from the ordinary `import a` and `from a import b`.

That is not the case with `import a.b as b`. Even mypy’s own code
includes `import mypy.types as types`, which was not intended to be a
reexport; if it were, it would be written `from mypy import types as
types`.

Pyright agrees that `import a.b as b` should not reexport.

Signed-off-by: Anders Kaseorg <andersk@mit.edu>
---
 mypy/semanal.py                   | 2 +-
 test-data/unit/check-modules.test | 8 ++++++++
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index e19cd86d5e89..6e103e5d382c 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -2466,7 +2466,7 @@ def visit_import(self, i: Import) -> None:
             if as_id is not None:
                 base_id = id
                 imported_id = as_id
-                module_public = use_implicit_reexport or id.split(".")[-1] == as_id
+                module_public = use_implicit_reexport or id == as_id
             else:
                 base_id = id.split(".")[0]
                 imported_id = base_id
diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test
index abbdf4987c46..44585fdd8d1a 100644
--- a/test-data/unit/check-modules.test
+++ b/test-data/unit/check-modules.test
@@ -1818,6 +1818,8 @@ m = n  # E: Cannot assign multiple modules to name "m" without explicit "types.M
 from stub import Iterable  # E: Module "stub" does not explicitly export attribute "Iterable"
 from stub import D  # E: Module "stub" does not explicitly export attribute "D"
 from stub import C
+from stub import foo
+from stub import bar  # E: Module "stub" does not explicitly export attribute "bar"
 
 c = C()
 reveal_type(c.x)  # N: Revealed type is "builtins.int"
@@ -1828,6 +1830,8 @@ reveal_type(it)  # N: Revealed type is "typing.Iterable[builtins.int]"
 from typing import Iterable
 from substub import C as C
 from substub import C as D
+from package import foo as foo
+import package.bar as bar
 
 def fun(x: Iterable[str]) -> Iterable[int]: pass
 
@@ -1835,6 +1839,10 @@ def fun(x: Iterable[str]) -> Iterable[int]: pass
 class C:
     x: int
 
+[file package/foo.pyi]
+
+[file package/bar.pyi]
+
 [builtins fixtures/module.pyi]
 
 [case testNoReExportFromStubsMemberType]

From ff81a1c7abc91d9984fc73b9f2b9eab198001c8e Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Wed, 20 Sep 2023 13:50:48 +0300
Subject: [PATCH 146/288] Remove `is_classmethod_class` slot from
 `CallableType` (#16151)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This slot was not used anywhere:

```
» ag is_classmethod_class .

```

Moreover, since it was not initialized, this code was failing with an
`AttributeError`:

```python
x: CallableType
for i in dir(x):
    print(i, getattr(x, i))  # failing on `is_classmethod_class`
```
---
 mypy/types.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/mypy/types.py b/mypy/types.py
index 22fcd601d6a0..2b5aec7789f7 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1763,8 +1763,6 @@ class CallableType(FunctionLike):
         "definition",  # For error messages.  May be None.
         "variables",  # Type variables for a generic function
         "is_ellipsis_args",  # Is this Callable[..., t] (with literal '...')?
-        "is_classmethod_class",  # Is this callable constructed for the benefit
-        # of a classmethod's 'cls' argument?
         "implicit",  # Was this type implicitly generated instead of explicitly
         # specified by the user?
         "special_sig",  # Non-None for signatures that require special handling

From 9edda9a79790d8f7263234eca9509657ea0c37f0 Mon Sep 17 00:00:00 2001
From: Ikko Eltociear Ashimine <eltociear@gmail.com>
Date: Sun, 24 Sep 2023 23:31:21 +0900
Subject: [PATCH 147/288] Fix typo in dataclasses.py (#16173)

```
heirarchy -> hierarchy
```
---
 mypy/plugins/dataclasses.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py
index a51b393fcbc4..685d1b342055 100644
--- a/mypy/plugins/dataclasses.py
+++ b/mypy/plugins/dataclasses.py
@@ -730,7 +730,7 @@ def _freeze(self, attributes: list[DataclassAttribute]) -> None:
         for attr in attributes:
             # Classes that directly specify a dataclass_transform metaclass must be neither frozen
             # non non-frozen per PEP681. Though it is surprising, this means that attributes from
-            # such a class must be writable even if the rest of the class heirarchy is frozen. This
+            # such a class must be writable even if the rest of the class hierarchy is frozen. This
             # matches the behavior of Pyright (the reference implementation).
             if attr.is_neither_frozen_nor_nonfrozen:
                 continue

From 0c8b76195a773363721d5521653bcdf9989d8768 Mon Sep 17 00:00:00 2001
From: Ali Hamdan <ali.hamdan.dev@gmail.com>
Date: Mon, 25 Sep 2023 02:28:08 +0200
Subject: [PATCH 148/288] stubgen: multiple fixes to the generated imports
 (#15624)

* Fix handling of nested imports.
Instead of assuming that a name is imported from a top-level package,
look in the imports for this name, starting from the parent submodule,
up until the import is found.
* Fix "from imports" getting reexported unnecessarily
* Fix import sorting when having import aliases

Fixes #13661
Fixes #7006
---
 mypy/stubgen.py             | 24 ++++++++++-----
 test-data/unit/stubgen.test | 60 +++++++++++++++++++++++++++++++++++--
 2 files changed, 74 insertions(+), 10 deletions(-)

diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index ca7249465746..e8c12ee4d99b 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -496,7 +496,9 @@ def add_import(self, module: str, alias: str | None = None) -> None:
                 name = name.rpartition(".")[0]
 
     def require_name(self, name: str) -> None:
-        self.required_names.add(name.split(".")[0])
+        while name not in self.direct_imports and "." in name:
+            name = name.rsplit(".", 1)[0]
+        self.required_names.add(name)
 
     def reexport(self, name: str) -> None:
         """Mark a given non qualified name as needed in __all__.
@@ -516,7 +518,10 @@ def import_lines(self) -> list[str]:
         # be imported from it. the names can also be alias in the form 'original as alias'
         module_map: Mapping[str, list[str]] = defaultdict(list)
 
-        for name in sorted(self.required_names):
+        for name in sorted(
+            self.required_names,
+            key=lambda n: (self.reverse_alias[n], n) if n in self.reverse_alias else (n, ""),
+        ):
             # If we haven't seen this name in an import statement, ignore it
             if name not in self.module_for:
                 continue
@@ -540,7 +545,7 @@ def import_lines(self) -> list[str]:
                     assert "." not in name  # Because reexports only has nonqualified names
                     result.append(f"import {name} as {name}\n")
                 else:
-                    result.append(f"import {self.direct_imports[name]}\n")
+                    result.append(f"import {name}\n")
 
         # Now generate all the from ... import ... lines collected in module_map
         for module, names in sorted(module_map.items()):
@@ -595,7 +600,7 @@ def visit_name_expr(self, e: NameExpr) -> None:
         self.refs.add(e.name)
 
     def visit_instance(self, t: Instance) -> None:
-        self.add_ref(t.type.fullname)
+        self.add_ref(t.type.name)
         super().visit_instance(t)
 
     def visit_unbound_type(self, t: UnboundType) -> None:
@@ -614,7 +619,10 @@ def visit_callable_type(self, t: CallableType) -> None:
         t.ret_type.accept(self)
 
     def add_ref(self, fullname: str) -> None:
-        self.refs.add(fullname.split(".")[-1])
+        self.refs.add(fullname)
+        while "." in fullname:
+            fullname = fullname.rsplit(".", 1)[0]
+            self.refs.add(fullname)
 
 
 class StubGenerator(mypy.traverser.TraverserVisitor):
@@ -1295,6 +1303,7 @@ def visit_import_from(self, o: ImportFrom) -> None:
             if (
                 as_name is None
                 and name not in self.referenced_names
+                and not any(n.startswith(name + ".") for n in self.referenced_names)
                 and (not self._all_ or name in IGNORED_DUNDERS)
                 and not is_private
                 and module not in ("abc", "asyncio") + TYPING_MODULE_NAMES
@@ -1303,14 +1312,15 @@ def visit_import_from(self, o: ImportFrom) -> None:
                 # exported, unless there is an explicit __all__. Note that we need to special
                 # case 'abc' since some references are deleted during semantic analysis.
                 exported = True
-            top_level = full_module.split(".")[0]
+            top_level = full_module.split(".", 1)[0]
+            self_top_level = self.module.split(".", 1)[0]
             if (
                 as_name is None
                 and not self.export_less
                 and (not self._all_ or name in IGNORED_DUNDERS)
                 and self.module
                 and not is_private
-                and top_level in (self.module.split(".")[0], "_" + self.module.split(".")[0])
+                and top_level in (self_top_level, "_" + self_top_level)
             ):
                 # Export imports from the same package, since we can't reliably tell whether they
                 # are part of the public API.
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index 828680fadcf2..23dbf36a551b 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -2772,9 +2772,9 @@ y: b.Y
 z: p.a.X
 
 [out]
+import p.a
 import p.a as a
 import p.b as b
-import p.a
 
 x: a.X
 y: b.Y
@@ -2787,7 +2787,7 @@ from p import a
 x: a.X
 
 [out]
-from p import a as a
+from p import a
 
 x: a.X
 
@@ -2809,7 +2809,7 @@ from p import a
 x: a.X
 
 [out]
-from p import a as a
+from p import a
 
 x: a.X
 
@@ -2859,6 +2859,60 @@ import p.a
 x: a.X
 y: p.a.Y
 
+[case testNestedImports]
+import p
+import p.m1
+import p.m2
+
+x: p.X
+y: p.m1.Y
+z: p.m2.Z
+
+[out]
+import p
+import p.m1
+import p.m2
+
+x: p.X
+y: p.m1.Y
+z: p.m2.Z
+
+[case testNestedImportsAliased]
+import p as t
+import p.m1 as pm1
+import p.m2 as pm2
+
+x: t.X
+y: pm1.Y
+z: pm2.Z
+
+[out]
+import p as t
+import p.m1 as pm1
+import p.m2 as pm2
+
+x: t.X
+y: pm1.Y
+z: pm2.Z
+
+[case testNestedFromImports]
+from p import m1
+from p.m1 import sm1
+from p.m2 import sm2
+
+x: m1.X
+y: sm1.Y
+z: sm2.Z
+
+[out]
+from p import m1
+from p.m1 import sm1
+from p.m2 import sm2
+
+x: m1.X
+y: sm1.Y
+z: sm2.Z
+
 [case testOverload_fromTypingImport]
 from typing import Tuple, Union, overload
 

From 4b66fa9de07828621fee1d53abd533f3903e570a Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 27 Sep 2023 00:29:11 +0100
Subject: [PATCH 149/288] Special-case type inference of empty collections
 (#16122)

Fixes https://github.com/python/mypy/issues/230
Fixes https://github.com/python/mypy/issues/6463
I bet it fixes some other duplicates; I closed a couple yesterday, but
likely there are more.

This may look a bit ad-hoc, but after some thinking this now starts to
make sense to me for two reasons:
* Unless I am missing something, this should be completely safe.
Special-casing only applies to inferred types (i.e. empty collection
literals etc).
* Empty collections _are_ actually special. Even if we solve some
classes of issues with more principled solutions (e.g. I want to re-work
type inference against unions in near future), there will always be some
corner cases involving empty collections.

Similar issues keep coming, so I think it is a good idea to add this
special-casing (especially taking into account how simple it is, and
that it closes some "popular" issues).
---
 mypy/solve.py                               | 14 ++++++++++++
 mypy/subtypes.py                            |  7 ++++++
 mypy/test/testpep561.py                     |  2 +-
 test-data/unit/check-inference-context.test | 11 ++--------
 test-data/unit/check-inference.test         | 24 +++++++++++++++++++++
 test-data/unit/check-varargs.test           | 24 ++++-----------------
 6 files changed, 52 insertions(+), 30 deletions(-)

diff --git a/mypy/solve.py b/mypy/solve.py
index 7cdf1c10c9b5..52e6549e98a6 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -239,6 +239,20 @@ def solve_one(lowers: Iterable[Type], uppers: Iterable[Type]) -> Type | None:
     top: Type | None = None
     candidate: Type | None = None
 
+    # Filter out previous results of failed inference, they will only spoil the current pass...
+    new_uppers = []
+    for u in uppers:
+        pu = get_proper_type(u)
+        if not isinstance(pu, UninhabitedType) or not pu.ambiguous:
+            new_uppers.append(u)
+    uppers = new_uppers
+
+    # ...unless this is the only information we have, then we just pass it on.
+    if not uppers and not lowers:
+        candidate = UninhabitedType()
+        candidate.ambiguous = True
+        return candidate
+
     # Process each bound separately, and calculate the lower and upper
     # bounds based on constraints. Note that we assume that the constraint
     # targets do not have constraint references.
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index c5399db0a494..822c4b0ebf32 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -18,6 +18,7 @@
     ARG_STAR2,
     CONTRAVARIANT,
     COVARIANT,
+    INVARIANT,
     Decorator,
     FuncBase,
     OverloadedFuncDef,
@@ -342,6 +343,12 @@ def _is_subtype(
 def check_type_parameter(
     left: Type, right: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext
 ) -> bool:
+    # It is safe to consider empty collection literals and similar as covariant, since
+    # such a type can't be stored in a variable, see checker.is_valid_inferred_type().
+    if variance == INVARIANT:
+        p_left = get_proper_type(left)
+        if isinstance(p_left, UninhabitedType) and p_left.ambiguous:
+            variance = COVARIANT
     if variance == COVARIANT:
         if proper_subtype:
             return is_proper_subtype(left, right, subtype_context=subtype_context)
diff --git a/mypy/test/testpep561.py b/mypy/test/testpep561.py
index 48d0658cd1e9..9d2628c1fa5f 100644
--- a/mypy/test/testpep561.py
+++ b/mypy/test/testpep561.py
@@ -131,7 +131,7 @@ def test_pep561(testcase: DataDrivenTestCase) -> None:
 
         steps = testcase.find_steps()
         if steps != [[]]:
-            steps = [[]] + steps  # type: ignore[assignment]
+            steps = [[]] + steps
 
         for i, operations in enumerate(steps):
             perform_file_operations(operations)
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
index 169fee65f127..773a9ffd8274 100644
--- a/test-data/unit/check-inference-context.test
+++ b/test-data/unit/check-inference-context.test
@@ -1321,11 +1321,7 @@ from typing import List, TypeVar
 T = TypeVar('T', bound=int)
 def f(x: List[T]) -> List[T]: ...
 
-# TODO: improve error message for such cases, see #3283 and #5706
-y: List[str] = f([]) \
- # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[str]") \
- # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
- # N: Consider using "Sequence" instead, which is covariant
+y: List[str] = f([])
 [builtins fixtures/list.pyi]
 
 [case testWideOuterContextNoArgs]
@@ -1342,10 +1338,7 @@ from typing import TypeVar, Optional, List
 T = TypeVar('T', bound=int)
 def f(x: Optional[T] = None) -> List[T]: ...
 
-y: List[str] = f()  \
-      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[str]") \
-      # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
-      # N: Consider using "Sequence" instead, which is covariant
+y: List[str] = f()
 [builtins fixtures/list.pyi]
 
 [case testUseCovariantGenericOuterContext]
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index f9a4d58c74af..caa44cb40ad4 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -3686,3 +3686,27 @@ def g(*args: str) -> None: pass
 reveal_type(f(g))  # N: Revealed type is "Tuple[Never, Never]" \
                    # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[Never]"
 [builtins fixtures/list.pyi]
+
+[case testInferenceWorksWithEmptyCollectionsNested]
+from typing import List, TypeVar, NoReturn
+T = TypeVar('T')
+def f(a: List[T], b: List[T]) -> T: pass
+x = ["yes"]
+reveal_type(f(x, []))  # N: Revealed type is "builtins.str"
+reveal_type(f(["yes"], []))  # N: Revealed type is "builtins.str"
+
+empty: List[NoReturn]
+f(x, empty)  # E: Cannot infer type argument 1 of "f"
+f(["no"], empty)  # E: Cannot infer type argument 1 of "f"
+[builtins fixtures/list.pyi]
+
+[case testInferenceWorksWithEmptyCollectionsUnion]
+from typing import Any, Dict, NoReturn, NoReturn, Union
+
+def foo() -> Union[Dict[str, Any], Dict[int, Any]]:
+    return {}
+
+empty: Dict[NoReturn, NoReturn]
+def bar() -> Union[Dict[str, Any], Dict[int, Any]]:
+    return empty
+[builtins fixtures/dict.pyi]
diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test
index 41668e991972..2495a883aa71 100644
--- a/test-data/unit/check-varargs.test
+++ b/test-data/unit/check-varargs.test
@@ -602,31 +602,15 @@ class A: pass
 class B: pass
 
 if int():
-    a, aa = G().f(*[a]) \
-      # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A") \
-      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \
-      # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
-      # N: Consider using "Sequence" instead, which is covariant
-
+    a, aa = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A")
 if int():
     aa, a = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "A")
 if int():
-    ab, aa = G().f(*[a]) \
-      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \
-      # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
-      # N: Consider using "Sequence" instead, which is covariant \
-      # E: Argument 1 to "f" of "G" has incompatible type "*List[A]"; expected "B"
-
+    ab, aa = G().f(*[a]) # E: Argument 1 to "f" of "G" has incompatible type "*List[A]"; expected "B"
 if int():
-    ao, ao = G().f(*[a]) \
-      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[object]") \
-      # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
-      # N: Consider using "Sequence" instead, which is covariant
+    ao, ao = G().f(*[a])
 if int():
-    aa, aa = G().f(*[a]) \
-      # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \
-      # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \
-      # N: Consider using "Sequence" instead, which is covariant
+    aa, aa = G().f(*[a])
 [builtins fixtures/list.pyi]
 
 [case testCallerTupleVarArgsAndGenericCalleeVarArg]

From 5f6961b38acd7381ff3f8071f1f31db192cba368 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 27 Sep 2023 23:34:50 +0100
Subject: [PATCH 150/288] Use upper bounds as fallback solutions for inference
 (#16184)

Fixes https://github.com/python/mypy/issues/13220

This looks a bit ad-hoc, but it is probably the least disruptive
solution possible.
---
 mypy/solve.py                       | 35 +++++++++++++++++++++++++++++
 test-data/unit/check-inference.test |  8 +++++++
 2 files changed, 43 insertions(+)

diff --git a/mypy/solve.py b/mypy/solve.py
index 52e6549e98a6..4d0ca6b7af24 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -109,6 +109,13 @@ def solve_constraints(
             else:
                 candidate = AnyType(TypeOfAny.special_form)
             res.append(candidate)
+
+    if not free_vars:
+        # Most of the validation for solutions is done in applytype.py, but here we can
+        # quickly test solutions w.r.t. to upper bounds, and use the latter (if possible),
+        # if solutions are actually not valid (due to poor inference context).
+        res = pre_validate_solutions(res, original_vars, constraints)
+
     return res, free_vars
 
 
@@ -487,3 +494,31 @@ def check_linear(scc: set[TypeVarId], lowers: Bounds, uppers: Bounds) -> bool:
 def get_vars(target: Type, vars: list[TypeVarId]) -> set[TypeVarId]:
     """Find type variables for which we are solving in a target type."""
     return {tv.id for tv in get_all_type_vars(target)} & set(vars)
+
+
+def pre_validate_solutions(
+    solutions: list[Type | None],
+    original_vars: Sequence[TypeVarLikeType],
+    constraints: list[Constraint],
+) -> list[Type | None]:
+    """Check whether each solution satisfies the upper bound of the corresponding type variable.
+
+    If it doesn't satisfy the bound, check if the bound itself satisfies all constraints, and
+    if yes, use it instead as a fallback solution.
+    """
+    new_solutions: list[Type | None] = []
+    for t, s in zip(original_vars, solutions):
+        if s is not None and not is_subtype(s, t.upper_bound):
+            bound_satisfies_all = True
+            for c in constraints:
+                if c.op == SUBTYPE_OF and not is_subtype(t.upper_bound, c.target):
+                    bound_satisfies_all = False
+                    break
+                if c.op == SUPERTYPE_OF and not is_subtype(c.target, t.upper_bound):
+                    bound_satisfies_all = False
+                    break
+            if bound_satisfies_all:
+                new_solutions.append(t.upper_bound)
+                continue
+        new_solutions.append(s)
+    return new_solutions
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index caa44cb40ad4..348eb8b60076 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -3542,6 +3542,14 @@ T = TypeVar("T")
 def type_or_callable(value: T, tp: Union[Type[T], Callable[[int], T]]) -> T: ...
 reveal_type(type_or_callable(A("test"), A))  # N: Revealed type is "__main__.A"
 
+[case testUpperBoundAsInferenceFallback]
+from typing import Callable, TypeVar, Any, Mapping, Optional
+T = TypeVar("T", bound=Mapping[str, Any])
+def raises(opts: Optional[T]) -> T: pass
+def assertRaises(cb: Callable[..., object]) -> None: pass
+assertRaises(raises)  # OK
+[builtins fixtures/dict.pyi]
+
 [case testJoinWithAnyFallback]
 from unknown import X  # type: ignore[import]
 

From d25d68065c18a30d975685bd7a13cb7d085a200c Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 28 Sep 2023 00:27:13 +0100
Subject: [PATCH 151/288] Use type variable bound when it appears as actual
 during inference (#16178)

This should help with re-enabling the use of `ParamSpec` in
`functools.wraps` (as it looks like some of the new errors in
https://github.com/AlexWaygood/mypy/pull/11 are caused by not handling
this).

---------

Co-authored-by: hauntsaninja <hauntsaninja@gmail.com>
Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
---
 mypy/constraints.py                 | 12 ++++++++++++
 test-data/unit/check-inference.test | 30 +++++++++++++++++++++++++++++
 2 files changed, 42 insertions(+)

diff --git a/mypy/constraints.py b/mypy/constraints.py
index 0524e38f9643..b61d882da3c4 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -328,6 +328,18 @@ def _infer_constraints(
     if isinstance(template, TypeVarType):
         return [Constraint(template, direction, actual)]
 
+    if (
+        isinstance(actual, TypeVarType)
+        and not actual.id.is_meta_var()
+        and direction == SUPERTYPE_OF
+    ):
+        # Unless template is also a type variable (or a union that contains one), using the upper
+        # bound for inference will usually give better result for actual that is a type variable.
+        if not isinstance(template, UnionType) or not any(
+            isinstance(t, TypeVarType) for t in template.items
+        ):
+            actual = get_proper_type(actual.upper_bound)
+
     # Now handle the case of either template or actual being a Union.
     # For a Union to be a subtype of another type, every item of the Union
     # must be a subtype of that type, so concatenate the constraints.
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 348eb8b60076..0a95ffdd50cf 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -3695,6 +3695,36 @@ reveal_type(f(g))  # N: Revealed type is "Tuple[Never, Never]" \
                    # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[Never]"
 [builtins fixtures/list.pyi]
 
+[case testInferenceAgainstTypeVarActualBound]
+from typing import Callable, TypeVar
+
+T = TypeVar("T")
+S = TypeVar("S")
+def test(f: Callable[[T], S]) -> Callable[[T], S]: ...
+
+F = TypeVar("F", bound=Callable[..., object])
+def dec(f: F) -> F:
+    reveal_type(test(f))  # N: Revealed type is "def (Any) -> builtins.object"
+    return f
+
+[case testInferenceAgainstTypeVarActualUnionBound]
+from typing import Protocol, TypeVar, Union
+
+T_co = TypeVar("T_co", covariant=True)
+class SupportsFoo(Protocol[T_co]):
+    def foo(self) -> T_co: ...
+
+class A:
+    def foo(self) -> A: ...
+class B:
+    def foo(self) -> B: ...
+
+def foo(f: SupportsFoo[T_co]) -> T_co: ...
+
+ABT = TypeVar("ABT", bound=Union[A, B])
+def simpler(k: ABT):
+    foo(k)
+
 [case testInferenceWorksWithEmptyCollectionsNested]
 from typing import List, TypeVar, NoReturn
 T = TypeVar('T')

From 0291ec90d46655d47fcf220be7eab8b5f7c035e7 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 28 Sep 2023 22:32:36 +0100
Subject: [PATCH 152/288] Better support for variadic calls and indexing
 (#16131)

This improves support for two features that were supported but only
partially: variadic calls, and variadic indexing. Some notes:
* I did not add dedicated support for slicing of tuples with homogeneous
variadic items (except for cases covered by TypeVarTuple support, i.e.
those not involving splitting of variadic item). This is tricky and it
is not clear what cases people actually want. I left a TODO for this.
* I prohibit multiple variadic items in a call expression. Technically,
we can support some situations involving these, but this is tricky, and
prohibiting this would be in the "spirit" of the PEP, IMO.
* I may have still missed some cases for the calls, since there are so
many options. If you have ideas for additional test cases, please let me
know.
* It was necessary to fix overload ambiguity logic to make some tests
pass. This goes beyond TypeVarTuple support, but I think this is a
correct change.
---
 mypy/checkexpr.py                       | 156 ++++++++++++++++++++----
 mypy/constraints.py                     |  31 +++--
 mypy/erasetype.py                       |   4 +-
 mypy/message_registry.py                |   1 +
 mypy/types.py                           |  55 +++++++--
 test-data/unit/check-overloading.test   |   3 +-
 test-data/unit/check-tuples.test        |   5 +-
 test-data/unit/check-typevar-tuple.test | 108 ++++++++++++++--
 8 files changed, 306 insertions(+), 57 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 7b9b84938930..95ab75e24585 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -1640,6 +1640,27 @@ def check_callable_call(
                 callee.type_object().name, abstract_attributes, context
             )
 
+        var_arg = callee.var_arg()
+        if var_arg and isinstance(var_arg.typ, UnpackType):
+            # It is hard to support multiple variadic unpacks (except for old-style *args: int),
+            # fail gracefully to avoid crashes later.
+            seen_unpack = False
+            for arg, arg_kind in zip(args, arg_kinds):
+                if arg_kind != ARG_STAR:
+                    continue
+                arg_type = get_proper_type(self.accept(arg))
+                if not isinstance(arg_type, TupleType) or any(
+                    isinstance(t, UnpackType) for t in arg_type.items
+                ):
+                    if seen_unpack:
+                        self.msg.fail(
+                            "Passing multiple variadic unpacks in a call is not supported",
+                            context,
+                            code=codes.CALL_ARG,
+                        )
+                        return AnyType(TypeOfAny.from_error), callee
+                    seen_unpack = True
+
         formal_to_actual = map_actuals_to_formals(
             arg_kinds,
             arg_names,
@@ -2405,7 +2426,7 @@ def check_argument_types(
                     ]
                     actual_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1)
 
-                    # TODO: can we really assert this? What if formal is just plain Unpack[Ts]?
+                    # If we got here, the callee was previously inferred to have a suffix.
                     assert isinstance(orig_callee_arg_type, UnpackType)
                     assert isinstance(orig_callee_arg_type.type, ProperType) and isinstance(
                         orig_callee_arg_type.type, TupleType
@@ -2431,22 +2452,29 @@ def check_argument_types(
                             inner_unpack = unpacked_type.items[inner_unpack_index]
                             assert isinstance(inner_unpack, UnpackType)
                             inner_unpacked_type = get_proper_type(inner_unpack.type)
-                            # We assume heterogenous tuples are desugared earlier
-                            assert isinstance(inner_unpacked_type, Instance)
-                            assert inner_unpacked_type.type.fullname == "builtins.tuple"
-                            callee_arg_types = (
-                                unpacked_type.items[:inner_unpack_index]
-                                + [inner_unpacked_type.args[0]]
-                                * (len(actuals) - len(unpacked_type.items) + 1)
-                                + unpacked_type.items[inner_unpack_index + 1 :]
-                            )
-                            callee_arg_kinds = [ARG_POS] * len(actuals)
+                            if isinstance(inner_unpacked_type, TypeVarTupleType):
+                                # This branch mimics the expanded_tuple case above but for
+                                # the case where caller passed a single * unpacked tuple argument.
+                                callee_arg_types = unpacked_type.items
+                                callee_arg_kinds = [
+                                    ARG_POS if i != inner_unpack_index else ARG_STAR
+                                    for i in range(len(unpacked_type.items))
+                                ]
+                            else:
+                                # We assume heterogeneous tuples are desugared earlier.
+                                assert isinstance(inner_unpacked_type, Instance)
+                                assert inner_unpacked_type.type.fullname == "builtins.tuple"
+                                callee_arg_types = (
+                                    unpacked_type.items[:inner_unpack_index]
+                                    + [inner_unpacked_type.args[0]]
+                                    * (len(actuals) - len(unpacked_type.items) + 1)
+                                    + unpacked_type.items[inner_unpack_index + 1 :]
+                                )
+                                callee_arg_kinds = [ARG_POS] * len(actuals)
                     elif isinstance(unpacked_type, TypeVarTupleType):
                         callee_arg_types = [orig_callee_arg_type]
                         callee_arg_kinds = [ARG_STAR]
                     else:
-                        # TODO: Any and Never can appear in Unpack (as a result of user error),
-                        # fail gracefully here and elsewhere (and/or normalize them away).
                         assert isinstance(unpacked_type, Instance)
                         assert unpacked_type.type.fullname == "builtins.tuple"
                         callee_arg_types = [unpacked_type.args[0]] * len(actuals)
@@ -2458,8 +2486,10 @@ def check_argument_types(
             assert len(actual_types) == len(actuals) == len(actual_kinds)
 
             if len(callee_arg_types) != len(actual_types):
-                # TODO: Improve error message
-                self.chk.fail("Invalid number of arguments", context)
+                if len(actual_types) > len(callee_arg_types):
+                    self.chk.msg.too_many_arguments(callee, context)
+                else:
+                    self.chk.msg.too_few_arguments(callee, context, None)
                 continue
 
             assert len(callee_arg_types) == len(actual_types)
@@ -2764,11 +2794,17 @@ def infer_overload_return_type(
                     )
             is_match = not w.has_new_errors()
             if is_match:
-                # Return early if possible; otherwise record info so we can
+                # Return early if possible; otherwise record info, so we can
                 # check for ambiguity due to 'Any' below.
                 if not args_contain_any:
                     return ret_type, infer_type
-                matches.append(typ)
+                p_infer_type = get_proper_type(infer_type)
+                if isinstance(p_infer_type, CallableType):
+                    # Prefer inferred types if possible, this will avoid false triggers for
+                    # Any-ambiguity caused by arguments with Any passed to generic overloads.
+                    matches.append(p_infer_type)
+                else:
+                    matches.append(typ)
                 return_types.append(ret_type)
                 inferred_types.append(infer_type)
                 type_maps.append(m)
@@ -4109,6 +4145,12 @@ def visit_index_with_type(
         # Visit the index, just to make sure we have a type for it available
         self.accept(index)
 
+        if isinstance(left_type, TupleType) and any(
+            isinstance(it, UnpackType) for it in left_type.items
+        ):
+            # Normalize variadic tuples for consistency.
+            left_type = expand_type(left_type, {})
+
         if isinstance(left_type, UnionType):
             original_type = original_type or left_type
             # Don't combine literal types, since we may need them for type narrowing.
@@ -4129,12 +4171,15 @@ def visit_index_with_type(
             if ns is not None:
                 out = []
                 for n in ns:
-                    if n < 0:
-                        n += len(left_type.items)
-                    if 0 <= n < len(left_type.items):
-                        out.append(left_type.items[n])
+                    item = self.visit_tuple_index_helper(left_type, n)
+                    if item is not None:
+                        out.append(item)
                     else:
                         self.chk.fail(message_registry.TUPLE_INDEX_OUT_OF_RANGE, e)
+                        if any(isinstance(t, UnpackType) for t in left_type.items):
+                            self.chk.note(
+                                f"Variadic tuple can have length {left_type.length() - 1}", e
+                            )
                         return AnyType(TypeOfAny.from_error)
                 return make_simplified_union(out)
             else:
@@ -4158,6 +4203,46 @@ def visit_index_with_type(
             e.method_type = method_type
             return result
 
+    def visit_tuple_index_helper(self, left: TupleType, n: int) -> Type | None:
+        unpack_index = find_unpack_in_list(left.items)
+        if unpack_index is None:
+            if n < 0:
+                n += len(left.items)
+            if 0 <= n < len(left.items):
+                return left.items[n]
+            return None
+        unpack = left.items[unpack_index]
+        assert isinstance(unpack, UnpackType)
+        unpacked = get_proper_type(unpack.type)
+        if isinstance(unpacked, TypeVarTupleType):
+            # Usually we say that TypeVarTuple can't be split, but in case of
+            # indexing it seems benign to just return the fallback item, similar
+            # to what we do when indexing a regular TypeVar.
+            middle = unpacked.tuple_fallback.args[0]
+        else:
+            assert isinstance(unpacked, Instance)
+            assert unpacked.type.fullname == "builtins.tuple"
+            middle = unpacked.args[0]
+        if n >= 0:
+            if n < unpack_index:
+                return left.items[n]
+            if n >= len(left.items) - 1:
+                # For tuple[int, *tuple[str, ...], int] we allow either index 0 or 1,
+                # since variadic item may have zero items.
+                return None
+            return UnionType.make_union(
+                [middle] + left.items[unpack_index + 1 : n + 2], left.line, left.column
+            )
+        n += len(left.items)
+        if n <= 0:
+            # Similar to above, we only allow -1, and -2 for tuple[int, *tuple[str, ...], int]
+            return None
+        if n > unpack_index:
+            return left.items[n]
+        return UnionType.make_union(
+            left.items[n - 1 : unpack_index] + [middle], left.line, left.column
+        )
+
     def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type:
         begin: Sequence[int | None] = [None]
         end: Sequence[int | None] = [None]
@@ -4183,7 +4268,11 @@ def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Typ
 
         items: list[Type] = []
         for b, e, s in itertools.product(begin, end, stride):
-            items.append(left_type.slice(b, e, s))
+            item = left_type.slice(b, e, s)
+            if item is None:
+                self.chk.fail(message_registry.AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE, slic)
+                return AnyType(TypeOfAny.from_error)
+            items.append(item)
         return make_simplified_union(items)
 
     def try_getting_int_literals(self, index: Expression) -> list[int] | None:
@@ -4192,7 +4281,7 @@ def try_getting_int_literals(self, index: Expression) -> list[int] | None:
         Otherwise, returns None.
 
         Specifically, this function is guaranteed to return a list with
-        one or more ints if one one the following is true:
+        one or more ints if one of the following is true:
 
         1. 'expr' is a IntExpr or a UnaryExpr backed by an IntExpr
         2. 'typ' is a LiteralType containing an int
@@ -4223,11 +4312,30 @@ def try_getting_int_literals(self, index: Expression) -> list[int] | None:
     def nonliteral_tuple_index_helper(self, left_type: TupleType, index: Expression) -> Type:
         self.check_method_call_by_name("__getitem__", left_type, [index], [ARG_POS], context=index)
         # We could return the return type from above, but unions are often better than the join
-        union = make_simplified_union(left_type.items)
+        union = self.union_tuple_fallback_item(left_type)
         if isinstance(index, SliceExpr):
             return self.chk.named_generic_type("builtins.tuple", [union])
         return union
 
+    def union_tuple_fallback_item(self, left_type: TupleType) -> Type:
+        # TODO: this duplicates logic in typeops.tuple_fallback().
+        items = []
+        for item in left_type.items:
+            if isinstance(item, UnpackType):
+                unpacked_type = get_proper_type(item.type)
+                if isinstance(unpacked_type, TypeVarTupleType):
+                    unpacked_type = get_proper_type(unpacked_type.upper_bound)
+                if (
+                    isinstance(unpacked_type, Instance)
+                    and unpacked_type.type.fullname == "builtins.tuple"
+                ):
+                    items.append(unpacked_type.args[0])
+                else:
+                    raise NotImplementedError
+            else:
+                items.append(item)
+        return make_simplified_union(items)
+
     def visit_typeddict_index_expr(
         self, td_type: TypedDictType, index: Expression, setitem: bool = False
     ) -> Type:
diff --git a/mypy/constraints.py b/mypy/constraints.py
index b61d882da3c4..ebd6765e8e82 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -137,25 +137,38 @@ def infer_constraints_for_callable(
             unpack_type = callee.arg_types[i]
             assert isinstance(unpack_type, UnpackType)
 
-            # In this case we are binding all of the actuals to *args
+            # In this case we are binding all the actuals to *args,
             # and we want a constraint that the typevar tuple being unpacked
             # is equal to a type list of all the actuals.
             actual_types = []
+
+            unpacked_type = get_proper_type(unpack_type.type)
+            if isinstance(unpacked_type, TypeVarTupleType):
+                tuple_instance = unpacked_type.tuple_fallback
+            elif isinstance(unpacked_type, TupleType):
+                tuple_instance = unpacked_type.partial_fallback
+            else:
+                assert False, "mypy bug: unhandled constraint inference case"
+
             for actual in actuals:
                 actual_arg_type = arg_types[actual]
                 if actual_arg_type is None:
                     continue
 
-                actual_types.append(
-                    mapper.expand_actual_type(
-                        actual_arg_type,
-                        arg_kinds[actual],
-                        callee.arg_names[i],
-                        callee.arg_kinds[i],
-                    )
+                expanded_actual = mapper.expand_actual_type(
+                    actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i]
                 )
 
-            unpacked_type = get_proper_type(unpack_type.type)
+                if arg_kinds[actual] != ARG_STAR or isinstance(
+                    get_proper_type(actual_arg_type), TupleType
+                ):
+                    actual_types.append(expanded_actual)
+                else:
+                    # If we are expanding an iterable inside * actual, append a homogeneous item instead
+                    actual_types.append(
+                        UnpackType(tuple_instance.copy_modified(args=[expanded_actual]))
+                    )
+
             if isinstance(unpacked_type, TypeVarTupleType):
                 constraints.append(
                     Constraint(
diff --git a/mypy/erasetype.py b/mypy/erasetype.py
index 24471f918319..7231ede66c65 100644
--- a/mypy/erasetype.py
+++ b/mypy/erasetype.py
@@ -82,7 +82,9 @@ def visit_instance(self, t: Instance) -> ProperType:
             # Valid erasure for *Ts is *tuple[Any, ...], not just Any.
             if isinstance(tv, TypeVarTupleType):
                 args.append(
-                    tv.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)])
+                    UnpackType(
+                        tv.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)])
+                    )
                 )
             else:
                 args.append(AnyType(TypeOfAny.special_form))
diff --git a/mypy/message_registry.py b/mypy/message_registry.py
index 713ec2e3c759..d75a1fab1b66 100644
--- a/mypy/message_registry.py
+++ b/mypy/message_registry.py
@@ -83,6 +83,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage:
 INCOMPATIBLE_TYPES_IN_CAPTURE: Final = ErrorMessage("Incompatible types in capture pattern")
 MUST_HAVE_NONE_RETURN_TYPE: Final = ErrorMessage('The return type of "{}" must be None')
 TUPLE_INDEX_OUT_OF_RANGE: Final = ErrorMessage("Tuple index out of range")
+AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE: Final = ErrorMessage("Ambiguous slice of a variadic tuple")
 INVALID_SLICE_INDEX: Final = ErrorMessage("Slice index must be an integer, SupportsIndex or None")
 CANNOT_INFER_LAMBDA_TYPE: Final = ErrorMessage("Cannot infer type of lambda")
 CANNOT_ACCESS_INIT: Final = (
diff --git a/mypy/types.py b/mypy/types.py
index 2b5aec7789f7..9817043db6c2 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -2416,14 +2416,53 @@ def copy_modified(
             items = self.items
         return TupleType(items, fallback, self.line, self.column)
 
-    def slice(self, begin: int | None, end: int | None, stride: int | None) -> TupleType:
-        return TupleType(
-            self.items[begin:end:stride],
-            self.partial_fallback,
-            self.line,
-            self.column,
-            self.implicit,
-        )
+    def slice(self, begin: int | None, end: int | None, stride: int | None) -> TupleType | None:
+        if any(isinstance(t, UnpackType) for t in self.items):
+            total = len(self.items)
+            unpack_index = find_unpack_in_list(self.items)
+            assert unpack_index is not None
+            if begin is None and end is None:
+                # We special-case this to support reversing variadic tuples.
+                # General support for slicing is tricky, so we handle only simple cases.
+                if stride == -1:
+                    slice_items = self.items[::-1]
+                elif stride is None or stride == 1:
+                    slice_items = self.items
+                else:
+                    return None
+            elif (begin is None or unpack_index >= begin >= 0) and (
+                end is not None and unpack_index >= end >= 0
+            ):
+                # Start and end are in the prefix, everything works in this case.
+                slice_items = self.items[begin:end:stride]
+            elif (begin is not None and unpack_index - total < begin < 0) and (
+                end is None or unpack_index - total < end < 0
+            ):
+                # Start and end are in the suffix, everything works in this case.
+                slice_items = self.items[begin:end:stride]
+            elif (begin is None or unpack_index >= begin >= 0) and (
+                end is None or unpack_index - total < end < 0
+            ):
+                # Start in the prefix, end in the suffix, we can support only trivial strides.
+                if stride is None or stride == 1:
+                    slice_items = self.items[begin:end:stride]
+                else:
+                    return None
+            elif (begin is not None and unpack_index - total < begin < 0) and (
+                end is not None and unpack_index >= end >= 0
+            ):
+                # Start in the suffix, end in the prefix, we can support only trivial strides.
+                if stride is None or stride == -1:
+                    slice_items = self.items[begin:end:stride]
+                else:
+                    return None
+            else:
+                # TODO: there are some additional cases we can support for homogeneous variadic
+                # items, we can "eat away" finite number of items.
+                return None
+        else:
+            slice_items = self.items[begin:end:stride]
+        return TupleType(slice_items, self.partial_fallback, self.line, self.column, self.implicit)
 
 
 class TypedDictType(ProperType):
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index 443a6fb5cb10..b97eeb48115c 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -6501,8 +6501,7 @@ eggs = lambda: 'eggs'
 reveal_type(func(eggs))  # N: Revealed type is "def (builtins.str) -> builtins.str"
 
 spam: Callable[..., str] = lambda x, y: 'baz'
-reveal_type(func(spam))  # N: Revealed type is "def (*Any, **Any) -> Any"
-
+reveal_type(func(spam))  # N: Revealed type is "def (*Any, **Any) -> builtins.str"
 [builtins fixtures/paramspec.pyi]
 
 [case testGenericOverloadOverlapWithType]
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index ed2c3550a04e..9dfee38bc0c6 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -1678,7 +1678,6 @@ def zip(*i: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ...
 def zip(i): ...
 
 def g(t: Tuple):
-    # Ideally, we'd infer that these are iterators of tuples
-    reveal_type(zip(*t))  # N: Revealed type is "typing.Iterator[Any]"
-    reveal_type(zip(t))  # N: Revealed type is "typing.Iterator[Any]"
+    reveal_type(zip(*t))  # N: Revealed type is "typing.Iterator[builtins.tuple[Any, ...]]"
+    reveal_type(zip(t))  # N: Revealed type is "typing.Iterator[Tuple[Any]]"
 [builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index d38d492fe9b2..e8d7966029e3 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -366,13 +366,25 @@ from typing_extensions import TypeVarTuple, Unpack
 
 Ts = TypeVarTuple("Ts")
 
-# TODO: add less trivial tests with prefix/suffix etc.
-# TODO: add tests that call with a type var tuple instead of just args.
 def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]:
     reveal_type(args)  # N: Revealed type is "Tuple[Unpack[Ts`-1]]"
-    return args
+    reveal_type(args_to_tuple(1, *args))  # N: Revealed type is "Tuple[Literal[1]?, Unpack[Ts`-1]]"
+    reveal_type(args_to_tuple(*args, 'a'))  # N: Revealed type is "Tuple[Unpack[Ts`-1], Literal['a']?]"
+    reveal_type(args_to_tuple(1, *args, 'a'))  # N: Revealed type is "Tuple[Literal[1]?, Unpack[Ts`-1], Literal['a']?]"
+    args_to_tuple(*args, *args)  # E: Passing multiple variadic unpacks in a call is not supported
+    ok = (1, 'a')
+    reveal_type(args_to_tuple(*ok, *ok))  # N: Revealed type is "Tuple[builtins.int, builtins.str, builtins.int, builtins.str]"
+    if int():
+        return args
+    else:
+        return args_to_tuple(*args)
 
 reveal_type(args_to_tuple(1, 'a'))  # N: Revealed type is "Tuple[Literal[1]?, Literal['a']?]"
+vt: Tuple[int, ...]
+reveal_type(args_to_tuple(1, *vt))  # N: Revealed type is "Tuple[Literal[1]?, Unpack[builtins.tuple[builtins.int, ...]]]"
+reveal_type(args_to_tuple(*vt, 'a'))  # N: Revealed type is "Tuple[Unpack[builtins.tuple[builtins.int, ...]], Literal['a']?]"
+reveal_type(args_to_tuple(1, *vt, 'a'))  # N: Revealed type is "Tuple[Literal[1]?, Unpack[builtins.tuple[builtins.int, ...]], Literal['a']?]"
+args_to_tuple(*vt, *vt)  # E: Passing multiple variadic unpacks in a call is not supported
 [builtins fixtures/tuple.pyi]
 
 [case testTypeVarTuplePep646TypeVarStarArgs]
@@ -381,8 +393,17 @@ from typing_extensions import TypeVarTuple, Unpack
 
 Ts = TypeVarTuple("Ts")
 
+def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]:
+    with_prefix_suffix(*args)  # E: Too few arguments for "with_prefix_suffix" \
+                               # E: Argument 1 to "with_prefix_suffix" has incompatible type "*Tuple[Unpack[Ts]]"; expected "bool"
+    new_args = (True, "foo", *args, 5)
+    with_prefix_suffix(*new_args)
+    return args
+
 def with_prefix_suffix(*args: Unpack[Tuple[bool, str, Unpack[Ts], int]]) -> Tuple[bool, str, Unpack[Ts], int]:
     reveal_type(args)  # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]"
+    reveal_type(args_to_tuple(*args))  # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]"
+    reveal_type(args_to_tuple(1, *args, 'a'))  # N: Revealed type is "Tuple[Literal[1]?, builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int, Literal['a']?]"
     return args
 
 reveal_type(with_prefix_suffix(True, "bar", "foo", 5))  # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]"
@@ -395,8 +416,7 @@ t = (True, "bar", "foo", 5)
 reveal_type(with_prefix_suffix(*t))  # N: Revealed type is "Tuple[builtins.bool, builtins.str, builtins.str, builtins.int]"
 reveal_type(with_prefix_suffix(True, *("bar", "foo"), 5))  # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]"
 
-# TODO: handle list case
-#reveal_type(with_prefix_suffix(True, "bar", *["foo1", "foo2"], 5))
+reveal_type(with_prefix_suffix(True, "bar", *["foo1", "foo2"], 5))  # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[builtins.tuple[builtins.str, ...]], builtins.int]"
 
 bad_t = (True, "bar")
 with_prefix_suffix(*bad_t)  # E: Too few arguments for "with_prefix_suffix"
@@ -434,7 +454,7 @@ reveal_type(C().foo2)  # N: Revealed type is "def (*args: Unpack[Tuple[builtins.
 
 [case testTypeVarTuplePep646TypeVarStarArgsVariableLengthTuple]
 from typing import Tuple
-from typing_extensions import Unpack
+from typing_extensions import Unpack, TypeVarTuple
 
 def foo(*args: Unpack[Tuple[int, ...]]) -> None:
     reveal_type(args)  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
@@ -442,11 +462,28 @@ def foo(*args: Unpack[Tuple[int, ...]]) -> None:
 foo(0, 1, 2)
 foo(0, 1, "bar")  # E: Argument 3 to "foo" has incompatible type "str"; expected "int"
 
-
 def foo2(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]) -> None:
     reveal_type(args)  # N: Revealed type is "Tuple[builtins.str, Unpack[builtins.tuple[builtins.int, ...]], builtins.bool, builtins.bool]"
-    # TODO: generate an error
-    # reveal_type(args[1])
+    reveal_type(args[1])  # N: Revealed type is "builtins.int"
+
+def foo3(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], str, float]]) -> None:
+    reveal_type(args[0])  # N: Revealed type is "builtins.str"
+    reveal_type(args[1])  # N: Revealed type is "Union[builtins.int, builtins.str]"
+    reveal_type(args[2])  # N: Revealed type is "Union[builtins.int, builtins.str, builtins.float]"
+    args[3]  # E: Tuple index out of range \
+             # N: Variadic tuple can have length 3
+    reveal_type(args[-1])  # N: Revealed type is "builtins.float"
+    reveal_type(args[-2])  # N: Revealed type is "builtins.str"
+    reveal_type(args[-3])  # N: Revealed type is "Union[builtins.str, builtins.int]"
+    args[-4]  # E: Tuple index out of range \
+              # N: Variadic tuple can have length 3
+    reveal_type(args[::-1])  # N: Revealed type is "Tuple[builtins.float, builtins.str, Unpack[builtins.tuple[builtins.int, ...]], builtins.str]"
+    args[::2]  # E: Ambiguous slice of a variadic tuple
+    args[:2]  # E: Ambiguous slice of a variadic tuple
+
+Ts = TypeVarTuple("Ts")
+def foo4(*args: Unpack[Tuple[str, Unpack[Ts], bool, bool]]) -> None:
+    reveal_type(args[1])  # N: Revealed type is "builtins.object"
 
 foo2("bar", 1, 2, 3, False, True)
 foo2(0, 1, 2, 3, False, True)  # E: Argument 1 to "foo2" has incompatible type "int"; expected "str"
@@ -908,7 +945,7 @@ def cons(
     return wrapped
 
 def star(f: Callable[[X], Y]) -> Callable[[Unpack[Tuple[X, ...]]], Tuple[Y, ...]]:
-    def wrapped(*xs: X):
+    def wrapped(*xs: X) -> Tuple[Y, ...]:
         if not xs:
             return nil()
         return cons(f, star(f))(*xs)
@@ -1516,3 +1553,54 @@ def test(x: int, t: Tuple[T, ...]) -> Tuple[int, Unpack[Tuple[T, ...]]]:
     ...
 a: Any = test(42, ())
 [builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleIndexTypeVar]
+from typing import Any, List, Sequence, Tuple, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+def f(data: Sequence[Tuple[Unpack[Ts]]]) -> List[Any]:
+    return [d[0] for d in data]  # E: Tuple index out of range \
+                                 # N: Variadic tuple can have length 0
+
+T = TypeVar("T")
+def g(data: Sequence[Tuple[T, Unpack[Ts]]]) -> List[T]:
+    return [d[0] for d in data]  # OK
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleOverloadMatch]
+from typing import Any, Generic, overload, Tuple, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+_Ts = TypeVarTuple("_Ts")
+_T = TypeVar("_T")
+_T2 = TypeVar("_T2")
+
+class Container(Generic[_T]): ...
+class Array(Generic[Unpack[_Ts]]): ...
+
+@overload
+def build(entity: Container[_T], /) -> Array[_T]: ...
+@overload
+def build(entity: Container[_T], entity2: Container[_T2], /) -> Array[_T, _T2]: ...
+@overload
+def build(*entities: Container[Any]) -> Array[Unpack[Tuple[Any, ...]]]: ...
+def build(*entities: Container[Any]) -> Array[Unpack[Tuple[Any, ...]]]:
+    ...
+
+def test(a: Container[Any], b: Container[int], c: Container[str]):
+    reveal_type(build(a, b))  # N: Revealed type is "__main__.Array[Any, builtins.int]"
+    reveal_type(build(b, c))  # N: Revealed type is "__main__.Array[builtins.int, builtins.str]"
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleIndexOldStyleNonNormalizedAndNonLiteral]
+from typing import Any, Tuple
+from typing_extensions import Unpack
+
+t: Tuple[Unpack[Tuple[int, ...]]]
+reveal_type(t[42])  # N: Revealed type is "builtins.int"
+i: int
+reveal_type(t[i])  # N: Revealed type is "builtins.int"
+t1: Tuple[int, Unpack[Tuple[int, ...]]]
+reveal_type(t1[i])  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]

From fddfc8dfb29ef9adec02f46eda8e92f74bdd7c9c Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 28 Sep 2023 23:02:38 +0100
Subject: [PATCH 153/288] Fix walrus interaction with empty collections
 (#16197)

This fixes a regression caused by
https://github.com/python/mypy/pull/16122
---
 mypy/checkexpr.py                  | 5 ++++-
 test-data/unit/check-python38.test | 8 ++++++++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 95ab75e24585..c132b35e5a2a 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -4093,7 +4093,10 @@ def visit_assignment_expr(self, e: AssignmentExpr) -> Type:
         value = self.accept(e.value)
         self.chk.check_assignment(e.target, e.value)
         self.chk.check_final(e)
-        self.chk.store_type(e.target, value)
+        if not has_uninhabited_component(value):
+            # TODO: can we get rid of this extra store_type()?
+            # Usually, check_assignment() already stores the lvalue type correctly.
+            self.chk.store_type(e.target, value)
         self.find_partial_type_ref_fast_path(e.target)
         return value
 
diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test
index d83f29f2186a..1e99c760b67a 100644
--- a/test-data/unit/check-python38.test
+++ b/test-data/unit/check-python38.test
@@ -826,3 +826,11 @@ main:5: error: Dict entry 0 has incompatible type "str": "str"; expected "str":
 main:5: error: Unpacked dict entry 1 has incompatible type "Dict[str, str]"; expected "SupportsKeysAndGetItem[str, int]"
     dct: Dict[str, int] = {"a": "b", **other}
                                        ^~~~~
+
+[case testWalrusAssignmentEmptyCollection]
+from typing import List
+
+y: List[int]
+if (y := []):
+    reveal_type(y)  # N: Revealed type is "builtins.list[builtins.int]"
+[builtins fixtures/list.pyi]

From 181cbe88f1396f2f52770f59b6bbb13c6521980a Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 29 Sep 2023 00:42:38 +0100
Subject: [PATCH 154/288] Add more tests for variadic Callables (#16198)

Supersedes https://github.com/python/mypy/pull/15254

Note the error message for one of the test is slightly different.
Although it _may_ suggest that `Unpack[...]` is a valid type on its own,
this error is kind of more consistent with old style `*args: int`
annotations.
---
 test-data/unit/check-typevar-tuple.test | 49 +++++++++++++++++++++++++
 1 file changed, 49 insertions(+)

diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index e8d7966029e3..850b7ef8a524 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -1604,3 +1604,52 @@ reveal_type(t[i])  # N: Revealed type is "builtins.int"
 t1: Tuple[int, Unpack[Tuple[int, ...]]]
 reveal_type(t1[i])  # N: Revealed type is "builtins.int"
 [builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleNotConcreteCallable]
+from typing_extensions import Unpack, TypeVarTuple
+from typing import Callable, TypeVar, Tuple
+
+T = TypeVar("T")
+Args = TypeVarTuple("Args")
+Args2 = TypeVarTuple("Args2")
+
+def submit(fn: Callable[[Unpack[Args]], T], *args: Unpack[Args]) -> T:
+    ...
+
+def submit2(fn: Callable[[int, Unpack[Args]], T], *args: Unpack[Tuple[int, Unpack[Args]]]) -> T:
+    ...
+
+def foo(func: Callable[[Unpack[Args]], T], *args: Unpack[Args]) -> T:
+   return submit(func, *args)
+
+def foo2(func: Callable[[Unpack[Args2]], T], *args: Unpack[Args2]) -> T:
+   return submit(func, *args)
+
+def foo3(func: Callable[[int, Unpack[Args2]], T], *args: Unpack[Args2]) -> T:
+   return submit2(func, 1, *args)
+
+def foo_bad(func: Callable[[Unpack[Args2]], T], *args: Unpack[Args2]) -> T:
+   return submit2(func, 1, *args)  # E: Argument 1 to "submit2" has incompatible type "Callable[[VarArg(Unpack[Args2])], T]"; expected "Callable[[int, VarArg(Unpack[Args2])], T]"
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleParamSpecInteraction]
+from typing_extensions import Unpack, TypeVarTuple, ParamSpec
+from typing import Callable, TypeVar
+
+T = TypeVar("T")
+Args = TypeVarTuple("Args")
+Args2 = TypeVarTuple("Args2")
+P = ParamSpec("P")
+
+def submit(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T:
+    ...
+
+def foo(func: Callable[[Unpack[Args]], T], *args: Unpack[Args]) -> T:
+   return submit(func, *args)
+
+def foo2(func: Callable[[Unpack[Args]], T], *args: Unpack[Args2]) -> T:
+   return submit(func, *args)  # E: Argument 2 to "submit" has incompatible type "*Tuple[Unpack[Args2]]"; expected "Unpack[Args]"
+
+def foo3(func: Callable[[int, Unpack[Args2]], T], *args: Unpack[Args2]) -> T:
+   return submit(func, 1, *args)
+[builtins fixtures/tuple.pyi]

From acccdd8a25b019e6b08180e2f95417a29651435e Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra <jelle.zijlstra@gmail.com>
Date: Sat, 30 Sep 2023 15:13:09 -0700
Subject: [PATCH 155/288] Fix error code on "Maybe you forgot to use await"
 note (#16203)

Fixes #16202
---
 mypy/checker.py                       |  6 +++---
 mypy/checkexpr.py                     |  2 +-
 mypy/checkmember.py                   |  6 +++---
 mypy/messages.py                      | 26 ++++++++++++++++++++------
 test-data/unit/check-async-await.test | 27 +++++++++++++++++++++++++++
 5 files changed, 54 insertions(+), 13 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 95a65b0a8cd1..bdb636541db0 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -6237,7 +6237,7 @@ def check_subtype(
                 assert call is not None
                 if not is_subtype(subtype, call, options=self.options):
                     self.msg.note_call(supertype, call, context, code=msg.code)
-        self.check_possible_missing_await(subtype, supertype, context)
+        self.check_possible_missing_await(subtype, supertype, context, code=msg.code)
         return False
 
     def get_precise_awaitable_type(self, typ: Type, local_errors: ErrorWatcher) -> Type | None:
@@ -6271,7 +6271,7 @@ def checking_await_set(self) -> Iterator[None]:
             self.checking_missing_await = False
 
     def check_possible_missing_await(
-        self, subtype: Type, supertype: Type, context: Context
+        self, subtype: Type, supertype: Type, context: Context, code: ErrorCode | None
     ) -> None:
         """Check if the given type becomes a subtype when awaited."""
         if self.checking_missing_await:
@@ -6285,7 +6285,7 @@ def check_possible_missing_await(
                 aw_type, supertype, context, msg=message_registry.INCOMPATIBLE_TYPES
             ):
                 return
-        self.msg.possible_missing_await(context)
+        self.msg.possible_missing_await(context, code)
 
     def contains_none(self, t: Type) -> bool:
         t = get_proper_type(t)
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index c132b35e5a2a..df4077100efb 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -2563,7 +2563,7 @@ def check_arg(
                 original_caller_type, callee_type, context, code=code
             )
             if not self.msg.prefer_simple_messages():
-                self.chk.check_possible_missing_await(caller_type, callee_type, context)
+                self.chk.check_possible_missing_await(caller_type, callee_type, context, code)
 
     def check_overload_call(
         self,
diff --git a/mypy/checkmember.py b/mypy/checkmember.py
index 1557b62917dc..5a4f3875ad04 100644
--- a/mypy/checkmember.py
+++ b/mypy/checkmember.py
@@ -272,11 +272,11 @@ def report_missing_attribute(
     mx: MemberContext,
     override_info: TypeInfo | None = None,
 ) -> Type:
-    res_type = mx.msg.has_no_attr(original_type, typ, name, mx.context, mx.module_symbol_table)
+    error_code = mx.msg.has_no_attr(original_type, typ, name, mx.context, mx.module_symbol_table)
     if not mx.msg.prefer_simple_messages():
         if may_be_awaitable_attribute(name, typ, mx, override_info):
-            mx.msg.possible_missing_await(mx.context)
-    return res_type
+            mx.msg.possible_missing_await(mx.context, error_code)
+    return AnyType(TypeOfAny.from_error)
 
 
 # The several functions that follow implement analyze_member_access for various
diff --git a/mypy/messages.py b/mypy/messages.py
index 8bc190b7d66d..47ebd94f3d21 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -355,7 +355,7 @@ def has_no_attr(
         member: str,
         context: Context,
         module_symbol_table: SymbolTable | None = None,
-    ) -> Type:
+    ) -> ErrorCode | None:
         """Report a missing or non-accessible member.
 
         original_type is the top-level type on which the error occurred.
@@ -370,44 +370,49 @@ def has_no_attr(
         directly available on original_type
 
         If member corresponds to an operator, use the corresponding operator
-        name in the messages. Return type Any.
+        name in the messages. Return the error code that was produced, if any.
         """
         original_type = get_proper_type(original_type)
         typ = get_proper_type(typ)
 
         if isinstance(original_type, Instance) and original_type.type.has_readable_member(member):
             self.fail(f'Member "{member}" is not assignable', context)
+            return None
         elif member == "__contains__":
             self.fail(
                 f"Unsupported right operand type for in ({format_type(original_type, self.options)})",
                 context,
                 code=codes.OPERATOR,
             )
+            return codes.OPERATOR
         elif member in op_methods.values():
             # Access to a binary operator member (e.g. _add). This case does
             # not handle indexing operations.
             for op, method in op_methods.items():
                 if method == member:
                     self.unsupported_left_operand(op, original_type, context)
-                    break
+                    return codes.OPERATOR
         elif member == "__neg__":
             self.fail(
                 f"Unsupported operand type for unary - ({format_type(original_type, self.options)})",
                 context,
                 code=codes.OPERATOR,
             )
+            return codes.OPERATOR
         elif member == "__pos__":
             self.fail(
                 f"Unsupported operand type for unary + ({format_type(original_type, self.options)})",
                 context,
                 code=codes.OPERATOR,
             )
+            return codes.OPERATOR
         elif member == "__invert__":
             self.fail(
                 f"Unsupported operand type for ~ ({format_type(original_type, self.options)})",
                 context,
                 code=codes.OPERATOR,
             )
+            return codes.OPERATOR
         elif member == "__getitem__":
             # Indexed get.
             # TODO: Fix this consistently in format_type
@@ -418,12 +423,14 @@ def has_no_attr(
                     ),
                     context,
                 )
+                return None
             else:
                 self.fail(
                     f"Value of type {format_type(original_type, self.options)} is not indexable",
                     context,
                     code=codes.INDEX,
                 )
+                return codes.INDEX
         elif member == "__setitem__":
             # Indexed set.
             self.fail(
@@ -433,6 +440,7 @@ def has_no_attr(
                 context,
                 code=codes.INDEX,
             )
+            return codes.INDEX
         elif member == "__call__":
             if isinstance(original_type, Instance) and (
                 original_type.type.fullname == "builtins.function"
@@ -440,12 +448,14 @@ def has_no_attr(
                 # "'function' not callable" is a confusing error message.
                 # Explain that the problem is that the type of the function is not known.
                 self.fail("Cannot call function of unknown type", context, code=codes.OPERATOR)
+                return codes.OPERATOR
             else:
                 self.fail(
                     message_registry.NOT_CALLABLE.format(format_type(original_type, self.options)),
                     context,
                     code=codes.OPERATOR,
                 )
+                return codes.OPERATOR
         else:
             # The non-special case: a missing ordinary attribute.
             extra = ""
@@ -501,6 +511,7 @@ def has_no_attr(
                         context,
                         code=codes.ATTR_DEFINED,
                     )
+                return codes.ATTR_DEFINED
             elif isinstance(original_type, UnionType):
                 # The checker passes "object" in lieu of "None" for attribute
                 # checks, so we manually convert it back.
@@ -518,6 +529,7 @@ def has_no_attr(
                     context,
                     code=codes.UNION_ATTR,
                 )
+                return codes.UNION_ATTR
             elif isinstance(original_type, TypeVarType):
                 bound = get_proper_type(original_type.upper_bound)
                 if isinstance(bound, UnionType):
@@ -531,6 +543,7 @@ def has_no_attr(
                         context,
                         code=codes.UNION_ATTR,
                     )
+                    return codes.UNION_ATTR
             else:
                 self.fail(
                     '{} has no attribute "{}"{}'.format(
@@ -539,7 +552,8 @@ def has_no_attr(
                     context,
                     code=codes.ATTR_DEFINED,
                 )
-        return AnyType(TypeOfAny.from_error)
+                return codes.ATTR_DEFINED
+        return None
 
     def unsupported_operand_types(
         self,
@@ -1107,8 +1121,8 @@ def unpacking_strings_disallowed(self, context: Context) -> None:
     def type_not_iterable(self, type: Type, context: Context) -> None:
         self.fail(f"{format_type(type, self.options)} object is not iterable", context)
 
-    def possible_missing_await(self, context: Context) -> None:
-        self.note('Maybe you forgot to use "await"?', context)
+    def possible_missing_await(self, context: Context, code: ErrorCode | None) -> None:
+        self.note('Maybe you forgot to use "await"?', context, code=code)
 
     def incompatible_operator_assignment(self, op: str, context: Context) -> None:
         self.fail(f"Result type of {op} incompatible in assignment", context)
diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test
index 7afdbd687135..f0fa206645dd 100644
--- a/test-data/unit/check-async-await.test
+++ b/test-data/unit/check-async-await.test
@@ -165,6 +165,33 @@ async def f() -> None:
 [out]
 main:4: error: "List[int]" has no attribute "__aiter__" (not async iterable)
 
+[case testAsyncForErrorNote]
+
+from typing import AsyncIterator, AsyncGenerator
+async def g() -> AsyncGenerator[str, None]:
+    pass
+
+async def f() -> None:
+    async for x in g():
+        pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-async.pyi]
+[out]
+main:7: error: "Coroutine[Any, Any, AsyncGenerator[str, None]]" has no attribute "__aiter__" (not async iterable)
+main:7: note: Maybe you forgot to use "await"?
+
+[case testAsyncForErrorCanBeIgnored]
+
+from typing import AsyncIterator, AsyncGenerator
+async def g() -> AsyncGenerator[str, None]:
+    pass
+
+async def f() -> None:
+    async for x in g():  # type: ignore[attr-defined]
+        pass
+[builtins fixtures/async_await.pyi]
+[typing fixtures/typing-async.pyi]
+
 [case testAsyncForTypeComments]
 
 from typing import AsyncIterator, Union

From 7a62481c4ed4007a0323118d3e1b8727b2136434 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
 <41898282+github-actions[bot]@users.noreply.github.com>
Date: Sun, 1 Oct 2023 02:05:55 -0700
Subject: [PATCH 156/288] Sync typeshed (#16206)

Source commit:

https://github.com/python/typeshed/commit/559d31c4a33045310a30843dd7fac88a62cc5915
---
 mypy/typeshed/stdlib/_ctypes.pyi              |  6 ++
 mypy/typeshed/stdlib/_curses.pyi              | 23 ++---
 mypy/typeshed/stdlib/_posixsubprocess.pyi     | 16 +--
 mypy/typeshed/stdlib/_typeshed/__init__.pyi   |  8 +-
 mypy/typeshed/stdlib/abc.pyi                  |  3 +-
 mypy/typeshed/stdlib/ast.pyi                  |  8 ++
 mypy/typeshed/stdlib/builtins.pyi             | 74 ++++++++------
 mypy/typeshed/stdlib/codecs.pyi               |  8 +-
 mypy/typeshed/stdlib/collections/__init__.pyi |  9 ++
 mypy/typeshed/stdlib/fcntl.pyi                |  5 +
 mypy/typeshed/stdlib/http/server.pyi          |  1 +
 mypy/typeshed/stdlib/logging/handlers.pyi     |  2 +
 mypy/typeshed/stdlib/mmap.pyi                 |  2 +
 mypy/typeshed/stdlib/multiprocessing/util.pyi | 12 +--
 mypy/typeshed/stdlib/os/__init__.pyi          | 98 +++++++++++++++----
 mypy/typeshed/stdlib/posix.pyi                | 42 ++++++++
 mypy/typeshed/stdlib/resource.pyi             |  9 +-
 mypy/typeshed/stdlib/signal.pyi               |  8 +-
 mypy/typeshed/stdlib/ssl.pyi                  |  2 -
 mypy/typeshed/stdlib/sys.pyi                  |  5 +-
 mypy/typeshed/stdlib/syslog.pyi               |  6 +-
 mypy/typeshed/stdlib/termios.pyi              | 10 +-
 mypy/typeshed/stdlib/tty.pyi                  | 15 ++-
 mypy/typeshed/stdlib/types.pyi                | 40 ++++----
 mypy/typeshed/stdlib/typing.pyi               |  4 +-
 mypy/typeshed/stdlib/typing_extensions.pyi    |  7 ++
 mypy/typeshed/stdlib/unittest/case.pyi        |  4 +-
 mypy/typeshed/stdlib/xml/sax/__init__.pyi     | 19 ++--
 mypy/typeshed/stdlib/xml/sax/handler.pyi      | 27 ++---
 mypy/typeshed/stdlib/xml/sax/saxutils.pyi     | 64 ++++++------
 mypy/typeshed/stdlib/xml/sax/xmlreader.pyi    | 82 +++++++++-------
 mypy/typeshed/stdlib/xxlimited.pyi            |  2 +
 32 files changed, 404 insertions(+), 217 deletions(-)

diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi
index 538c07d54aad..b48b1f7d318c 100644
--- a/mypy/typeshed/stdlib/_ctypes.pyi
+++ b/mypy/typeshed/stdlib/_ctypes.pyi
@@ -56,6 +56,12 @@ class _CData(metaclass=_CDataMeta):
     _b_base_: int
     _b_needsfree_: bool
     _objects: Mapping[Any, int] | None
+    # At runtime the following classmethods are available only on classes, not
+    # on instances. This can't be reflected properly in the type system:
+    #
+    # Structure.from_buffer(...)  # valid at runtime
+    # Structure(...).from_buffer(...)  # invalid at runtime
+    #
     @classmethod
     def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ...
     @classmethod
diff --git a/mypy/typeshed/stdlib/_curses.pyi b/mypy/typeshed/stdlib/_curses.pyi
index 61881fc09199..e2319a5fcc1f 100644
--- a/mypy/typeshed/stdlib/_curses.pyi
+++ b/mypy/typeshed/stdlib/_curses.pyi
@@ -276,12 +276,7 @@ if sys.platform != "win32":
     def can_change_color() -> bool: ...
     def cbreak(__flag: bool = True) -> None: ...
     def color_content(__color_number: int) -> tuple[int, int, int]: ...
-    # Changed in Python 3.8.8 and 3.9.2
-    if sys.version_info >= (3, 8):
-        def color_pair(pair_number: int) -> int: ...
-    else:
-        def color_pair(__color_number: int) -> int: ...
-
+    def color_pair(__pair_number: int) -> int: ...
     def curs_set(__visibility: int) -> int: ...
     def def_prog_mode() -> None: ...
     def def_shell_mode() -> None: ...
@@ -366,7 +361,10 @@ if sys.platform != "win32":
     ) -> bytes: ...
     def typeahead(__fd: int) -> None: ...
     def unctrl(__ch: _ChType) -> bytes: ...
-    def unget_wch(__ch: int | str) -> None: ...
+    if sys.version_info < (3, 12) or sys.platform != "darwin":
+        # The support for macos was dropped in 3.12
+        def unget_wch(__ch: int | str) -> None: ...
+
     def ungetch(__ch: _ChType) -> None: ...
     def ungetmouse(__id: int, __x: int, __y: int, __z: int, __bstate: int) -> None: ...
     def update_lines_cols() -> None: ...
@@ -441,10 +439,13 @@ if sys.platform != "win32":
         def getch(self) -> int: ...
         @overload
         def getch(self, y: int, x: int) -> int: ...
-        @overload
-        def get_wch(self) -> int | str: ...
-        @overload
-        def get_wch(self, y: int, x: int) -> int | str: ...
+        if sys.version_info < (3, 12) or sys.platform != "darwin":
+            # The support for macos was dropped in 3.12
+            @overload
+            def get_wch(self) -> int | str: ...
+            @overload
+            def get_wch(self, y: int, x: int) -> int | str: ...
+
         @overload
         def getkey(self) -> str: ...
         @overload
diff --git a/mypy/typeshed/stdlib/_posixsubprocess.pyi b/mypy/typeshed/stdlib/_posixsubprocess.pyi
index ca95336bb503..1708063720ba 100644
--- a/mypy/typeshed/stdlib/_posixsubprocess.pyi
+++ b/mypy/typeshed/stdlib/_posixsubprocess.pyi
@@ -6,15 +6,15 @@ from typing_extensions import SupportsIndex
 if sys.platform != "win32":
     def cloexec_pipe() -> tuple[int, int]: ...
     def fork_exec(
-        __process_args: Sequence[StrOrBytesPath] | None,
+        __args: Sequence[StrOrBytesPath] | None,
         __executable_list: Sequence[bytes],
         __close_fds: bool,
-        __fds_to_keep: tuple[int, ...],
-        __cwd_obj: str,
-        __env_list: Sequence[bytes] | None,
+        __pass_fds: tuple[int, ...],
+        __cwd: str,
+        __env: Sequence[bytes] | None,
         __p2cread: int,
         __p2cwrite: int,
-        __c2pred: int,
+        __c2pread: int,
         __c2pwrite: int,
         __errread: int,
         __errwrite: int,
@@ -23,9 +23,9 @@ if sys.platform != "win32":
         __restore_signals: int,
         __call_setsid: int,
         __pgid_to_set: int,
-        __gid_object: SupportsIndex | None,
-        __groups_list: list[int] | None,
-        __uid_object: SupportsIndex | None,
+        __gid: SupportsIndex | None,
+        __extra_groups: list[int] | None,
+        __uid: SupportsIndex | None,
         __child_umask: int,
         __preexec_fn: Callable[[], None],
         __allow_vfork: bool,
diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi
index 7ae67292e8cd..8e92138c748a 100644
--- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi
+++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi
@@ -7,8 +7,8 @@ from collections.abc import Awaitable, Callable, Iterable, Sequence, Set as Abst
 from dataclasses import Field
 from os import PathLike
 from types import FrameType, TracebackType
-from typing import Any, AnyStr, ClassVar, Generic, Protocol, TypeVar, overload
-from typing_extensions import Buffer, Final, Literal, LiteralString, TypeAlias, final
+from typing import Any, AnyStr, ClassVar, Generic, Protocol, SupportsFloat, SupportsInt, TypeVar, overload
+from typing_extensions import Buffer, Final, Literal, LiteralString, SupportsIndex, TypeAlias, final
 
 _KT = TypeVar("_KT")
 _KT_co = TypeVar("_KT_co", covariant=True)
@@ -312,3 +312,7 @@ TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None]
 #   https://github.com/microsoft/pyright/issues/4339
 class DataclassInstance(Protocol):
     __dataclass_fields__: ClassVar[dict[str, Field[Any]]]
+
+# Anything that can be passed to the int/float constructors
+ConvertibleToInt: TypeAlias = str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc
+ConvertibleToFloat: TypeAlias = str | ReadableBuffer | SupportsFloat | SupportsIndex
diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi
index 43893a298341..7fe1d09f7589 100644
--- a/mypy/typeshed/stdlib/abc.pyi
+++ b/mypy/typeshed/stdlib/abc.pyi
@@ -40,7 +40,8 @@ class abstractstaticmethod(staticmethod[_P, _R_co]):
 class abstractproperty(property):
     __isabstractmethod__: Literal[True]
 
-class ABC(metaclass=ABCMeta): ...
+class ABC(metaclass=ABCMeta):
+    __slots__ = ()
 
 def get_cache_token() -> object: ...
 
diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi
index 377138141340..a61b4e35fd56 100644
--- a/mypy/typeshed/stdlib/ast.pyi
+++ b/mypy/typeshed/stdlib/ast.pyi
@@ -138,8 +138,10 @@ class NodeVisitor:
     def visit_withitem(self, node: withitem) -> Any: ...
     if sys.version_info >= (3, 10):
         def visit_Match(self, node: Match) -> Any: ...
+        def visit_match_case(self, node: match_case) -> Any: ...
         def visit_MatchValue(self, node: MatchValue) -> Any: ...
         def visit_MatchSequence(self, node: MatchSequence) -> Any: ...
+        def visit_MatchSingleton(self, node: MatchSingleton) -> Any: ...
         def visit_MatchStar(self, node: MatchStar) -> Any: ...
         def visit_MatchMapping(self, node: MatchMapping) -> Any: ...
         def visit_MatchClass(self, node: MatchClass) -> Any: ...
@@ -149,6 +151,12 @@ class NodeVisitor:
     if sys.version_info >= (3, 11):
         def visit_TryStar(self, node: TryStar) -> Any: ...
 
+    if sys.version_info >= (3, 12):
+        def visit_TypeVar(self, node: TypeVar) -> Any: ...
+        def visit_ParamSpec(self, node: ParamSpec) -> Any: ...
+        def visit_TypeVarTuple(self, node: TypeVarTuple) -> Any: ...
+        def visit_TypeAlias(self, node: TypeAlias) -> Any: ...
+
     # visit methods for deprecated nodes
     def visit_ExtSlice(self, node: ExtSlice) -> Any: ...
     def visit_Index(self, node: Index) -> Any: ...
diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index 9e413579e0fb..dedd72933028 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -5,6 +5,8 @@ import types
 from _collections_abc import dict_items, dict_keys, dict_values
 from _typeshed import (
     AnyStr_co,
+    ConvertibleToFloat,
+    ConvertibleToInt,
     FileDescriptorOrPath,
     OpenBinaryMode,
     OpenBinaryModeReading,
@@ -24,7 +26,6 @@ from _typeshed import (
     SupportsRDivMod,
     SupportsRichComparison,
     SupportsRichComparisonT,
-    SupportsTrunc,
     SupportsWrite,
 )
 from collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet, Reversible, Set as AbstractSet, Sized
@@ -48,7 +49,6 @@ from typing import (  # noqa: Y022
     SupportsBytes,
     SupportsComplex,
     SupportsFloat,
-    SupportsInt,
     TypeVar,
     overload,
     type_check_only,
@@ -220,7 +220,7 @@ _LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0]  # noqa: Y026
 
 class int:
     @overload
-    def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> Self: ...
+    def __new__(cls, __x: ConvertibleToInt = ...) -> Self: ...
     @overload
     def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> Self: ...
     if sys.version_info >= (3, 8):
@@ -326,7 +326,7 @@ class int:
     def __index__(self) -> int: ...
 
 class float:
-    def __new__(cls, __x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ...
+    def __new__(cls, __x: ConvertibleToFloat = ...) -> Self: ...
     def as_integer_ratio(self) -> tuple[int, int]: ...
     def hex(self) -> str: ...
     def is_integer(self) -> bool: ...
@@ -774,7 +774,7 @@ class memoryview(Sequence[int]):
     def contiguous(self) -> bool: ...
     @property
     def nbytes(self) -> int: ...
-    def __init__(self, obj: ReadableBuffer) -> None: ...
+    def __new__(cls, obj: ReadableBuffer) -> Self: ...
     def __enter__(self) -> Self: ...
     def __exit__(
         self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None
@@ -853,9 +853,9 @@ class slice:
     @property
     def stop(self) -> Any: ...
     @overload
-    def __init__(self, __stop: Any) -> None: ...
+    def __new__(cls, __stop: Any) -> Self: ...
     @overload
-    def __init__(self, __start: Any, __stop: Any, __step: Any = ...) -> None: ...
+    def __new__(cls, __start: Any, __stop: Any, __step: Any = ...) -> Self: ...
     def __eq__(self, __value: object) -> bool: ...
     __hash__: ClassVar[None]  # type: ignore[assignment]
     def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ...
@@ -1110,7 +1110,7 @@ class frozenset(AbstractSet[_T_co], Generic[_T_co]):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
 
 class enumerate(Iterator[tuple[int, _T]], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ...
+    def __new__(cls, iterable: Iterable[_T], start: int = ...) -> Self: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> tuple[int, _T]: ...
     if sys.version_info >= (3, 9):
@@ -1125,9 +1125,9 @@ class range(Sequence[int]):
     @property
     def step(self) -> int: ...
     @overload
-    def __init__(self, __stop: SupportsIndex) -> None: ...
+    def __new__(cls, __stop: SupportsIndex) -> Self: ...
     @overload
-    def __init__(self, __start: SupportsIndex, __stop: SupportsIndex, __step: SupportsIndex = ...) -> None: ...
+    def __new__(cls, __start: SupportsIndex, __stop: SupportsIndex, __step: SupportsIndex = ...) -> Self: ...
     def count(self, __value: int) -> int: ...
     def index(self, __value: int) -> int: ...  # type: ignore[override]
     def __len__(self) -> int: ...
@@ -1320,11 +1320,11 @@ def exit(code: sys._ExitCode = None) -> NoReturn: ...
 
 class filter(Iterator[_T], Generic[_T]):
     @overload
-    def __init__(self, __function: None, __iterable: Iterable[_T | None]) -> None: ...
+    def __new__(cls, __function: None, __iterable: Iterable[_T | None]) -> Self: ...
     @overload
-    def __init__(self, __function: Callable[[_S], TypeGuard[_T]], __iterable: Iterable[_S]) -> None: ...
+    def __new__(cls, __function: Callable[[_S], TypeGuard[_T]], __iterable: Iterable[_S]) -> Self: ...
     @overload
-    def __init__(self, __function: Callable[[_T], Any], __iterable: Iterable[_T]) -> None: ...
+    def __new__(cls, __function: Callable[[_T], Any], __iterable: Iterable[_T]) -> Self: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
 
@@ -1379,35 +1379,35 @@ def locals() -> dict[str, Any]: ...
 
 class map(Iterator[_S], Generic[_S]):
     @overload
-    def __init__(self, __func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> None: ...
+    def __new__(cls, __func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> Self: ...
     @overload
-    def __init__(self, __func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> None: ...
+    def __new__(cls, __func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Self: ...
     @overload
-    def __init__(
-        self, __func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]
-    ) -> None: ...
+    def __new__(
+        cls, __func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]
+    ) -> Self: ...
     @overload
-    def __init__(
-        self,
+    def __new__(
+        cls,
         __func: Callable[[_T1, _T2, _T3, _T4], _S],
         __iter1: Iterable[_T1],
         __iter2: Iterable[_T2],
         __iter3: Iterable[_T3],
         __iter4: Iterable[_T4],
-    ) -> None: ...
+    ) -> Self: ...
     @overload
-    def __init__(
-        self,
+    def __new__(
+        cls,
         __func: Callable[[_T1, _T2, _T3, _T4, _T5], _S],
         __iter1: Iterable[_T1],
         __iter2: Iterable[_T2],
         __iter3: Iterable[_T3],
         __iter4: Iterable[_T4],
         __iter5: Iterable[_T5],
-    ) -> None: ...
+    ) -> Self: ...
     @overload
-    def __init__(
-        self,
+    def __new__(
+        cls,
         __func: Callable[..., _S],
         __iter1: Iterable[Any],
         __iter2: Iterable[Any],
@@ -1416,7 +1416,7 @@ class map(Iterator[_S], Generic[_S]):
         __iter5: Iterable[Any],
         __iter6: Iterable[Any],
         *iterables: Iterable[Any],
-    ) -> None: ...
+    ) -> Self: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> _S: ...
 
@@ -1725,6 +1725,8 @@ def vars(__object: Any = ...) -> dict[str, Any]: ...
 
 class zip(Iterator[_T_co], Generic[_T_co]):
     if sys.version_info >= (3, 10):
+        @overload
+        def __new__(cls, *, strict: bool = ...) -> zip[Any]: ...
         @overload
         def __new__(cls, __iter1: Iterable[_T1], *, strict: bool = ...) -> zip[tuple[_T1]]: ...
         @overload
@@ -1767,6 +1769,8 @@ class zip(Iterator[_T_co], Generic[_T_co]):
             strict: bool = ...,
         ) -> zip[tuple[Any, ...]]: ...
     else:
+        @overload
+        def __new__(cls) -> zip[Any]: ...
         @overload
         def __new__(cls, __iter1: Iterable[_T1]) -> zip[tuple[_T1]]: ...
         @overload
@@ -1812,11 +1816,17 @@ def __import__(
 ) -> types.ModuleType: ...
 def __build_class__(__func: Callable[[], _Cell | Any], __name: str, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ...
 
-# Actually the type of Ellipsis is <type 'ellipsis'>, but since it's
-# not exposed anywhere under that name, we make it private here.
-@final
-@type_check_only
-class ellipsis: ...
+if sys.version_info >= (3, 10):
+    # In Python 3.10, EllipsisType is exposed publicly in the types module.
+    @final
+    class ellipsis: ...
+
+else:
+    # Actually the type of Ellipsis is <type 'ellipsis'>, but since it's
+    # not exposed anywhere under that name, we make it private here.
+    @final
+    @type_check_only
+    class ellipsis: ...
 
 Ellipsis: ellipsis
 
diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi
index c9b6a4a82da6..f8c92392a599 100644
--- a/mypy/typeshed/stdlib/codecs.pyi
+++ b/mypy/typeshed/stdlib/codecs.pyi
@@ -78,16 +78,16 @@ class _Stream(_WritableStream, _ReadableStream, Protocol): ...
 # They were much more common in Python 2 than in Python 3.
 
 class _Encoder(Protocol):
-    def __call__(self, input: str, errors: str = ...) -> tuple[bytes, int]: ...  # signature of Codec().encode
+    def __call__(self, __input: str, __errors: str = ...) -> tuple[bytes, int]: ...  # signature of Codec().encode
 
 class _Decoder(Protocol):
-    def __call__(self, input: bytes, errors: str = ...) -> tuple[str, int]: ...  # signature of Codec().decode
+    def __call__(self, __input: bytes, __errors: str = ...) -> tuple[str, int]: ...  # signature of Codec().decode
 
 class _StreamReader(Protocol):
-    def __call__(self, stream: _ReadableStream, errors: str = ...) -> StreamReader: ...
+    def __call__(self, __stream: _ReadableStream, __errors: str = ...) -> StreamReader: ...
 
 class _StreamWriter(Protocol):
-    def __call__(self, stream: _WritableStream, errors: str = ...) -> StreamWriter: ...
+    def __call__(self, __stream: _WritableStream, __errors: str = ...) -> StreamWriter: ...
 
 class _IncrementalEncoder(Protocol):
     def __call__(self, errors: str = ...) -> IncrementalEncoder: ...
diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi
index 3b8d92f78612..1d560117a54f 100644
--- a/mypy/typeshed/stdlib/collections/__init__.pyi
+++ b/mypy/typeshed/stdlib/collections/__init__.pyi
@@ -373,6 +373,15 @@ class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
     @overload
     def setdefault(self, key: _KT, default: _VT) -> _VT: ...
     def __eq__(self, __value: object) -> bool: ...
+    if sys.version_info >= (3, 9):
+        @overload
+        def __or__(self, __value: dict[_KT, _VT]) -> Self: ...
+        @overload
+        def __or__(self, __value: dict[_T1, _T2]) -> OrderedDict[_KT | _T1, _VT | _T2]: ...
+        @overload
+        def __ror__(self, __value: dict[_KT, _VT]) -> Self: ...
+        @overload
+        def __ror__(self, __value: dict[_T1, _T2]) -> OrderedDict[_KT | _T1, _VT | _T2]: ...  # type: ignore[misc]
 
 class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]):
     default_factory: Callable[[], _VT] | None
diff --git a/mypy/typeshed/stdlib/fcntl.pyi b/mypy/typeshed/stdlib/fcntl.pyi
index 6aec7515f330..56fd5679a1c8 100644
--- a/mypy/typeshed/stdlib/fcntl.pyi
+++ b/mypy/typeshed/stdlib/fcntl.pyi
@@ -101,6 +101,11 @@ if sys.platform != "win32":
         I_STR: int
         I_SWROPT: int
         I_UNLINK: int
+
+    if sys.version_info >= (3, 12) and sys.platform == "linux":
+        FICLONE: int
+        FICLONERANGE: int
+
     @overload
     def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = 0) -> int: ...
     @overload
diff --git a/mypy/typeshed/stdlib/http/server.pyi b/mypy/typeshed/stdlib/http/server.pyi
index c9700f70e791..22c33bc3787a 100644
--- a/mypy/typeshed/stdlib/http/server.pyi
+++ b/mypy/typeshed/stdlib/http/server.pyi
@@ -54,6 +54,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
     extensions_map: dict[str, str]
     if sys.version_info >= (3, 12):
         index_pages: ClassVar[tuple[str, ...]]
+    directory: str
     def __init__(
         self,
         request: socketserver._RequestType,
diff --git a/mypy/typeshed/stdlib/logging/handlers.pyi b/mypy/typeshed/stdlib/logging/handlers.pyi
index ad5bf392b50f..2280dbad4c5d 100644
--- a/mypy/typeshed/stdlib/logging/handlers.pyi
+++ b/mypy/typeshed/stdlib/logging/handlers.pyi
@@ -7,6 +7,7 @@ from collections.abc import Callable
 from logging import FileHandler, Handler, LogRecord
 from re import Pattern
 from socket import SocketKind, socket
+from threading import Thread
 from typing import Any, ClassVar, Protocol, TypeVar
 
 _T = TypeVar("_T")
@@ -264,6 +265,7 @@ class QueueListener:
     handlers: tuple[Handler, ...]  # undocumented
     respect_handler_level: bool  # undocumented
     queue: _QueueLike[Any]  # undocumented
+    _thread: Thread | None  # undocumented
     def __init__(self, queue: _QueueLike[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: ...
     def dequeue(self, block: bool) -> LogRecord: ...
     def prepare(self, record: LogRecord) -> Any: ...
diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi
index 38e1924392c4..09319980692f 100644
--- a/mypy/typeshed/stdlib/mmap.pyi
+++ b/mypy/typeshed/stdlib/mmap.pyi
@@ -16,6 +16,8 @@ if sys.platform == "linux":
     MAP_EXECUTABLE: int
     if sys.version_info >= (3, 10):
         MAP_POPULATE: int
+if sys.version_info >= (3, 11) and sys.platform != "win32" and sys.platform != "darwin":
+    MAP_STACK: int
 
 if sys.platform != "win32":
     MAP_ANON: int
diff --git a/mypy/typeshed/stdlib/multiprocessing/util.pyi b/mypy/typeshed/stdlib/multiprocessing/util.pyi
index 7ca650511e51..aeb46f85a327 100644
--- a/mypy/typeshed/stdlib/multiprocessing/util.pyi
+++ b/mypy/typeshed/stdlib/multiprocessing/util.pyi
@@ -1,9 +1,8 @@
 import threading
-from _typeshed import Incomplete, ReadableBuffer, SupportsTrunc, Unused
+from _typeshed import ConvertibleToInt, Incomplete, Unused
 from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence
 from logging import Logger, _Level as _LoggingLevel
-from typing import Any, SupportsInt
-from typing_extensions import SupportsIndex
+from typing import Any
 
 __all__ = [
     "sub_debug",
@@ -77,9 +76,4 @@ class ForkAwareLocal(threading.local): ...
 MAXFD: int
 
 def close_all_fds_except(fds: Iterable[int]) -> None: ...
-def spawnv_passfds(
-    path: bytes,
-    # args is anything that can be passed to the int constructor
-    args: Sequence[str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc],
-    passfds: Sequence[int],
-) -> int: ...
+def spawnv_passfds(path: bytes, args: Sequence[ConvertibleToInt], passfds: Sequence[int]) -> int: ...
diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi
index 961858ce3c19..fa4c55011eba 100644
--- a/mypy/typeshed/stdlib/os/__init__.pyi
+++ b/mypy/typeshed/stdlib/os/__init__.pyi
@@ -2,6 +2,7 @@ import sys
 from _typeshed import (
     AnyStr_co,
     BytesPath,
+    FileDescriptor,
     FileDescriptorLike,
     FileDescriptorOrPath,
     GenericPath,
@@ -121,6 +122,12 @@ if sys.platform == "linux":
     GRND_NONBLOCK: int
     GRND_RANDOM: int
 
+if sys.platform == "darwin" and sys.version_info >= (3, 12):
+    PRIO_DARWIN_BG: int
+    PRIO_DARWIN_NONUI: int
+    PRIO_DARWIN_PROCESS: int
+    PRIO_DARWIN_THREAD: int
+
 SEEK_SET: int
 SEEK_CUR: int
 SEEK_END: int
@@ -252,12 +259,14 @@ environ: _Environ[str]
 if sys.platform != "win32":
     environb: _Environ[bytes]
 
+if sys.version_info >= (3, 11) or sys.platform != "win32":
+    EX_OK: int
+
 if sys.platform != "win32":
     confstr_names: dict[str, int]
     pathconf_names: dict[str, int]
     sysconf_names: dict[str, int]
 
-    EX_OK: int
     EX_USAGE: int
     EX_DATAERR: int
     EX_NOINPUT: int
@@ -339,6 +348,11 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo
         if sys.version_info >= (3, 8):
             @property
             def st_reparse_tag(self) -> int: ...
+        if sys.version_info >= (3, 12):
+            @property
+            def st_birthtime(self) -> float: ...  # time of file creation in seconds
+            @property
+            def st_birthtime_ns(self) -> int: ...  # time of file creation in nanoseconds
     else:
         @property
         def st_blocks(self) -> int: ...  # number of blocks allocated for file
@@ -347,13 +361,13 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo
         @property
         def st_rdev(self) -> int: ...  # type of device if an inode device
         if sys.platform != "linux":
-            # These properties are available on MacOS, but not on Windows or Ubuntu.
+            # These properties are available on MacOS, but not Ubuntu.
             # On other Unix systems (such as FreeBSD), the following attributes may be
             # available (but may be only filled out if root tries to use them):
             @property
             def st_gen(self) -> int: ...  # file generation number
             @property
-            def st_birthtime(self) -> int: ...  # time of file creation
+            def st_birthtime(self) -> float: ...  # time of file creation in seconds
     if sys.platform == "darwin":
         @property
         def st_flags(self) -> int: ...  # user defined flags for file
@@ -484,8 +498,8 @@ if sys.platform != "win32":
     def setpgid(__pid: int, __pgrp: int) -> None: ...
     def setregid(__rgid: int, __egid: int) -> None: ...
     if sys.platform != "darwin":
-        def setresgid(rgid: int, egid: int, sgid: int) -> None: ...
-        def setresuid(ruid: int, euid: int, suid: int) -> None: ...
+        def setresgid(__rgid: int, __egid: int, __sgid: int) -> None: ...
+        def setresuid(__ruid: int, __euid: int, __suid: int) -> None: ...
 
     def setreuid(__ruid: int, __euid: int) -> None: ...
     def getsid(__pid: int) -> int: ...
@@ -614,13 +628,15 @@ def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | N
 def pipe() -> tuple[int, int]: ...
 def read(__fd: int, __length: int) -> bytes: ...
 
+if sys.version_info >= (3, 12) or sys.platform != "win32":
+    def get_blocking(__fd: int) -> bool: ...
+    def set_blocking(__fd: int, __blocking: bool) -> None: ...
+
 if sys.platform != "win32":
     def fchmod(fd: int, mode: int) -> None: ...
     def fchown(fd: int, uid: int, gid: int) -> None: ...
     def fpathconf(__fd: int, __name: str | int) -> int: ...
     def fstatvfs(__fd: int) -> statvfs_result: ...
-    def get_blocking(__fd: int) -> bool: ...
-    def set_blocking(__fd: int, __blocking: bool) -> None: ...
     def lockf(__fd: int, __command: int, __length: int) -> None: ...
     def openpty() -> tuple[int, int]: ...  # some flavors of Unix
     if sys.platform != "darwin":
@@ -641,18 +657,20 @@ if sys.platform != "win32":
         RWF_SYNC: int
         RWF_HIPRI: int
         RWF_NOWAIT: int
-    @overload
-    def sendfile(out_fd: int, in_fd: int, offset: int | None, count: int) -> int: ...
-    @overload
-    def sendfile(
-        out_fd: int,
-        in_fd: int,
-        offset: int,
-        count: int,
-        headers: Sequence[ReadableBuffer] = ...,
-        trailers: Sequence[ReadableBuffer] = ...,
-        flags: int = 0,
-    ) -> int: ...  # FreeBSD and Mac OS X only
+
+    if sys.platform == "linux":
+        def sendfile(out_fd: FileDescriptor, in_fd: FileDescriptor, offset: int | None, count: int) -> int: ...
+    else:
+        def sendfile(
+            out_fd: FileDescriptor,
+            in_fd: FileDescriptor,
+            offset: int,
+            count: int,
+            headers: Sequence[ReadableBuffer] = ...,
+            trailers: Sequence[ReadableBuffer] = ...,
+            flags: int = 0,
+        ) -> int: ...  # FreeBSD and Mac OS X only
+
     def readv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer]) -> int: ...
     def writev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer]) -> int: ...
 
@@ -1042,3 +1060,45 @@ if sys.version_info >= (3, 9):
 
     if sys.platform == "linux":
         def pidfd_open(pid: int, flags: int = ...) -> int: ...
+
+if sys.version_info >= (3, 12) and sys.platform == "win32":
+    def listdrives() -> list[str]: ...
+    def listmounts(volume: str) -> list[str]: ...
+    def listvolumes() -> list[str]: ...
+
+if sys.version_info >= (3, 10) and sys.platform == "linux":
+    EFD_CLOEXEC: int
+    EFD_NONBLOCK: int
+    EFD_SEMAPHORE: int
+    SPLICE_F_MORE: int
+    SPLICE_F_MOVE: int
+    SPLICE_F_NONBLOCK: int
+    def eventfd(initval: int, flags: int = 524288) -> FileDescriptor: ...
+    def eventfd_read(fd: FileDescriptor) -> int: ...
+    def eventfd_write(fd: FileDescriptor, value: int) -> None: ...
+    def splice(
+        src: FileDescriptor,
+        dst: FileDescriptor,
+        count: int,
+        offset_src: int | None = ...,
+        offset_dst: int | None = ...,
+        flags: int = 0,
+    ) -> int: ...
+
+if sys.version_info >= (3, 12) and sys.platform == "linux":
+    CLONE_FILES: int
+    CLONE_FS: int
+    CLONE_NEWCGROUP: int
+    CLONE_NEWIPC: int
+    CLONE_NEWNET: int
+    CLONE_NEWNS: int
+    CLONE_NEWPID: int
+    CLONE_NEWTIME: int
+    CLONE_NEWUSER: int
+    CLONE_NEWUTS: int
+    CLONE_SIGHAND: int
+    CLONE_SYSVSEM: int
+    CLONE_THREAD: int
+    CLONE_VM: int
+    def unshare(flags: int) -> None: ...
+    def setns(fd: FileDescriptorLike, nstype: int = 0) -> None: ...
diff --git a/mypy/typeshed/stdlib/posix.pyi b/mypy/typeshed/stdlib/posix.pyi
index ffd96757586b..ab6bf2e63be5 100644
--- a/mypy/typeshed/stdlib/posix.pyi
+++ b/mypy/typeshed/stdlib/posix.pyi
@@ -236,6 +236,20 @@ if sys.platform != "win32":
             removexattr as removexattr,
             setxattr as setxattr,
         )
+
+        if sys.version_info >= (3, 10):
+            from os import (
+                EFD_CLOEXEC as EFD_CLOEXEC,
+                EFD_NONBLOCK as EFD_NONBLOCK,
+                EFD_SEMAPHORE as EFD_SEMAPHORE,
+                SPLICE_F_MORE as SPLICE_F_MORE,
+                SPLICE_F_MOVE as SPLICE_F_MOVE,
+                SPLICE_F_NONBLOCK as SPLICE_F_NONBLOCK,
+                eventfd as eventfd,
+                eventfd_read as eventfd_read,
+                eventfd_write as eventfd_write,
+                splice as splice,
+            )
     else:
         from os import chflags as chflags, lchflags as lchflags, lchmod as lchmod
 
@@ -314,6 +328,34 @@ if sys.platform != "win32":
     if sys.platform != "darwin":
         from os import RWF_DSYNC as RWF_DSYNC, RWF_HIPRI as RWF_HIPRI, RWF_NOWAIT as RWF_NOWAIT, RWF_SYNC as RWF_SYNC
 
+    if sys.version_info >= (3, 12) and sys.platform == "linux":
+        from os import (
+            CLONE_FILES as CLONE_FILES,
+            CLONE_FS as CLONE_FS,
+            CLONE_NEWCGROUP as CLONE_NEWCGROUP,
+            CLONE_NEWIPC as CLONE_NEWIPC,
+            CLONE_NEWNET as CLONE_NEWNET,
+            CLONE_NEWNS as CLONE_NEWNS,
+            CLONE_NEWPID as CLONE_NEWPID,
+            CLONE_NEWTIME as CLONE_NEWTIME,
+            CLONE_NEWUSER as CLONE_NEWUSER,
+            CLONE_NEWUTS as CLONE_NEWUTS,
+            CLONE_SIGHAND as CLONE_SIGHAND,
+            CLONE_SYSVSEM as CLONE_SYSVSEM,
+            CLONE_THREAD as CLONE_THREAD,
+            CLONE_VM as CLONE_VM,
+            setns as setns,
+            unshare as unshare,
+        )
+
+    if sys.version_info >= (3, 12) and sys.platform == "darwin":
+        from os import (
+            PRIO_DARWIN_BG as PRIO_DARWIN_BG,
+            PRIO_DARWIN_NONUI as PRIO_DARWIN_NONUI,
+            PRIO_DARWIN_PROCESS as PRIO_DARWIN_PROCESS,
+            PRIO_DARWIN_THREAD as PRIO_DARWIN_THREAD,
+        )
+
     # Not same as os.environ or os.environb
     # Because of this variable, we can't do "from posix import *" in os/__init__.pyi
     environ: dict[bytes, bytes]
diff --git a/mypy/typeshed/stdlib/resource.pyi b/mypy/typeshed/stdlib/resource.pyi
index f2e979ff89af..57cefb4681ac 100644
--- a/mypy/typeshed/stdlib/resource.pyi
+++ b/mypy/typeshed/stdlib/resource.pyi
@@ -1,6 +1,5 @@
 import sys
 from _typeshed import structseq
-from typing import overload
 from typing_extensions import Final, final
 
 if sys.platform != "win32":
@@ -86,8 +85,8 @@ if sys.platform != "win32":
     def getrusage(__who: int) -> struct_rusage: ...
     def setrlimit(__resource: int, __limits: tuple[int, int]) -> None: ...
     if sys.platform == "linux":
-        @overload
-        def prlimit(pid: int, resource: int, limits: tuple[int, int]) -> tuple[int, int]: ...
-        @overload
-        def prlimit(pid: int, resource: int) -> tuple[int, int]: ...
+        if sys.version_info >= (3, 12):
+            def prlimit(__pid: int, __resource: int, __limits: tuple[int, int] | None = None) -> tuple[int, int]: ...
+        else:
+            def prlimit(__pid: int, __resource: int, __limits: tuple[int, int] = ...) -> tuple[int, int]: ...
     error = OSError
diff --git a/mypy/typeshed/stdlib/signal.pyi b/mypy/typeshed/stdlib/signal.pyi
index 4c961a0c9aab..72c78f1b69f5 100644
--- a/mypy/typeshed/stdlib/signal.pyi
+++ b/mypy/typeshed/stdlib/signal.pyi
@@ -170,8 +170,12 @@ else:
             @property
             def si_band(self) -> int: ...
 
-        def sigtimedwait(sigset: Iterable[int], timeout: float) -> struct_siginfo | None: ...
-        def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo: ...
+        if sys.version_info >= (3, 10):
+            def sigtimedwait(__sigset: Iterable[int], __timeout: float) -> struct_siginfo | None: ...
+            def sigwaitinfo(__sigset: Iterable[int]) -> struct_siginfo: ...
+        else:
+            def sigtimedwait(sigset: Iterable[int], timeout: float) -> struct_siginfo | None: ...
+            def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo: ...
 
 if sys.version_info >= (3, 8):
     def strsignal(__signalnum: _SIGNUM) -> str | None: ...
diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi
index 73762cd75e79..faf667afb475 100644
--- a/mypy/typeshed/stdlib/ssl.pyi
+++ b/mypy/typeshed/stdlib/ssl.pyi
@@ -203,7 +203,6 @@ class Options(enum.IntFlag):
         OP_ENABLE_MIDDLEBOX_COMPAT: int
     if sys.version_info >= (3, 12):
         OP_LEGACY_SERVER_CONNECT: int
-    if sys.version_info >= (3, 12) and sys.platform != "linux":
         OP_ENABLE_KTLS: int
     if sys.version_info >= (3, 11):
         OP_IGNORE_UNEXPECTED_EOF: int
@@ -227,7 +226,6 @@ if sys.version_info >= (3, 8):
     OP_ENABLE_MIDDLEBOX_COMPAT: Options
 if sys.version_info >= (3, 12):
     OP_LEGACY_SERVER_CONNECT: Options
-if sys.version_info >= (3, 12) and sys.platform != "linux":
     OP_ENABLE_KTLS: Options
 if sys.version_info >= (3, 11):
     OP_IGNORE_UNEXPECTED_EOF: Options
diff --git a/mypy/typeshed/stdlib/sys.pyi b/mypy/typeshed/stdlib/sys.pyi
index ca049124053a..a5e819d53326 100644
--- a/mypy/typeshed/stdlib/sys.pyi
+++ b/mypy/typeshed/stdlib/sys.pyi
@@ -225,9 +225,10 @@ class _thread_info(_UninstantiableStructseq, tuple[_ThreadInfoName, _ThreadInfoL
     def version(self) -> str | None: ...
 
 thread_info: _thread_info
+_ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"]
 
 @final
-class _version_info(_UninstantiableStructseq, tuple[int, int, int, str, int]):
+class _version_info(_UninstantiableStructseq, tuple[int, int, int, _ReleaseLevel, int]):
     @property
     def major(self) -> int: ...
     @property
@@ -235,7 +236,7 @@ class _version_info(_UninstantiableStructseq, tuple[int, int, int, str, int]):
     @property
     def micro(self) -> int: ...
     @property
-    def releaselevel(self) -> str: ...
+    def releaselevel(self) -> _ReleaseLevel: ...
     @property
     def serial(self) -> int: ...
 
diff --git a/mypy/typeshed/stdlib/syslog.pyi b/mypy/typeshed/stdlib/syslog.pyi
index cfa8df887c1b..0b769301a482 100644
--- a/mypy/typeshed/stdlib/syslog.pyi
+++ b/mypy/typeshed/stdlib/syslog.pyi
@@ -36,11 +36,11 @@ if sys.platform != "win32":
     LOG_USER: Literal[8]
     LOG_UUCP: Literal[64]
     LOG_WARNING: Literal[4]
-    def LOG_MASK(a: int) -> int: ...
-    def LOG_UPTO(a: int) -> int: ...
+    def LOG_MASK(__pri: int) -> int: ...
+    def LOG_UPTO(__pri: int) -> int: ...
     def closelog() -> None: ...
     def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ...
-    def setlogmask(x: int) -> int: ...
+    def setlogmask(__maskpri: int) -> int: ...
     @overload
     def syslog(priority: int, message: str) -> None: ...
     @overload
diff --git a/mypy/typeshed/stdlib/termios.pyi b/mypy/typeshed/stdlib/termios.pyi
index bf8d7bee2473..776396cce407 100644
--- a/mypy/typeshed/stdlib/termios.pyi
+++ b/mypy/typeshed/stdlib/termios.pyi
@@ -3,10 +3,12 @@ from _typeshed import FileDescriptorLike
 from typing import Any
 from typing_extensions import TypeAlias
 
-if sys.platform != "win32":
-    # Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints.
-    _Attr: TypeAlias = list[int | list[bytes | int]]
+# Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints.
+_Attr: TypeAlias = list[int | list[bytes | int]] | list[int | list[bytes]] | list[int | list[int]]
+# Same as _Attr for return types; we use Any to avoid a union.
+_AttrReturn: TypeAlias = list[Any]
 
+if sys.platform != "win32":
     B0: int
     B1000000: int
     B110: int
@@ -252,7 +254,7 @@ if sys.platform != "win32":
     XCASE: int
     XTABS: int
 
-    def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ...  # Returns _Attr; we use Any to avoid a union in the return type
+    def tcgetattr(__fd: FileDescriptorLike) -> _AttrReturn: ...
     def tcsetattr(__fd: FileDescriptorLike, __when: int, __attributes: _Attr) -> None: ...
     def tcsendbreak(__fd: FileDescriptorLike, __duration: int) -> None: ...
     def tcdrain(__fd: FileDescriptorLike) -> None: ...
diff --git a/mypy/typeshed/stdlib/tty.pyi b/mypy/typeshed/stdlib/tty.pyi
index 43f2e1cf9087..add0d57a8d4b 100644
--- a/mypy/typeshed/stdlib/tty.pyi
+++ b/mypy/typeshed/stdlib/tty.pyi
@@ -1,9 +1,16 @@
 import sys
+import termios
 from typing import IO
 from typing_extensions import TypeAlias
 
 if sys.platform != "win32":
     __all__ = ["setraw", "setcbreak"]
+    if sys.version_info >= (3, 12):
+        __all__ += ["cfmakeraw", "cfmakecbreak"]
+
+        _ModeSetterReturn: TypeAlias = termios._AttrReturn
+    else:
+        _ModeSetterReturn: TypeAlias = None
 
     _FD: TypeAlias = int | IO[str]
 
@@ -15,5 +22,9 @@ if sys.platform != "win32":
     ISPEED: int
     OSPEED: int
     CC: int
-    def setraw(fd: _FD, when: int = 2) -> None: ...
-    def setcbreak(fd: _FD, when: int = 2) -> None: ...
+    def setraw(fd: _FD, when: int = 2) -> _ModeSetterReturn: ...
+    def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: ...
+
+    if sys.version_info >= (3, 12):
+        def cfmakeraw(mode: termios._Attr) -> None: ...
+        def cfmakecbreak(mode: termios._Attr) -> None: ...
diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi
index 2f4bd1a88047..8559063834c9 100644
--- a/mypy/typeshed/stdlib/types.pyi
+++ b/mypy/typeshed/stdlib/types.pyi
@@ -69,7 +69,7 @@ _VT_co = TypeVar("_VT_co", covariant=True)
 @final
 class _Cell:
     if sys.version_info >= (3, 8):
-        def __init__(self, __contents: object = ...) -> None: ...
+        def __new__(cls, __contents: object = ...) -> Self: ...
 
     def __eq__(self, __value: object) -> bool: ...
     __hash__: ClassVar[None]  # type: ignore[assignment]
@@ -96,14 +96,14 @@ class FunctionType:
         __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...]
 
     __module__: str
-    def __init__(
-        self,
+    def __new__(
+        cls,
         code: CodeType,
         globals: dict[str, Any],
         name: str | None = ...,
         argdefs: tuple[object, ...] | None = ...,
         closure: tuple[_Cell, ...] | None = ...,
-    ) -> None: ...
+    ) -> Self: ...
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
     @overload
     def __get__(self, __instance: None, __owner: type) -> FunctionType: ...
@@ -162,8 +162,8 @@ class CodeType:
         def co_positions(self) -> Iterable[tuple[int | None, int | None, int | None, int | None]]: ...
 
     if sys.version_info >= (3, 11):
-        def __init__(
-            self,
+        def __new__(
+            cls,
             __argcount: int,
             __posonlyargcount: int,
             __kwonlyargcount: int,
@@ -182,10 +182,10 @@ class CodeType:
             __exceptiontable: bytes,
             __freevars: tuple[str, ...] = ...,
             __cellvars: tuple[str, ...] = ...,
-        ) -> None: ...
+        ) -> Self: ...
     elif sys.version_info >= (3, 10):
-        def __init__(
-            self,
+        def __new__(
+            cls,
             __argcount: int,
             __posonlyargcount: int,
             __kwonlyargcount: int,
@@ -202,10 +202,10 @@ class CodeType:
             __linetable: bytes,
             __freevars: tuple[str, ...] = ...,
             __cellvars: tuple[str, ...] = ...,
-        ) -> None: ...
+        ) -> Self: ...
     elif sys.version_info >= (3, 8):
-        def __init__(
-            self,
+        def __new__(
+            cls,
             __argcount: int,
             __posonlyargcount: int,
             __kwonlyargcount: int,
@@ -222,10 +222,10 @@ class CodeType:
             __lnotab: bytes,
             __freevars: tuple[str, ...] = ...,
             __cellvars: tuple[str, ...] = ...,
-        ) -> None: ...
+        ) -> Self: ...
     else:
-        def __init__(
-            self,
+        def __new__(
+            cls,
             __argcount: int,
             __kwonlyargcount: int,
             __nlocals: int,
@@ -241,7 +241,7 @@ class CodeType:
             __lnotab: bytes,
             __freevars: tuple[str, ...] = ...,
             __cellvars: tuple[str, ...] = ...,
-        ) -> None: ...
+        ) -> Self: ...
     if sys.version_info >= (3, 11):
         def replace(
             self,
@@ -311,7 +311,7 @@ class CodeType:
 @final
 class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]):
     __hash__: ClassVar[None]  # type: ignore[assignment]
-    def __init__(self, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> None: ...
+    def __new__(cls, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> Self: ...
     def __getitem__(self, __key: _KT) -> _VT_co: ...
     def __iter__(self) -> Iterator[_KT]: ...
     def __len__(self) -> int: ...
@@ -444,7 +444,7 @@ class MethodType:
     def __name__(self) -> str: ...  # inherited from the added function
     @property
     def __qualname__(self) -> str: ...  # inherited from the added function
-    def __init__(self, __func: Callable[..., Any], __obj: object) -> None: ...
+    def __new__(cls, __func: Callable[..., Any], __obj: object) -> Self: ...
     def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
     def __eq__(self, __value: object) -> bool: ...
     def __hash__(self) -> int: ...
@@ -513,7 +513,7 @@ class ClassMethodDescriptorType:
 
 @final
 class TracebackType:
-    def __init__(self, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> None: ...
+    def __new__(cls, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> Self: ...
     tb_next: TracebackType | None
     # the rest are read-only even in 3.7
     @property
@@ -610,7 +610,7 @@ if sys.version_info >= (3, 9):
         def __args__(self) -> tuple[Any, ...]: ...
         @property
         def __parameters__(self) -> tuple[Any, ...]: ...
-        def __init__(self, origin: type, args: Any) -> None: ...
+        def __new__(cls, origin: type, args: Any) -> Self: ...
         def __getitem__(self, __typeargs: Any) -> GenericAlias: ...
         def __eq__(self, __value: object) -> bool: ...
         def __hash__(self) -> int: ...
diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi
index 2c1ebe6d7f95..6deb0ffd02b3 100644
--- a/mypy/typeshed/stdlib/typing.pyi
+++ b/mypy/typeshed/stdlib/typing.pyi
@@ -709,8 +709,10 @@ class IO(Iterator[AnyStr], Generic[AnyStr]):
     # See #8726
     @property
     def mode(self) -> str: ...
+    # Usually str, but may be bytes if a bytes path was passed to open(). See #10737.
+    # If PEP 696 becomes available, we may want to use a defaulted TypeVar here.
     @property
-    def name(self) -> str: ...
+    def name(self) -> str | Any: ...
     @abstractmethod
     def close(self) -> None: ...
     @property
diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi
index 9320dc50b6bb..b5e2341cd020 100644
--- a/mypy/typeshed/stdlib/typing_extensions.pyi
+++ b/mypy/typeshed/stdlib/typing_extensions.pyi
@@ -149,6 +149,7 @@ __all__ = [
     "Collection",
     "Container",
     "Dict",
+    "Doc",
     "ForwardRef",
     "FrozenSet",
     "Generator",
@@ -489,3 +490,9 @@ if sys.version_info >= (3, 13):
 else:
     def is_protocol(__tp: type) -> bool: ...
     def get_protocol_members(__tp: type) -> frozenset[str]: ...
+
+class Doc:
+    documentation: str
+    def __init__(self, __documentation: str) -> None: ...
+    def __hash__(self) -> int: ...
+    def __eq__(self, other: object) -> bool: ...
diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi
index 1f58f266ee89..aa04e16d62ec 100644
--- a/mypy/typeshed/stdlib/unittest/case.pyi
+++ b/mypy/typeshed/stdlib/unittest/case.pyi
@@ -126,9 +126,9 @@ class TestCase:
     @overload
     def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ...
     @overload
-    def assertLessEqual(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ...
+    def assertLessEqual(self, a: SupportsDunderLE[_T], b: _T, msg: Any = None) -> None: ...
     @overload
-    def assertLessEqual(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ...
+    def assertLessEqual(self, a: _T, b: SupportsDunderGE[_T], msg: Any = None) -> None: ...
     # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp`
     # are not using `ParamSpec` intentionally,
     # because they might be used with explicitly wrong arg types to raise some error in tests.
diff --git a/mypy/typeshed/stdlib/xml/sax/__init__.pyi b/mypy/typeshed/stdlib/xml/sax/__init__.pyi
index 8bcf902df8d8..f726eae0516f 100644
--- a/mypy/typeshed/stdlib/xml/sax/__init__.pyi
+++ b/mypy/typeshed/stdlib/xml/sax/__init__.pyi
@@ -2,12 +2,18 @@ import sys
 from _typeshed import ReadableBuffer, StrPath, SupportsRead, _T_co
 from collections.abc import Iterable
 from typing import Any, NoReturn, Protocol
+from typing_extensions import TypeAlias
 from xml.sax.handler import ContentHandler as ContentHandler, ErrorHandler as ErrorHandler
 from xml.sax.xmlreader import Locator, XMLReader
 
 class _SupportsReadClose(SupportsRead[_T_co], Protocol[_T_co]):
     def close(self) -> None: ...
 
+if sys.version_info >= (3, 8):
+    _Source: TypeAlias = StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[str]
+else:
+    _Source: TypeAlias = str | _SupportsReadClose[bytes] | _SupportsReadClose[str]
+
 class SAXException(Exception):
     def __init__(self, msg: str, exception: Exception | None = None) -> None: ...
     def getMessage(self) -> str: ...
@@ -28,20 +34,13 @@ class SAXReaderNotAvailable(SAXNotSupportedException): ...
 default_parser_list: list[str]
 
 if sys.version_info >= (3, 8):
+
     def make_parser(parser_list: Iterable[str] = ()) -> XMLReader: ...
-    def parse(
-        source: StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[str],
-        handler: ContentHandler,
-        errorHandler: ErrorHandler = ...,
-    ) -> None: ...
 
 else:
+
     def make_parser(parser_list: list[str] = []) -> XMLReader: ...
-    def parse(
-        source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str],
-        handler: ContentHandler,
-        errorHandler: ErrorHandler = ...,
-    ) -> None: ...
 
+def parse(source: _Source, handler: ContentHandler, errorHandler: ErrorHandler = ...) -> None: ...
 def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ...
 def _create_parser(parser_name: str) -> XMLReader: ...
diff --git a/mypy/typeshed/stdlib/xml/sax/handler.pyi b/mypy/typeshed/stdlib/xml/sax/handler.pyi
index 63b725bd6da6..30fe31d51374 100644
--- a/mypy/typeshed/stdlib/xml/sax/handler.pyi
+++ b/mypy/typeshed/stdlib/xml/sax/handler.pyi
@@ -1,5 +1,6 @@
 import sys
 from typing import NoReturn
+from xml.sax import xmlreader
 
 version: str
 
@@ -9,19 +10,19 @@ class ErrorHandler:
     def warning(self, exception: BaseException) -> None: ...
 
 class ContentHandler:
-    def setDocumentLocator(self, locator): ...
-    def startDocument(self): ...
-    def endDocument(self): ...
-    def startPrefixMapping(self, prefix, uri): ...
-    def endPrefixMapping(self, prefix): ...
-    def startElement(self, name, attrs): ...
-    def endElement(self, name): ...
-    def startElementNS(self, name, qname, attrs): ...
-    def endElementNS(self, name, qname): ...
-    def characters(self, content): ...
-    def ignorableWhitespace(self, whitespace): ...
-    def processingInstruction(self, target, data): ...
-    def skippedEntity(self, name): ...
+    def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ...
+    def startDocument(self) -> None: ...
+    def endDocument(self) -> None: ...
+    def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ...
+    def endPrefixMapping(self, prefix) -> None: ...
+    def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ...
+    def endElement(self, name: str) -> None: ...
+    def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ...
+    def endElementNS(self, name: tuple[str, str], qname: str) -> None: ...
+    def characters(self, content: str) -> None: ...
+    def ignorableWhitespace(self, whitespace: str) -> None: ...
+    def processingInstruction(self, target: str, data: str) -> None: ...
+    def skippedEntity(self, name: str) -> None: ...
 
 class DTDHandler:
     def notationDecl(self, name, publicId, systemId): ...
diff --git a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi
index 0d9223770c6a..06e03a1e4d06 100644
--- a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi
+++ b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi
@@ -2,7 +2,7 @@ from _typeshed import SupportsWrite
 from codecs import StreamReaderWriter, StreamWriter
 from collections.abc import Mapping
 from io import RawIOBase, TextIOBase
-from xml.sax import handler, xmlreader
+from xml.sax import _Source, handler, xmlreader
 
 def escape(data: str, entities: Mapping[str, str] = {}) -> str: ...
 def unescape(data: str, entities: Mapping[str, str] = {}) -> str: ...
@@ -15,46 +15,46 @@ class XMLGenerator(handler.ContentHandler):
         encoding: str = "iso-8859-1",
         short_empty_elements: bool = False,
     ) -> None: ...
-    def startDocument(self): ...
-    def endDocument(self): ...
-    def startPrefixMapping(self, prefix, uri): ...
-    def endPrefixMapping(self, prefix): ...
-    def startElement(self, name, attrs): ...
-    def endElement(self, name): ...
-    def startElementNS(self, name, qname, attrs): ...
-    def endElementNS(self, name, qname): ...
-    def characters(self, content): ...
-    def ignorableWhitespace(self, content): ...
-    def processingInstruction(self, target, data): ...
+    def startDocument(self) -> None: ...
+    def endDocument(self) -> None: ...
+    def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ...
+    def endPrefixMapping(self, prefix: str | None) -> None: ...
+    def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ...
+    def endElement(self, name: str) -> None: ...
+    def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ...
+    def endElementNS(self, name: tuple[str, str], qname: str) -> None: ...
+    def characters(self, content: str) -> None: ...
+    def ignorableWhitespace(self, content: str) -> None: ...
+    def processingInstruction(self, target: str, data: str) -> None: ...
 
 class XMLFilterBase(xmlreader.XMLReader):
     def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ...
     def error(self, exception): ...
     def fatalError(self, exception): ...
     def warning(self, exception): ...
-    def setDocumentLocator(self, locator): ...
-    def startDocument(self): ...
-    def endDocument(self): ...
-    def startPrefixMapping(self, prefix, uri): ...
-    def endPrefixMapping(self, prefix): ...
-    def startElement(self, name, attrs): ...
-    def endElement(self, name): ...
-    def startElementNS(self, name, qname, attrs): ...
-    def endElementNS(self, name, qname): ...
-    def characters(self, content): ...
-    def ignorableWhitespace(self, chars): ...
-    def processingInstruction(self, target, data): ...
-    def skippedEntity(self, name): ...
+    def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ...
+    def startDocument(self) -> None: ...
+    def endDocument(self) -> None: ...
+    def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ...
+    def endPrefixMapping(self, prefix: str | None) -> None: ...
+    def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ...
+    def endElement(self, name: str) -> None: ...
+    def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ...
+    def endElementNS(self, name: tuple[str, str], qname: str) -> None: ...
+    def characters(self, content: str) -> None: ...
+    def ignorableWhitespace(self, chars: str) -> None: ...
+    def processingInstruction(self, target: str, data: str) -> None: ...
+    def skippedEntity(self, name: str) -> None: ...
     def notationDecl(self, name, publicId, systemId): ...
     def unparsedEntityDecl(self, name, publicId, systemId, ndata): ...
     def resolveEntity(self, publicId, systemId): ...
-    def parse(self, source): ...
+    def parse(self, source: _Source) -> None: ...
     def setLocale(self, locale): ...
-    def getFeature(self, name): ...
-    def setFeature(self, name, state): ...
-    def getProperty(self, name): ...
-    def setProperty(self, name, value): ...
-    def getParent(self): ...
-    def setParent(self, parent): ...
+    def getFeature(self, name: str) -> object: ...
+    def setFeature(self, name: str, state: object) -> None: ...
+    def getProperty(self, name: str) -> object: ...
+    def setProperty(self, name: str, value: object) -> None: ...
+    def getParent(self) -> xmlreader.XMLReader: ...
+    def setParent(self, parent: xmlreader.XMLReader) -> None: ...
 
 def prepare_input_source(source, base=""): ...
diff --git a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi
index 0bf167b04a37..74d2efb010cd 100644
--- a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi
+++ b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi
@@ -1,20 +1,23 @@
 from collections.abc import Mapping
+from typing import overload
+from typing_extensions import Self, TypeAlias
+from xml.sax.handler import ContentHandler, DTDHandler, EntityResolver, ErrorHandler
 
 class XMLReader:
     def parse(self, source): ...
-    def getContentHandler(self): ...
-    def setContentHandler(self, handler): ...
-    def getDTDHandler(self): ...
-    def setDTDHandler(self, handler): ...
-    def getEntityResolver(self): ...
-    def setEntityResolver(self, resolver): ...
-    def getErrorHandler(self): ...
-    def setErrorHandler(self, handler): ...
+    def getContentHandler(self) -> ContentHandler: ...
+    def setContentHandler(self, handler: ContentHandler) -> None: ...
+    def getDTDHandler(self) -> DTDHandler: ...
+    def setDTDHandler(self, handler: DTDHandler) -> None: ...
+    def getEntityResolver(self) -> EntityResolver: ...
+    def setEntityResolver(self, resolver: EntityResolver) -> None: ...
+    def getErrorHandler(self) -> ErrorHandler: ...
+    def setErrorHandler(self, handler: ErrorHandler) -> None: ...
     def setLocale(self, locale): ...
-    def getFeature(self, name): ...
-    def setFeature(self, name, state): ...
-    def getProperty(self, name): ...
-    def setProperty(self, name, value): ...
+    def getFeature(self, name: str) -> object: ...
+    def setFeature(self, name: str, state: object) -> None: ...
+    def getProperty(self, name: str) -> object: ...
+    def setProperty(self, name: str, value: object) -> None: ...
 
 class IncrementalParser(XMLReader):
     def __init__(self, bufsize: int = 65536) -> None: ...
@@ -45,27 +48,40 @@ class InputSource:
 
 class AttributesImpl:
     def __init__(self, attrs: Mapping[str, str]) -> None: ...
-    def getLength(self): ...
-    def getType(self, name): ...
-    def getValue(self, name): ...
-    def getValueByQName(self, name): ...
-    def getNameByQName(self, name): ...
-    def getQNameByName(self, name): ...
-    def getNames(self): ...
-    def getQNames(self): ...
+    def getLength(self) -> int: ...
+    def getType(self, name: str) -> str: ...
+    def getValue(self, name: str) -> str: ...
+    def getValueByQName(self, name: str) -> str: ...
+    def getNameByQName(self, name: str) -> str: ...
+    def getQNameByName(self, name: str) -> str: ...
+    def getNames(self) -> list[str]: ...
+    def getQNames(self) -> list[str]: ...
     def __len__(self) -> int: ...
-    def __getitem__(self, name): ...
-    def keys(self): ...
-    def __contains__(self, name): ...
-    def get(self, name, alternative=None): ...
-    def copy(self): ...
-    def items(self): ...
-    def values(self): ...
+    def __getitem__(self, name: str) -> str: ...
+    def keys(self) -> list[str]: ...
+    def __contains__(self, name: str) -> bool: ...
+    @overload
+    def get(self, name: str, alternative: None = None) -> str | None: ...
+    @overload
+    def get(self, name: str, alternative: str) -> str: ...
+    def copy(self) -> Self: ...
+    def items(self) -> list[tuple[str, str]]: ...
+    def values(self) -> list[str]: ...
+
+_NSName: TypeAlias = tuple[str | None, str]
 
 class AttributesNSImpl(AttributesImpl):
-    def __init__(self, attrs: Mapping[tuple[str, str], str], qnames: Mapping[tuple[str, str], str]) -> None: ...
-    def getValueByQName(self, name): ...
-    def getNameByQName(self, name): ...
-    def getQNameByName(self, name): ...
-    def getQNames(self): ...
-    def copy(self): ...
+    def __init__(self, attrs: Mapping[_NSName, str], qnames: Mapping[_NSName, str]) -> None: ...
+    def getType(self, name: _NSName) -> str: ...  # type: ignore[override]
+    def getValue(self, name: _NSName) -> str: ...  # type: ignore[override]
+    def getNameByQName(self, name: str) -> _NSName: ...  # type: ignore[override]
+    def getQNameByName(self, name: _NSName) -> str: ...  # type: ignore[override]
+    def getNames(self) -> list[_NSName]: ...  # type: ignore[override]
+    def __getitem__(self, name: _NSName) -> str: ...  # type: ignore[override]
+    def keys(self) -> list[_NSName]: ...  # type: ignore[override]
+    def __contains__(self, name: _NSName) -> bool: ...  # type: ignore[override]
+    @overload  # type: ignore[override]
+    def get(self, name: _NSName, alternative: None = None) -> str | None: ...
+    @overload  # type: ignore[override]
+    def get(self, name: _NSName, alternative: str) -> str: ...
+    def items(self) -> list[tuple[_NSName, str]]: ...  # type: ignore[override]
diff --git a/mypy/typeshed/stdlib/xxlimited.pyi b/mypy/typeshed/stdlib/xxlimited.pyi
index b2fb72ad2c0b..d4f41bbaf22a 100644
--- a/mypy/typeshed/stdlib/xxlimited.pyi
+++ b/mypy/typeshed/stdlib/xxlimited.pyi
@@ -7,6 +7,8 @@ class Str: ...
 @final
 class Xxo:
     def demo(self) -> None: ...
+    if sys.version_info >= (3, 11) and sys.platform != "win32":
+        x_exports: int
 
 def foo(__i: int, __j: int) -> Any: ...
 def new() -> Xxo: ...

From 3d3e482e03c1efeaca9a1033acf06f56c1dfdf86 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sun, 1 Oct 2023 02:06:33 -0700
Subject: [PATCH 157/288] Fix cases of type object handling for overloads
 (#16168)

Fixes most of #12320. I didn't add tests for every code path because
it's niche. I also didn't fix everything, in particular the cases where
we proceed to use `ret_type`.
---
 mypy/checker.py                    |  4 ++--
 mypy/checkexpr.py                  |  8 ++++----
 mypy/messages.py                   |  2 +-
 mypy/plugins/proper_plugin.py      |  3 +--
 mypy/typeops.py                    |  2 +-
 test-data/unit/check-abstract.test | 11 ++++++++++-
 test-data/unit/pythoneval.test     |  4 ++--
 7 files changed, 21 insertions(+), 13 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index bdb636541db0..1a7a7e25d525 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -2986,7 +2986,7 @@ def check_assignment(
                 p_rvalue_type = get_proper_type(rvalue_type)
                 p_lvalue_type = get_proper_type(lvalue_type)
                 if (
-                    isinstance(p_rvalue_type, CallableType)
+                    isinstance(p_rvalue_type, FunctionLike)
                     and p_rvalue_type.is_type_obj()
                     and (
                         p_rvalue_type.type_object().is_abstract
@@ -3771,7 +3771,7 @@ def split_around_star(
 
     def type_is_iterable(self, type: Type) -> bool:
         type = get_proper_type(type)
-        if isinstance(type, CallableType) and type.is_type_obj():
+        if isinstance(type, FunctionLike) and type.is_type_obj():
             type = type.fallback
         return is_subtype(
             type, self.named_generic_type("typing.Iterable", [AnyType(TypeOfAny.special_form)])
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index df4077100efb..e81fba9bc9ef 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -694,7 +694,7 @@ def check_runtime_protocol_test(self, e: CallExpr) -> None:
         for expr in mypy.checker.flatten(e.args[1]):
             tp = get_proper_type(self.chk.lookup_type(expr))
             if (
-                isinstance(tp, CallableType)
+                isinstance(tp, FunctionLike)
                 and tp.is_type_obj()
                 and tp.type_object().is_protocol
                 and not tp.type_object().runtime_protocol
@@ -704,7 +704,7 @@ def check_runtime_protocol_test(self, e: CallExpr) -> None:
     def check_protocol_issubclass(self, e: CallExpr) -> None:
         for expr in mypy.checker.flatten(e.args[1]):
             tp = get_proper_type(self.chk.lookup_type(expr))
-            if isinstance(tp, CallableType) and tp.is_type_obj() and tp.type_object().is_protocol:
+            if isinstance(tp, FunctionLike) and tp.is_type_obj() and tp.type_object().is_protocol:
                 attr_members = non_method_protocol_members(tp.type_object())
                 if attr_members:
                     self.chk.msg.report_non_method_protocol(tp.type_object(), attr_members, e)
@@ -4190,7 +4190,7 @@ def visit_index_with_type(
         elif isinstance(left_type, TypedDictType):
             return self.visit_typeddict_index_expr(left_type, e.index)
         elif (
-            isinstance(left_type, CallableType)
+            isinstance(left_type, FunctionLike)
             and left_type.is_type_obj()
             and left_type.type_object().is_enum
         ):
@@ -5832,7 +5832,7 @@ def has_abstract_type_part(self, caller_type: ProperType, callee_type: ProperTyp
 
     def has_abstract_type(self, caller_type: ProperType, callee_type: ProperType) -> bool:
         return (
-            isinstance(caller_type, CallableType)
+            isinstance(caller_type, FunctionLike)
             and isinstance(callee_type, TypeType)
             and caller_type.is_type_obj()
             and (caller_type.type_object().is_abstract or caller_type.type_object().is_protocol)
diff --git a/mypy/messages.py b/mypy/messages.py
index 47ebd94f3d21..5d03bf1babb9 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -416,7 +416,7 @@ def has_no_attr(
         elif member == "__getitem__":
             # Indexed get.
             # TODO: Fix this consistently in format_type
-            if isinstance(original_type, CallableType) and original_type.is_type_obj():
+            if isinstance(original_type, FunctionLike) and original_type.is_type_obj():
                 self.fail(
                     "The type {} is not generic and not indexable".format(
                         format_type(original_type, self.options)
diff --git a/mypy/plugins/proper_plugin.py b/mypy/plugins/proper_plugin.py
index ab93f0d126db..a1fd05272b65 100644
--- a/mypy/plugins/proper_plugin.py
+++ b/mypy/plugins/proper_plugin.py
@@ -17,7 +17,6 @@
 from mypy.subtypes import is_proper_subtype
 from mypy.types import (
     AnyType,
-    CallableType,
     FunctionLike,
     Instance,
     NoneTyp,
@@ -131,7 +130,7 @@ def is_dangerous_target(typ: ProperType) -> bool:
     """Is this a dangerous target (right argument) for an isinstance() check?"""
     if isinstance(typ, TupleType):
         return any(is_dangerous_target(get_proper_type(t)) for t in typ.items)
-    if isinstance(typ, CallableType) and typ.is_type_obj():
+    if isinstance(typ, FunctionLike) and typ.is_type_obj():
         return typ.type_object().has_base("mypy.types.Type")
     return False
 
diff --git a/mypy/typeops.py b/mypy/typeops.py
index 10efa32c4b91..37817933a397 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -989,7 +989,7 @@ def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool
         return any(custom_special_method(t, name) for t in typ.items)
     if isinstance(typ, TupleType):
         return custom_special_method(tuple_fallback(typ), name, check_all)
-    if isinstance(typ, CallableType) and typ.is_type_obj():
+    if isinstance(typ, FunctionLike) and typ.is_type_obj():
         # Look up __method__ on the metaclass for class objects.
         return custom_special_method(typ.fallback, name, check_all)
     if isinstance(typ, AnyType):
diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test
index 299074050baa..7f91eb8e7145 100644
--- a/test-data/unit/check-abstract.test
+++ b/test-data/unit/check-abstract.test
@@ -241,7 +241,7 @@ f(GoodAlias)
 
 [case testInstantiationAbstractsInTypeForVariables]
 # flags: --no-strict-optional
-from typing import Type
+from typing import Type, overload
 from abc import abstractmethod
 
 class A:
@@ -269,6 +269,15 @@ if int():
     var_old = B # E: Can only assign concrete classes to a variable of type "Type[A]"
 if int():
     var_old = C # OK
+
+class D(A):
+    @overload
+    def __new__(cls, a) -> "D": ...
+    @overload
+    def __new__(cls) -> "D": ...
+    def __new__(cls, a=None) -> "D": ...
+if int():
+    var = D # E: Can only assign concrete classes to a variable of type "Type[A]"
 [out]
 
 [case testInstantiationAbstractsInTypeForClassMethods]
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index c5be30eac1b7..3d8e8d09a5ad 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -1801,9 +1801,9 @@ C = str | int
 D: TypeAlias = str | int
 [out]
 _testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Invalid type alias: expression is not a valid type
-_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Value of type "Type[type]" is not indexable
+_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: The type "Type[type]" is not generic and not indexable
 _testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Invalid type alias: expression is not a valid type
-_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Value of type "Type[type]" is not indexable
+_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: The type "Type[type]" is not generic and not indexable
 _testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Invalid type alias: expression is not a valid type
 _testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Unsupported left operand type for | ("Type[str]")
 _testTypeAliasNotSupportedWithNewStyleUnion.py:6: error: Invalid type alias: expression is not a valid type

From 99ba048f4887eb0fbd55cde3f4243f6c177cbf7e Mon Sep 17 00:00:00 2001
From: Thomas Grainger <tagrain@gmail.com>
Date: Sun, 1 Oct 2023 13:56:12 -0700
Subject: [PATCH 158/288] tuple slice should not propagate fallback (#16154)

Fixes #8776
---
 mypy/checkexpr.py                 |  2 +-
 mypy/types.py                     |  9 +++++++--
 test-data/unit/check-literal.test |  9 +++++----
 test-data/unit/check-tuples.test  | 10 ++++++++++
 4 files changed, 23 insertions(+), 7 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index e81fba9bc9ef..a2141680b6cb 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -4271,7 +4271,7 @@ def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Typ
 
         items: list[Type] = []
         for b, e, s in itertools.product(begin, end, stride):
-            item = left_type.slice(b, e, s)
+            item = left_type.slice(b, e, s, fallback=self.named_type("builtins.tuple"))
             if item is None:
                 self.chk.fail(message_registry.AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE, slic)
                 return AnyType(TypeOfAny.from_error)
diff --git a/mypy/types.py b/mypy/types.py
index 9817043db6c2..34ea96be25ee 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -2416,7 +2416,12 @@ def copy_modified(
             items = self.items
         return TupleType(items, fallback, self.line, self.column)
 
-    def slice(self, begin: int | None, end: int | None, stride: int | None) -> TupleType | None:
+    def slice(
+        self, begin: int | None, end: int | None, stride: int | None, *, fallback: Instance | None
+    ) -> TupleType | None:
+        if fallback is None:
+            fallback = self.partial_fallback
+
         if any(isinstance(t, UnpackType) for t in self.items):
             total = len(self.items)
             unpack_index = find_unpack_in_list(self.items)
@@ -2462,7 +2467,7 @@ def slice(self, begin: int | None, end: int | None, stride: int | None) -> Tuple
                 return None
         else:
             slice_items = self.items[begin:end:stride]
-        return TupleType(slice_items, self.partial_fallback, self.line, self.column, self.implicit)
+        return TupleType(slice_items, fallback, self.line, self.column, self.implicit)
 
 
 class TypedDictType(ProperType):
diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test
index 08c709c6b777..d9ad68385ad1 100644
--- a/test-data/unit/check-literal.test
+++ b/test-data/unit/check-literal.test
@@ -1872,8 +1872,9 @@ reveal_type(tup2[idx3])       # N: Revealed type is "__main__.D"
 reveal_type(tup2[idx4])       # N: Revealed type is "__main__.E"
 reveal_type(tup2[idx_neg1])   # N: Revealed type is "__main__.E"
 tup2[idx5]                    # E: Tuple index out of range
-reveal_type(tup2[idx2:idx4])  # N: Revealed type is "Tuple[__main__.C, __main__.D, fallback=__main__.Tup2Class]"
-reveal_type(tup2[::idx2])     # N: Revealed type is "Tuple[__main__.A, __main__.C, __main__.E, fallback=__main__.Tup2Class]"
+reveal_type(tup2[idx2:idx4])  # N: Revealed type is "Tuple[__main__.C, __main__.D]"
+reveal_type(tup2[::idx2])     # N: Revealed type is "Tuple[__main__.A, __main__.C, __main__.E]"
+tup3: Tup2Class = tup2[:]     # E: Incompatible types in assignment (expression has type "Tuple[A, B, C, D, E]", variable has type "Tup2Class")
 [builtins fixtures/slice.pyi]
 
 [case testLiteralIntelligentIndexingTypedDict]
@@ -1977,8 +1978,8 @@ reveal_type(tup1[0::idx1])      # N: Revealed type is "Union[Tuple[__main__.A, _
 tup1[idx_bad]                   # E: Tuple index out of range
 
 reveal_type(tup2[idx1])         # N: Revealed type is "Union[__main__.B, __main__.C]"
-reveal_type(tup2[idx1:idx2])    # N: Revealed type is "Union[Tuple[__main__.B, __main__.C, fallback=__main__.Tup2Class], Tuple[__main__.B, __main__.C, __main__.D, fallback=__main__.Tup2Class], Tuple[__main__.C, fallback=__main__.Tup2Class], Tuple[__main__.C, __main__.D, fallback=__main__.Tup2Class]]"
-reveal_type(tup2[0::idx1])      # N: Revealed type is "Union[Tuple[__main__.A, __main__.B, __main__.C, __main__.D, __main__.E, fallback=__main__.Tup2Class], Tuple[__main__.A, __main__.C, __main__.E, fallback=__main__.Tup2Class]]"
+reveal_type(tup2[idx1:idx2])    # N: Revealed type is "Union[Tuple[__main__.B, __main__.C], Tuple[__main__.B, __main__.C, __main__.D], Tuple[__main__.C], Tuple[__main__.C, __main__.D]]"
+reveal_type(tup2[0::idx1])      # N: Revealed type is "Union[Tuple[__main__.A, __main__.B, __main__.C, __main__.D, __main__.E], Tuple[__main__.A, __main__.C, __main__.E]]"
 tup2[idx_bad]                   # E: Tuple index out of range
 [builtins fixtures/slice.pyi]
 [out]
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 9dfee38bc0c6..1447321c0c49 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -1681,3 +1681,13 @@ def g(t: Tuple):
     reveal_type(zip(*t))  # N: Revealed type is "typing.Iterator[builtins.tuple[Any, ...]]"
     reveal_type(zip(t))  # N: Revealed type is "typing.Iterator[Tuple[Any]]"
 [builtins fixtures/tuple.pyi]
+
+[case testTupleSubclassSlice]
+from typing import Tuple
+
+class A: ...
+
+class tuple_aa_subclass(Tuple[A, A]): ...
+
+inst_tuple_aa_subclass: tuple_aa_subclass = tuple_aa_subclass((A(), A()))[:]  # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "tuple_aa_subclass")
+[builtins fixtures/tuple.pyi]

From bcd4ff231554102a6698615882074e440ebfc3c9 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Sun, 1 Oct 2023 23:48:53 +0100
Subject: [PATCH 159/288] stubtest: hint when args in stub need to be
 keyword-only (#16210)

---
 mypy/stubtest.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index a5028581f7a1..e80ea4eac71f 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -890,7 +890,10 @@ def _verify_signature(
                 # If the variable is in runtime.kwonly, it's just mislabelled as not a
                 # keyword-only argument
                 if stub_arg.variable.name not in runtime.kwonly:
-                    yield f'runtime does not have argument "{stub_arg.variable.name}"'
+                    msg = f'runtime does not have argument "{stub_arg.variable.name}"'
+                    if runtime.varkw is not None:
+                        msg += ". Maybe you forgot to make it keyword-only in the stub?"
+                    yield msg
                 else:
                     yield f'stub argument "{stub_arg.variable.name}" is not keyword-only'
             if stub.varpos is not None:

From 96803e0817c751e82fe88695647e20a8f050dee9 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Tue, 3 Oct 2023 02:43:29 -0700
Subject: [PATCH 160/288] Add meta test for new diff logic (#16211)

Follow up to #16112
---
 mypy/test/helpers.py               | 23 +++++++++------
 mypy/test/meta/test_diff_helper.py | 47 ++++++++++++++++++++++++++++++
 2 files changed, 61 insertions(+), 9 deletions(-)
 create mode 100644 mypy/test/meta/test_diff_helper.py

diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py
index a53e16e27dfa..dc34931427ec 100644
--- a/mypy/test/helpers.py
+++ b/mypy/test/helpers.py
@@ -8,7 +8,7 @@
 import shutil
 import sys
 import time
-from typing import Any, Callable, Iterable, Iterator, Pattern
+from typing import IO, Any, Callable, Iterable, Iterator, Pattern
 
 # Exporting Suite as alias to TestCase for backwards compatibility
 # TODO: avoid aliasing - import and subclass TestCase directly
@@ -70,7 +70,12 @@ def diff_ranges(
 
 
 def render_diff_range(
-    ranges: list[tuple[int, int]], content: list[str], colour: str | None = None
+    ranges: list[tuple[int, int]],
+    content: list[str],
+    *,
+    colour: str | None = None,
+    output: IO[str] = sys.stderr,
+    indent: int = 2,
 ) -> None:
     for i, line_range in enumerate(ranges):
         is_matching = i % 2 == 1
@@ -83,20 +88,20 @@ def render_diff_range(
                 and j < len(lines) - 3
             ):
                 if j == 3:
-                    sys.stderr.write("  ...\n")
+                    output.write(" " * indent + "...\n")
                 continue
 
             if not is_matching and colour:
-                sys.stderr.write(colour)
+                output.write(colour)
 
-            sys.stderr.write("  " + line)
+            output.write(" " * indent + line)
 
             if not is_matching:
                 if colour:
-                    sys.stderr.write("\033[0m")
-                sys.stderr.write(" (diff)")
+                    output.write("\033[0m")
+                output.write(" (diff)")
 
-            sys.stderr.write("\n")
+            output.write("\n")
 
 
 def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) -> None:
@@ -129,7 +134,7 @@ def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str)
 
         sys.stderr.write(
             "Update the test output using --update-data -n0 "
-            "(you can additionally use the -k selector to update only specific tests)"
+            "(you can additionally use the -k selector to update only specific tests)\n"
         )
         pytest.fail(msg, pytrace=False)
 
diff --git a/mypy/test/meta/test_diff_helper.py b/mypy/test/meta/test_diff_helper.py
new file mode 100644
index 000000000000..047751fee1d2
--- /dev/null
+++ b/mypy/test/meta/test_diff_helper.py
@@ -0,0 +1,47 @@
+import io
+
+from mypy.test.helpers import Suite, diff_ranges, render_diff_range
+
+
+class DiffHelperSuite(Suite):
+    def test_render_diff_range(self) -> None:
+        expected = ["hello", "world"]
+        actual = ["goodbye", "world"]
+
+        expected_ranges, actual_ranges = diff_ranges(expected, actual)
+
+        output = io.StringIO()
+        render_diff_range(expected_ranges, expected, output=output)
+        assert output.getvalue() == "  hello (diff)\n  world\n"
+        output = io.StringIO()
+        render_diff_range(actual_ranges, actual, output=output)
+        assert output.getvalue() == "  goodbye (diff)\n  world\n"
+
+        expected = ["a", "b", "c", "d", "e", "f", "g", "h", "circle", "i", "j"]
+        actual = ["a", "b", "c", "d", "e", "f", "g", "h", "square", "i", "j"]
+
+        expected_ranges, actual_ranges = diff_ranges(expected, actual)
+
+        output = io.StringIO()
+        render_diff_range(expected_ranges, expected, output=output, indent=0)
+        assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\ncircle (diff)\ni\nj\n"
+        output = io.StringIO()
+        render_diff_range(actual_ranges, actual, output=output, indent=0)
+        assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\nsquare (diff)\ni\nj\n"
+
+    def test_diff_ranges(self) -> None:
+        a = ["hello", "world"]
+        b = ["hello", "world"]
+
+        assert diff_ranges(a, b) == (
+            [(0, 0), (0, 2), (2, 2), (2, 2)],
+            [(0, 0), (0, 2), (2, 2), (2, 2)],
+        )
+
+        a = ["hello", "world"]
+        b = ["goodbye", "world"]
+
+        assert diff_ranges(a, b) == (
+            [(0, 1), (1, 2), (2, 2), (2, 2)],
+            [(0, 1), (1, 2), (2, 2), (2, 2)],
+        )

From d839a0b1013873e27eae334a21b56fa57cd5e178 Mon Sep 17 00:00:00 2001
From: Eli Schwartz <eschwartz93@gmail.com>
Date: Wed, 4 Oct 2023 03:31:18 -0400
Subject: [PATCH 161/288] tests: avoid leaving artifacts in the source tree
 (#16201)

When running the mypy unittests, most of the time any output files are
produced into a temporary directory and cleaned up. In one case, it
wasn't. Fix this for test_capi.
---
 mypyc/lib-rt/setup.py       | 29 ++++++++++++++++++++-
 mypyc/test/test_external.py | 50 ++++++++++++++++++-------------------
 2 files changed, 53 insertions(+), 26 deletions(-)

diff --git a/mypyc/lib-rt/setup.py b/mypyc/lib-rt/setup.py
index a31b705cd723..ef81b794c9bd 100644
--- a/mypyc/lib-rt/setup.py
+++ b/mypyc/lib-rt/setup.py
@@ -5,7 +5,10 @@
 
 from __future__ import annotations
 
+import os
+import subprocess
 import sys
+from distutils.command.build_ext import build_ext
 from distutils.core import Extension, setup
 from typing import Any
 
@@ -17,6 +20,30 @@
     kwargs = {}
     compile_args = ["--std=c++11"]
 
+
+class build_ext_custom(build_ext):
+    def get_library_names(self):
+        return ["gtest"]
+
+    def run(self):
+        gtest_dir = os.path.abspath(
+            os.path.join(os.path.dirname(__file__), "..", "external", "googletest")
+        )
+
+        os.makedirs(self.build_temp, exist_ok=True)
+
+        # Build Google Test, the C++ framework we use for testing C code.
+        # The source code for Google Test is copied to this repository.
+        subprocess.check_call(
+            ["make", "-f", os.path.join(gtest_dir, "make", "Makefile"), f"GTEST_DIR={gtest_dir}"],
+            cwd=self.build_temp,
+        )
+
+        self.library_dirs = [self.build_temp]
+
+        return build_ext.run(self)
+
+
 setup(
     name="test_capi",
     version="0.1",
@@ -34,10 +61,10 @@
             ],
             depends=["CPy.h", "mypyc_util.h", "pythonsupport.h"],
             extra_compile_args=["-Wno-unused-function", "-Wno-sign-compare"] + compile_args,
-            library_dirs=["../external/googletest/make"],
             libraries=["gtest"],
             include_dirs=["../external/googletest", "../external/googletest/include"],
             **kwargs,
         )
     ],
+    cmdclass={"build_ext": build_ext_custom},
 )
diff --git a/mypyc/test/test_external.py b/mypyc/test/test_external.py
index 6deabd81255e..22eb8019133c 100644
--- a/mypyc/test/test_external.py
+++ b/mypyc/test/test_external.py
@@ -5,6 +5,7 @@
 import os
 import subprocess
 import sys
+import tempfile
 import unittest
 
 base_dir = os.path.join(os.path.dirname(__file__), "..", "..")
@@ -16,34 +17,33 @@ class TestExternal(unittest.TestCase):
     @unittest.skipIf(sys.platform.startswith("win"), "rt tests don't work on windows")
     def test_c_unit_test(self) -> None:
         """Run C unit tests in a subprocess."""
-        # Build Google Test, the C++ framework we use for testing C code.
-        # The source code for Google Test is copied to this repository.
         cppflags: list[str] = []
         env = os.environ.copy()
         if sys.platform == "darwin":
             cppflags += ["-mmacosx-version-min=10.10", "-stdlib=libc++"]
         env["CPPFLAGS"] = " ".join(cppflags)
-        subprocess.check_call(
-            ["make", "libgtest.a"],
-            env=env,
-            cwd=os.path.join(base_dir, "mypyc", "external", "googletest", "make"),
-        )
         # Build Python wrapper for C unit tests.
-        env = os.environ.copy()
-        env["CPPFLAGS"] = " ".join(cppflags)
-        status = subprocess.check_call(
-            [sys.executable, "setup.py", "build_ext", "--inplace"],
-            env=env,
-            cwd=os.path.join(base_dir, "mypyc", "lib-rt"),
-        )
-        # Run C unit tests.
-        env = os.environ.copy()
-        if "GTEST_COLOR" not in os.environ:
-            env["GTEST_COLOR"] = "yes"  # Use fancy colors
-        status = subprocess.call(
-            [sys.executable, "-c", "import sys, test_capi; sys.exit(test_capi.run_tests())"],
-            env=env,
-            cwd=os.path.join(base_dir, "mypyc", "lib-rt"),
-        )
-        if status != 0:
-            raise AssertionError("make test: C unit test failure")
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            status = subprocess.check_call(
+                [
+                    sys.executable,
+                    "setup.py",
+                    "build_ext",
+                    f"--build-lib={tmpdir}",
+                    f"--build-temp={tmpdir}",
+                ],
+                env=env,
+                cwd=os.path.join(base_dir, "mypyc", "lib-rt"),
+            )
+            # Run C unit tests.
+            env = os.environ.copy()
+            if "GTEST_COLOR" not in os.environ:
+                env["GTEST_COLOR"] = "yes"  # Use fancy colors
+            status = subprocess.call(
+                [sys.executable, "-c", "import sys, test_capi; sys.exit(test_capi.run_tests())"],
+                env=env,
+                cwd=tmpdir,
+            )
+            if status != 0:
+                raise AssertionError("make test: C unit test failure")

From b1ba661122dc39d9bbc53cf5df334c9f56b1a729 Mon Sep 17 00:00:00 2001
From: Anthony Sottile <asottile@umich.edu>
Date: Wed, 4 Oct 2023 03:43:37 -0400
Subject: [PATCH 162/288] __qualname__ and __module__ are available in class
 bodies (#16215)

Resolves #10570
Resolves #6473
---
 mypy/semanal.py                   | 5 ++++-
 test-data/unit/check-classes.test | 8 ++++++++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index 6e103e5d382c..a476b62b31ec 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -5569,7 +5569,7 @@ def lookup(
             if not suppress_errors:
                 self.name_not_defined(name, ctx)
             return None
-        # 2. Class attributes (if within class definition)
+        # 2a. Class attributes (if within class definition)
         if self.type and not self.is_func_scope() and name in self.type.names:
             node = self.type.names[name]
             if not node.implicit:
@@ -5579,6 +5579,9 @@ def lookup(
                 # Defined through self.x assignment
                 implicit_name = True
                 implicit_node = node
+        # 2b. Class attributes __qualname__ and __module__
+        if self.type and not self.is_func_scope() and name in {"__qualname__", "__module__"}:
+            return SymbolTableNode(MDEF, Var(name, self.str_type()))
         # 3. Local (function) scopes
         for table in reversed(self.locals):
             if table is not None and name in table:
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
index 4bc1e50f7be9..cd60ec7c9a9c 100644
--- a/test-data/unit/check-classes.test
+++ b/test-data/unit/check-classes.test
@@ -8001,3 +8001,11 @@ f5(1)  # E: Argument 1 to "f5" has incompatible type "int"; expected "Integral"
        # N: Types from "numbers" aren't supported for static type checking \
        # N: See https://peps.python.org/pep-0484/#the-numeric-tower \
        # N: Consider using a protocol instead, such as typing.SupportsFloat
+
+[case testImplicitClassScopedNames]
+class C:
+    reveal_type(__module__)  # N: Revealed type is "builtins.str"
+    reveal_type(__qualname__)  # N: Revealed type is "builtins.str"
+    def f(self) -> None:
+        __module__  # E: Name "__module__" is not defined
+        __qualname__  # E: Name "__qualname__" is not defined

From a1df3353a7bc0d7ff7b3459e95d0f9684b325e9b Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Wed, 4 Oct 2023 14:49:05 +0100
Subject: [PATCH 163/288] Bump ruff and black to their latest versions (#16221)

Closes #16218
---
 .pre-commit-config.yaml | 4 ++--
 mypy/build.py           | 2 +-
 mypy/main.py            | 2 +-
 mypy/metastore.py       | 2 +-
 mypy/plugins/common.py  | 2 +-
 pyproject.toml          | 1 +
 setup.py                | 2 +-
 test-requirements.txt   | 4 ++--
 8 files changed, 10 insertions(+), 9 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8650a2868cd6..e92d498fa3cc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,11 +6,11 @@ repos:
       - id: trailing-whitespace
       - id: end-of-file-fixer
   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 23.7.0  # must match test-requirements.txt
+    rev: 23.9.1  # must match test-requirements.txt
     hooks:
       - id: black
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.281  # must match test-requirements.txt
+    rev: v0.0.292  # must match test-requirements.txt
     hooks:
       - id: ruff
         args: [--exit-non-zero-on-fix]
diff --git a/mypy/build.py b/mypy/build.py
index 39629c2dc455..b481cc6ad0dc 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -3024,7 +3024,7 @@ def dump_graph(graph: Graph, stdout: TextIO | None = None) -> None:
             if state.path:
                 try:
                     size = os.path.getsize(state.path)
-                except os.error:
+                except OSError:
                     pass
             node.sizes[mod] = size
             for dep in state.dependencies:
diff --git a/mypy/main.py b/mypy/main.py
index 3eb8a76a6de3..dff1a0362ba2 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -33,7 +33,7 @@
 def stat_proxy(path: str) -> os.stat_result:
     try:
         st = orig_stat(path)
-    except os.error as err:
+    except OSError as err:
         print(f"stat({path!r}) -> {err}")
         raise
     else:
diff --git a/mypy/metastore.py b/mypy/metastore.py
index 16cbd5adc9c8..0547f94cd671 100644
--- a/mypy/metastore.py
+++ b/mypy/metastore.py
@@ -112,7 +112,7 @@ def write(self, name: str, data: str, mtime: float | None = None) -> bool:
             if mtime is not None:
                 os.utime(path, times=(mtime, mtime))
 
-        except os.error:
+        except OSError:
             return False
         return True
 
diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py
index 84d50b7086c6..03041bfcebcd 100644
--- a/mypy/plugins/common.py
+++ b/mypy/plugins/common.py
@@ -154,7 +154,7 @@ def find_shallow_matching_overload_item(overload: Overloaded, call: CallExpr) ->
                 ):
                     ok = False
                     break
-                elif isinstance(arg_type, LiteralType) and type(arg_type.value) is bool:
+                elif isinstance(arg_type, LiteralType) and isinstance(arg_type.value, bool):
                     if not any(parse_bool(arg) == arg_type.value for arg in args):
                         ok = False
                         break
diff --git a/pyproject.toml b/pyproject.toml
index 1d6562756e22..de32618f1a39 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -58,6 +58,7 @@ unfixable = [
     "F601",  # automatic fix might obscure issue
     "F602",  # automatic fix might obscure issue
     "B018",  # automatic fix might obscure issue
+    "UP036", # sometimes it's better to just noqa this
 ]
 
 extend-exclude = [
diff --git a/setup.py b/setup.py
index bbb655ea4537..7e7793a406d0 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@
 import sys
 from typing import TYPE_CHECKING, Any
 
-if sys.version_info < (3, 8, 0):
+if sys.version_info < (3, 8, 0):  # noqa: UP036
     sys.stderr.write("ERROR: You need Python 3.8 or later to use mypy.\n")
     exit(1)
 
diff --git a/test-requirements.txt b/test-requirements.txt
index 6f7bec0375ad..bdaad16fa88e 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,7 +1,7 @@
 -r mypy-requirements.txt
 -r build-requirements.txt
 attrs>=18.0
-black==23.7.0  # must match version in .pre-commit-config.yaml
+black==23.9.1  # must match version in .pre-commit-config.yaml
 filelock>=3.3.0
 # lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses manylinux2014
 lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12'
@@ -11,6 +11,6 @@ psutil>=4.0
 pytest>=7.4.0
 pytest-xdist>=1.34.0
 pytest-cov>=2.10.0
-ruff==0.0.280  # must match version in .pre-commit-config.yaml
+ruff==0.0.292  # must match version in .pre-commit-config.yaml
 setuptools>=65.5.1
 tomli>=1.1.0  # needed even on py311+ so the self check passes with --python-version 3.7

From 10dfafe089a75dc117586ebab35723da66309398 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Wed, 4 Oct 2023 18:58:28 +0100
Subject: [PATCH 164/288] Remove stubs packages from `stubinfo.py` where the
 runtime package has added a `py.typed` file (#16226)

All of these stubs packages have been removed from typeshed, due to the
runtime package having added a `py.typed` file.
---
 mypy/stubinfo.py               |  9 ---------
 test-data/unit/pythoneval.test | 12 ++++++------
 2 files changed, 6 insertions(+), 15 deletions(-)

diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py
index 0d76a6215238..9d8dfbe43f37 100644
--- a/mypy/stubinfo.py
+++ b/mypy/stubinfo.py
@@ -26,17 +26,14 @@ def stub_distribution_name(prefix: str) -> str:
     "croniter": "types-croniter",
     "dataclasses": "types-dataclasses",
     "dateparser": "types-dateparser",
-    "datetimerange": "types-DateTimeRange",
     "dateutil": "types-python-dateutil",
     "decorator": "types-decorator",
     "deprecated": "types-Deprecated",
     "docutils": "types-docutils",
     "first": "types-first",
-    "geoip2": "types-geoip2",
     "gflags": "types-python-gflags",
     "google.protobuf": "types-protobuf",
     "markdown": "types-Markdown",
-    "maxminddb": "types-maxminddb",
     "mock": "types-mock",
     "OpenSSL": "types-pyOpenSSL",
     "paramiko": "types-paramiko",
@@ -80,8 +77,6 @@ def stub_distribution_name(prefix: str) -> str:
     "PIL": "types-Pillow",
     "PyInstaller": "types-pyinstaller",
     "Xlib": "types-python-xlib",
-    "annoy": "types-annoy",
-    "appdirs": "types-appdirs",
     "aws_xray_sdk": "types-aws-xray-sdk",
     "babel": "types-babel",
     "backports.ssl_match_hostname": "types-backports.ssl_match_hostname",
@@ -96,7 +91,6 @@ def stub_distribution_name(prefix: str) -> str:
     "consolemenu": "types-console-menu",
     "crontab": "types-python-crontab",
     "d3dshot": "types-D3DShot",
-    "dj_database_url": "types-dj-database-url",
     "dockerfile_parse": "types-dockerfile-parse",
     "docopt": "types-docopt",
     "editdistance": "types-editdistance",
@@ -111,7 +105,6 @@ def stub_distribution_name(prefix: str) -> str:
     "flake8_typing_imports": "types-flake8-typing-imports",
     "flask_cors": "types-Flask-Cors",
     "flask_migrate": "types-Flask-Migrate",
-    "flask_sqlalchemy": "types-Flask-SQLAlchemy",
     "fpdf": "types-fpdf2",
     "gdb": "types-gdb",
     "google.cloud.ndb": "types-google-cloud-ndb",
@@ -162,7 +155,6 @@ def stub_distribution_name(prefix: str) -> str:
     "tree_sitter": "types-tree-sitter",
     "tree_sitter_languages": "types-tree-sitter-languages",
     "ttkthemes": "types-ttkthemes",
-    "urllib3": "types-urllib3",
     "vobject": "types-vobject",
     "whatthepatch": "types-whatthepatch",
     "win32": "types-pywin32",
@@ -172,7 +164,6 @@ def stub_distribution_name(prefix: str) -> str:
     "win32comext": "types-pywin32",
     "win32gui": "types-pywin32",
     "xmltodict": "types-xmltodict",
-    "xxhash": "types-xxhash",
     "zxcvbn": "types-zxcvbn",
     # Stub packages that are not from typeshed
     # Since these can be installed automatically via --install-types, we have a high trust bar
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index 3d8e8d09a5ad..7dd2b2f76f8c 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -1568,24 +1568,24 @@ note: A user-defined top-level module with name "typing" is not supported
 # flags: --ignore-missing-imports
 import scribe  # No Python 3 stubs available for scribe
 from scribe import x
-import maxminddb  # Python 3 stubs available for maxminddb
+import docutils  # Python 3 stubs available for docutils
 import foobar_asdf
 import jack  # This has a stubs package but was never bundled with mypy, so ignoring works
 [out]
-_testIgnoreImportIfNoPython3StubAvailable.py:4: error: Library stubs not installed for "maxminddb"
-_testIgnoreImportIfNoPython3StubAvailable.py:4: note: Hint: "python3 -m pip install types-maxminddb"
+_testIgnoreImportIfNoPython3StubAvailable.py:4: error: Library stubs not installed for "docutils"
+_testIgnoreImportIfNoPython3StubAvailable.py:4: note: Hint: "python3 -m pip install types-docutils"
 _testIgnoreImportIfNoPython3StubAvailable.py:4: note: (or run "mypy --install-types" to install all missing stub packages)
 _testIgnoreImportIfNoPython3StubAvailable.py:4: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
 
 [case testNoPython3StubAvailable]
 import scribe
 from scribe import x
-import maxminddb
+import docutils
 [out]
 _testNoPython3StubAvailable.py:1: error: Cannot find implementation or library stub for module named "scribe"
 _testNoPython3StubAvailable.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
-_testNoPython3StubAvailable.py:3: error: Library stubs not installed for "maxminddb"
-_testNoPython3StubAvailable.py:3: note: Hint: "python3 -m pip install types-maxminddb"
+_testNoPython3StubAvailable.py:3: error: Library stubs not installed for "docutils"
+_testNoPython3StubAvailable.py:3: note: Hint: "python3 -m pip install types-docutils"
 _testNoPython3StubAvailable.py:3: note: (or run "mypy --install-types" to install all missing stub packages)
 
 

From d54e8b30301620ce5cc59a0c304b8423f07a7b60 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sun, 8 Oct 2023 19:32:51 +0100
Subject: [PATCH 165/288] Support variadic tuple packing/unpacking (#16205)

This also includes related things such as tuple concatenation,
special-cased tuple "re-packing", and star tuple unpacking in
homogeneous collections.

It looks like we are very close to the finish line (the only major
missing feature is type narrowing using `len()`, apart from this I just
need to do couple technical things, and make one final search for missed
code paths).

Some notes:
* Unfortunately, star items on the l.h.s. create lists at runtime. This means
there are various cases where `list[object]` is the best type we can
have.
* Note I now infer "precise" types for expressions like `(x, *y, z)`,
where `y` is say `tuple[int, ...]`. This may cause errors for code that
previously worked (when we will turn this feature on). For example `(1,
*[], 2)[42]` will be an error. As usual, I propose to try to be strict,
and relax if people will complain (FWIW, I expect very few false
positives from this).
* It may look like `Unpack` can now "leak" if it was never used
explicitly. This is not the case, it is just that experimental features
are enabled in tests.
* There are a couple of minor changes that affect code without variadic
types. Previously tuple type context was used inconsistently for
situations with star unpacks, I clean it up a bit (for my tests). Also I
infer `Any`-like l.h.s types after an error in tuple unpacking (when
needed) to avoid extra "Cannot determine type" errors in my tests.
---
 mypy/argmap.py                          |  23 ++-
 mypy/checker.py                         | 107 ++++++++++++-
 mypy/checkexpr.py                       | 113 +++++++++++++-
 mypy/constraints.py                     |   6 +-
 mypy/message_registry.py                |   3 +
 mypyc/irbuild/mapper.py                 |   8 +-
 test-data/unit/check-tuples.test        |   8 +-
 test-data/unit/check-typevar-tuple.test | 192 ++++++++++++++++++++++++
 8 files changed, 437 insertions(+), 23 deletions(-)

diff --git a/mypy/argmap.py b/mypy/argmap.py
index ec8463fd0625..e6700c9f1092 100644
--- a/mypy/argmap.py
+++ b/mypy/argmap.py
@@ -14,6 +14,8 @@
     Type,
     TypedDictType,
     TypeOfAny,
+    TypeVarTupleType,
+    UnpackType,
     get_proper_type,
 )
 
@@ -174,6 +176,7 @@ def expand_actual_type(
         actual_kind: nodes.ArgKind,
         formal_name: str | None,
         formal_kind: nodes.ArgKind,
+        allow_unpack: bool = False,
     ) -> Type:
         """Return the actual (caller) type(s) of a formal argument with the given kinds.
 
@@ -189,6 +192,11 @@ def expand_actual_type(
         original_actual = actual_type
         actual_type = get_proper_type(actual_type)
         if actual_kind == nodes.ARG_STAR:
+            if isinstance(actual_type, TypeVarTupleType):
+                # This code path is hit when *Ts is passed to a callable and various
+                # special-handling didn't catch this. The best thing we can do is to use
+                # the upper bound.
+                actual_type = get_proper_type(actual_type.upper_bound)
             if isinstance(actual_type, Instance) and actual_type.args:
                 from mypy.subtypes import is_subtype
 
@@ -209,7 +217,20 @@ def expand_actual_type(
                     self.tuple_index = 1
                 else:
                     self.tuple_index += 1
-                return actual_type.items[self.tuple_index - 1]
+                item = actual_type.items[self.tuple_index - 1]
+                if isinstance(item, UnpackType) and not allow_unpack:
+                    # An unpack item that doesn't have special handling, use upper bound as above.
+                    unpacked = get_proper_type(item.type)
+                    if isinstance(unpacked, TypeVarTupleType):
+                        fallback = get_proper_type(unpacked.upper_bound)
+                    else:
+                        fallback = unpacked
+                    assert (
+                        isinstance(fallback, Instance)
+                        and fallback.type.fullname == "builtins.tuple"
+                    )
+                    item = fallback.args[0]
+                return item
             elif isinstance(actual_type, ParamSpecType):
                 # ParamSpec is valid in *args but it can't be unpacked.
                 return actual_type
diff --git a/mypy/checker.py b/mypy/checker.py
index 1a7a7e25d525..e1b65a95ae98 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -205,10 +205,13 @@
     TypeType,
     TypeVarId,
     TypeVarLikeType,
+    TypeVarTupleType,
     TypeVarType,
     UnboundType,
     UninhabitedType,
     UnionType,
+    UnpackType,
+    find_unpack_in_list,
     flatten_nested_unions,
     get_proper_type,
     get_proper_types,
@@ -3430,6 +3433,37 @@ def is_assignable_slot(self, lvalue: Lvalue, typ: Type | None) -> bool:
             return all(self.is_assignable_slot(lvalue, u) for u in typ.items)
         return False
 
+    def flatten_rvalues(self, rvalues: list[Expression]) -> list[Expression]:
+        """Flatten expression list by expanding those * items that have tuple type.
+
+        For each regular type item in the tuple type use a TempNode(), for an Unpack
+        item use a corresponding StarExpr(TempNode()).
+        """
+        new_rvalues = []
+        for rv in rvalues:
+            if not isinstance(rv, StarExpr):
+                new_rvalues.append(rv)
+                continue
+            typ = get_proper_type(self.expr_checker.accept(rv.expr))
+            if not isinstance(typ, TupleType):
+                new_rvalues.append(rv)
+                continue
+            for t in typ.items:
+                if not isinstance(t, UnpackType):
+                    new_rvalues.append(TempNode(t))
+                else:
+                    unpacked = get_proper_type(t.type)
+                    if isinstance(unpacked, TypeVarTupleType):
+                        fallback = unpacked.upper_bound
+                    else:
+                        assert (
+                            isinstance(unpacked, Instance)
+                            and unpacked.type.fullname == "builtins.tuple"
+                        )
+                        fallback = unpacked
+                    new_rvalues.append(StarExpr(TempNode(fallback)))
+        return new_rvalues
+
     def check_assignment_to_multiple_lvalues(
         self,
         lvalues: list[Lvalue],
@@ -3439,18 +3473,16 @@ def check_assignment_to_multiple_lvalues(
     ) -> None:
         if isinstance(rvalue, (TupleExpr, ListExpr)):
             # Recursively go into Tuple or List expression rhs instead of
-            # using the type of rhs, because this allowed more fine grained
+            # using the type of rhs, because this allows more fine-grained
             # control in cases like: a, b = [int, str] where rhs would get
             # type List[object]
             rvalues: list[Expression] = []
             iterable_type: Type | None = None
             last_idx: int | None = None
-            for idx_rval, rval in enumerate(rvalue.items):
+            for idx_rval, rval in enumerate(self.flatten_rvalues(rvalue.items)):
                 if isinstance(rval, StarExpr):
                     typs = get_proper_type(self.expr_checker.accept(rval.expr))
-                    if isinstance(typs, TupleType):
-                        rvalues.extend([TempNode(typ) for typ in typs.items])
-                    elif self.type_is_iterable(typs) and isinstance(typs, Instance):
+                    if self.type_is_iterable(typs) and isinstance(typs, Instance):
                         if iterable_type is not None and iterable_type != self.iterable_item_type(
                             typs, rvalue
                         ):
@@ -3517,8 +3549,32 @@ def check_assignment_to_multiple_lvalues(
             self.check_multi_assignment(lvalues, rvalue, context, infer_lvalue_type)
 
     def check_rvalue_count_in_assignment(
-        self, lvalues: list[Lvalue], rvalue_count: int, context: Context
+        self,
+        lvalues: list[Lvalue],
+        rvalue_count: int,
+        context: Context,
+        rvalue_unpack: int | None = None,
     ) -> bool:
+        if rvalue_unpack is not None:
+            if not any(isinstance(e, StarExpr) for e in lvalues):
+                self.fail("Variadic tuple unpacking requires a star target", context)
+                return False
+            if len(lvalues) > rvalue_count:
+                self.fail(message_registry.TOO_MANY_TARGETS_FOR_VARIADIC_UNPACK, context)
+                return False
+            left_star_index = next(i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr))
+            left_prefix = left_star_index
+            left_suffix = len(lvalues) - left_star_index - 1
+            right_prefix = rvalue_unpack
+            right_suffix = rvalue_count - rvalue_unpack - 1
+            if left_suffix > right_suffix or left_prefix > right_prefix:
+                # Case of asymmetric unpack like:
+                #     rv: tuple[int, *Ts, int, int]
+                #     x, y, *xs, z = rv
+                # it is technically valid, but is tricky to reason about.
+                # TODO: support this (at least if the r.h.s. unpack is a homogeneous tuple).
+                self.fail(message_registry.TOO_MANY_TARGETS_FOR_VARIADIC_UNPACK, context)
+            return True
         if any(isinstance(lvalue, StarExpr) for lvalue in lvalues):
             if len(lvalues) - 1 > rvalue_count:
                 self.msg.wrong_number_values_to_unpack(rvalue_count, len(lvalues) - 1, context)
@@ -3552,6 +3608,13 @@ def check_multi_assignment(
             if len(relevant_items) == 1:
                 rvalue_type = get_proper_type(relevant_items[0])
 
+        if (
+            isinstance(rvalue_type, TupleType)
+            and find_unpack_in_list(rvalue_type.items) is not None
+        ):
+            # Normalize for consistent handling with "old-style" homogeneous tuples.
+            rvalue_type = expand_type(rvalue_type, {})
+
         if isinstance(rvalue_type, AnyType):
             for lv in lvalues:
                 if isinstance(lv, StarExpr):
@@ -3663,7 +3726,10 @@ def check_multi_assignment_from_tuple(
         undefined_rvalue: bool,
         infer_lvalue_type: bool = True,
     ) -> None:
-        if self.check_rvalue_count_in_assignment(lvalues, len(rvalue_type.items), context):
+        rvalue_unpack = find_unpack_in_list(rvalue_type.items)
+        if self.check_rvalue_count_in_assignment(
+            lvalues, len(rvalue_type.items), context, rvalue_unpack=rvalue_unpack
+        ):
             star_index = next(
                 (i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues)
             )
@@ -3708,12 +3774,37 @@ def check_multi_assignment_from_tuple(
                 self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type)
             if star_lv:
                 list_expr = ListExpr(
-                    [self.temp_node(rv_type, context) for rv_type in star_rv_types]
+                    [
+                        self.temp_node(rv_type, context)
+                        if not isinstance(rv_type, UnpackType)
+                        else StarExpr(self.temp_node(rv_type.type, context))
+                        for rv_type in star_rv_types
+                    ]
                 )
                 list_expr.set_line(context)
                 self.check_assignment(star_lv.expr, list_expr, infer_lvalue_type)
             for lv, rv_type in zip(right_lvs, right_rv_types):
                 self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type)
+        else:
+            # Store meaningful Any types for lvalues, errors are already given
+            # by check_rvalue_count_in_assignment()
+            if infer_lvalue_type:
+                for lv in lvalues:
+                    if (
+                        isinstance(lv, NameExpr)
+                        and isinstance(lv.node, Var)
+                        and lv.node.type is None
+                    ):
+                        lv.node.type = AnyType(TypeOfAny.from_error)
+                    elif isinstance(lv, StarExpr):
+                        if (
+                            isinstance(lv.expr, NameExpr)
+                            and isinstance(lv.expr.node, Var)
+                            and lv.expr.node.type is None
+                        ):
+                            lv.expr.node.type = self.named_generic_type(
+                                "builtins.list", [AnyType(TypeOfAny.from_error)]
+                            )
 
     def lvalue_type_for_inference(self, lvalues: list[Lvalue], rvalue_type: TupleType) -> Type:
         star_index = next(
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index a2141680b6cb..fd155ff87379 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -95,6 +95,7 @@
     YieldExpr,
     YieldFromExpr,
 )
+from mypy.options import TYPE_VAR_TUPLE
 from mypy.plugin import (
     FunctionContext,
     FunctionSigContext,
@@ -2510,7 +2511,11 @@ def check_argument_types(
                     )
                     self.msg.invalid_keyword_var_arg(actual_type, is_mapping, context)
                 expanded_actual = mapper.expand_actual_type(
-                    actual_type, actual_kind, callee.arg_names[i], callee_arg_kind
+                    actual_type,
+                    actual_kind,
+                    callee.arg_names[i],
+                    callee_arg_kind,
+                    allow_unpack=isinstance(callee_arg_type, UnpackType),
                 )
                 check_arg(
                     expanded_actual,
@@ -3338,7 +3343,45 @@ def visit_op_expr(self, e: OpExpr) -> Type:
                 if isinstance(proper_right_type, TupleType):
                     right_radd_method = proper_right_type.partial_fallback.type.get("__radd__")
                     if right_radd_method is None:
-                        return self.concat_tuples(proper_left_type, proper_right_type)
+                        # One cannot have two variadic items in the same tuple.
+                        if (
+                            find_unpack_in_list(proper_left_type.items) is None
+                            or find_unpack_in_list(proper_right_type.items) is None
+                        ):
+                            return self.concat_tuples(proper_left_type, proper_right_type)
+                elif (
+                    TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature
+                    and isinstance(proper_right_type, Instance)
+                    and self.chk.type_is_iterable(proper_right_type)
+                ):
+                    # Handle tuple[X, Y] + tuple[Z, ...] = tuple[X, Y, *tuple[Z, ...]].
+                    right_radd_method = proper_right_type.type.get("__radd__")
+                    if (
+                        right_radd_method is None
+                        and proper_left_type.partial_fallback.type.fullname == "builtins.tuple"
+                        and find_unpack_in_list(proper_left_type.items) is None
+                    ):
+                        item_type = self.chk.iterable_item_type(proper_right_type, e)
+                        mapped = self.chk.named_generic_type("builtins.tuple", [item_type])
+                        return proper_left_type.copy_modified(
+                            items=proper_left_type.items + [UnpackType(mapped)]
+                        )
+        if TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature:
+            # Handle tuple[X, ...] + tuple[Y, Z] = tuple[*tuple[X, ...], Y, Z].
+            if (
+                e.op == "+"
+                and isinstance(proper_left_type, Instance)
+                and proper_left_type.type.fullname == "builtins.tuple"
+            ):
+                proper_right_type = get_proper_type(self.accept(e.right))
+                if (
+                    isinstance(proper_right_type, TupleType)
+                    and proper_right_type.partial_fallback.type.fullname == "builtins.tuple"
+                    and find_unpack_in_list(proper_right_type.items) is None
+                ):
+                    return proper_right_type.copy_modified(
+                        items=[UnpackType(proper_left_type)] + proper_right_type.items
+                    )
 
         if e.op in operators.op_methods:
             method = operators.op_methods[e.op]
@@ -4721,6 +4764,19 @@ def check_lst_expr(self, e: ListExpr | SetExpr | TupleExpr, fullname: str, tag:
         )[0]
         return remove_instance_last_known_values(out)
 
+    def tuple_context_matches(self, expr: TupleExpr, ctx: TupleType) -> bool:
+        ctx_unpack_index = find_unpack_in_list(ctx.items)
+        if ctx_unpack_index is None:
+            # For fixed tuples accept everything that can possibly match, even if this
+            # requires all star items to be empty.
+            return len([e for e in expr.items if not isinstance(e, StarExpr)]) <= len(ctx.items)
+        # For variadic context, the only easy case is when structure matches exactly.
+        # TODO: try using tuple type context in more cases.
+        if len([e for e in expr.items if not isinstance(e, StarExpr)]) != 1:
+            return False
+        expr_star_index = next(i for i, lv in enumerate(expr.items) if isinstance(lv, StarExpr))
+        return len(expr.items) == len(ctx.items) and ctx_unpack_index == expr_star_index
+
     def visit_tuple_expr(self, e: TupleExpr) -> Type:
         """Type check a tuple expression."""
         # Try to determine type context for type inference.
@@ -4730,7 +4786,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type:
             tuples_in_context = [
                 t
                 for t in get_proper_types(type_context.items)
-                if (isinstance(t, TupleType) and len(t.items) == len(e.items))
+                if (isinstance(t, TupleType) and self.tuple_context_matches(e, t))
                 or is_named_instance(t, TUPLE_LIKE_INSTANCE_NAMES)
             ]
             if len(tuples_in_context) == 1:
@@ -4740,7 +4796,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type:
                 # more than one.  Either way, we can't decide on a context.
                 pass
 
-        if isinstance(type_context, TupleType):
+        if isinstance(type_context, TupleType) and self.tuple_context_matches(e, type_context):
             type_context_items = type_context.items
         elif type_context and is_named_instance(type_context, TUPLE_LIKE_INSTANCE_NAMES):
             assert isinstance(type_context, Instance)
@@ -4751,6 +4807,11 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type:
         # items that match a position in e, and we'll worry about type
         # mismatches later.
 
+        unpack_in_context = False
+        if type_context_items is not None:
+            unpack_in_context = find_unpack_in_list(type_context_items) is not None
+        seen_unpack_in_items = False
+
         # Infer item types.  Give up if there's a star expression
         # that's not a Tuple.
         items: list[Type] = []
@@ -4763,12 +4824,44 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type:
                 # TupleExpr, flatten it, so we can benefit from the
                 # context?  Counterargument: Why would anyone write
                 # (1, *(2, 3)) instead of (1, 2, 3) except in a test?
-                tt = self.accept(item.expr)
+                if unpack_in_context:
+                    # Note: this logic depends on full structure match in tuple_context_matches().
+                    assert type_context_items
+                    ctx_item = type_context_items[j]
+                    assert isinstance(ctx_item, UnpackType)
+                    ctx = ctx_item.type
+                else:
+                    ctx = None
+                tt = self.accept(item.expr, ctx)
                 tt = get_proper_type(tt)
                 if isinstance(tt, TupleType):
+                    if find_unpack_in_list(tt.items) is not None:
+                        if seen_unpack_in_items:
+                            # Multiple unpack items are not allowed in tuples,
+                            # fall back to instance type.
+                            return self.check_lst_expr(e, "builtins.tuple", "<tuple>")
+                        else:
+                            seen_unpack_in_items = True
                     items.extend(tt.items)
-                    j += len(tt.items)
+                    # Note: this logic depends on full structure match in tuple_context_matches().
+                    if unpack_in_context:
+                        j += 1
+                    else:
+                        # If there is an unpack in expressions, but not in context, this will
+                        # result in an error later, just do something predictable here.
+                        j += len(tt.items)
                 else:
+                    if (
+                        TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature
+                        and not seen_unpack_in_items
+                    ):
+                        # Handle (x, *y, z), where y is e.g. tuple[Y, ...].
+                        if isinstance(tt, Instance) and self.chk.type_is_iterable(tt):
+                            item_type = self.chk.iterable_item_type(tt, e)
+                            mapped = self.chk.named_generic_type("builtins.tuple", [item_type])
+                            items.append(UnpackType(mapped))
+                            seen_unpack_in_items = True
+                            continue
                     # A star expression that's not a Tuple.
                     # Treat the whole thing as a variable-length tuple.
                     return self.check_lst_expr(e, "builtins.tuple", "<tuple>")
@@ -4781,7 +4874,13 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type:
                 items.append(tt)
         # This is a partial fallback item type. A precise type will be calculated on demand.
         fallback_item = AnyType(TypeOfAny.special_form)
-        return TupleType(items, self.chk.named_generic_type("builtins.tuple", [fallback_item]))
+        result: ProperType = TupleType(
+            items, self.chk.named_generic_type("builtins.tuple", [fallback_item])
+        )
+        if seen_unpack_in_items:
+            # Return already normalized tuple type just in case.
+            result = expand_type(result, {})
+        return result
 
     def fast_dict_type(self, e: DictExpr) -> Type | None:
         """
diff --git a/mypy/constraints.py b/mypy/constraints.py
index ebd6765e8e82..58d0f4dbed29 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -156,7 +156,11 @@ def infer_constraints_for_callable(
                     continue
 
                 expanded_actual = mapper.expand_actual_type(
-                    actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i]
+                    actual_arg_type,
+                    arg_kinds[actual],
+                    callee.arg_names[i],
+                    callee.arg_kinds[i],
+                    allow_unpack=True,
                 )
 
                 if arg_kinds[actual] != ARG_STAR or isinstance(
diff --git a/mypy/message_registry.py b/mypy/message_registry.py
index d75a1fab1b66..dc46eb503390 100644
--- a/mypy/message_registry.py
+++ b/mypy/message_registry.py
@@ -84,6 +84,9 @@ def with_additional_msg(self, info: str) -> ErrorMessage:
 MUST_HAVE_NONE_RETURN_TYPE: Final = ErrorMessage('The return type of "{}" must be None')
 TUPLE_INDEX_OUT_OF_RANGE: Final = ErrorMessage("Tuple index out of range")
 AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE: Final = ErrorMessage("Ambiguous slice of a variadic tuple")
+TOO_MANY_TARGETS_FOR_VARIADIC_UNPACK: Final = ErrorMessage(
+    "Too many assignment targets for variadic unpack"
+)
 INVALID_SLICE_INDEX: Final = ErrorMessage("Slice index must be an integer, SupportsIndex or None")
 CANNOT_INFER_LAMBDA_TYPE: Final = ErrorMessage("Cannot infer type of lambda")
 CANNOT_ACCESS_INIT: Final = (
diff --git a/mypyc/irbuild/mapper.py b/mypyc/irbuild/mapper.py
index 5b77b4b1537b..a3abbb1f84fb 100644
--- a/mypyc/irbuild/mapper.py
+++ b/mypyc/irbuild/mapper.py
@@ -19,6 +19,7 @@
     UnboundType,
     UninhabitedType,
     UnionType,
+    find_unpack_in_list,
     get_proper_type,
 )
 from mypyc.ir.class_ir import ClassIR
@@ -112,8 +113,11 @@ def type_to_rtype(self, typ: Type | None) -> RType:
                 return object_rprimitive
         elif isinstance(typ, TupleType):
             # Use our unboxed tuples for raw tuples but fall back to
-            # being boxed for NamedTuple.
-            if typ.partial_fallback.type.fullname == "builtins.tuple":
+            # being boxed for NamedTuple or for variadic tuples.
+            if (
+                typ.partial_fallback.type.fullname == "builtins.tuple"
+                and find_unpack_in_list(typ.items) is None
+            ):
                 return RTuple([self.type_to_rtype(t) for t in typ.items])
             else:
                 return tuple_rprimitive
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 1447321c0c49..76225360a7c1 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -1100,15 +1100,15 @@ reveal_type(b)  # N: Revealed type is "Tuple[builtins.int, builtins.int, builtin
 [case testTupleWithStarExpr2]
 a = [1]
 b = (0, *a)
-reveal_type(b)  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+reveal_type(b)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]"
 [builtins fixtures/tuple.pyi]
 
 [case testTupleWithStarExpr3]
 a = ['']
 b = (0, *a)
-reveal_type(b)  # N: Revealed type is "builtins.tuple[builtins.object, ...]"
+reveal_type(b)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.str, ...]]]"
 c = (*a, '')
-reveal_type(c)  # N: Revealed type is "builtins.tuple[builtins.str, ...]"
+reveal_type(c)  # N: Revealed type is "Tuple[Unpack[builtins.tuple[builtins.str, ...]], builtins.str]"
 [builtins fixtures/tuple.pyi]
 
 [case testTupleWithStarExpr4]
@@ -1333,7 +1333,7 @@ reveal_type(subtup if int() else tup2)  # N: Revealed type is "builtins.tuple[bu
 [case testTupleWithUndersizedContext]
 a = ([1], 'x')
 if int():
-    a = ([], 'x', 1)  # E: Incompatible types in assignment (expression has type "Tuple[List[int], str, int]", variable has type "Tuple[List[int], str]")
+    a = ([], 'x', 1)  # E: Incompatible types in assignment (expression has type "Tuple[List[Never], str, int]", variable has type "Tuple[List[int], str]")
 [builtins fixtures/tuple.pyi]
 
 [case testTupleWithOversizedContext]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 850b7ef8a524..0212518bdec0 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -1527,6 +1527,198 @@ x = c1
 x = c2
 [builtins fixtures/tuple.pyi]
 
+[case testUnpackingVariadicTuplesTypeVar]
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+def foo(arg: Tuple[int, Unpack[Ts], str]) -> None:
+    x1, y1, z1 = arg  # E: Variadic tuple unpacking requires a star target
+    reveal_type(x1)  # N: Revealed type is "Any"
+    reveal_type(y1)  # N: Revealed type is "Any"
+    reveal_type(z1)  # N: Revealed type is "Any"
+    x2, *y2, z2 = arg
+    reveal_type(x2)  # N: Revealed type is "builtins.int"
+    reveal_type(y2)  # N: Revealed type is "builtins.list[builtins.object]"
+    reveal_type(z2)  # N: Revealed type is "builtins.str"
+    x3, *y3 = arg
+    reveal_type(x3)  # N: Revealed type is "builtins.int"
+    reveal_type(y3)  # N: Revealed type is "builtins.list[builtins.object]"
+    *y4, z4 = arg
+    reveal_type(y4)  # N: Revealed type is "builtins.list[builtins.object]"
+    reveal_type(z4)  # N: Revealed type is "builtins.str"
+    x5, xx5, *y5, z5, zz5 = arg  # E: Too many assignment targets for variadic unpack
+    reveal_type(x5)  # N: Revealed type is "Any"
+    reveal_type(xx5)  # N: Revealed type is "Any"
+    reveal_type(y5)  # N: Revealed type is "builtins.list[Any]"
+    reveal_type(z5)  # N: Revealed type is "Any"
+    reveal_type(zz5)  # N: Revealed type is "Any"
+[builtins fixtures/tuple.pyi]
+
+[case testUnpackingVariadicTuplesHomogeneous]
+from typing import Tuple
+from typing_extensions import Unpack
+
+def bar(arg: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None:
+    x1, y1, z1 = arg  # E: Variadic tuple unpacking requires a star target
+    reveal_type(x1)  # N: Revealed type is "Any"
+    reveal_type(y1)  # N: Revealed type is "Any"
+    reveal_type(z1)  # N: Revealed type is "Any"
+    x2, *y2, z2 = arg
+    reveal_type(x2)  # N: Revealed type is "builtins.int"
+    reveal_type(y2)  # N: Revealed type is "builtins.list[builtins.float]"
+    reveal_type(z2)  # N: Revealed type is "builtins.str"
+    x3, *y3 = arg
+    reveal_type(x3)  # N: Revealed type is "builtins.int"
+    reveal_type(y3)  # N: Revealed type is "builtins.list[builtins.object]"
+    *y4, z4 = arg
+    reveal_type(y4)  # N: Revealed type is "builtins.list[builtins.float]"
+    reveal_type(z4)  # N: Revealed type is "builtins.str"
+    x5, xx5, *y5, z5, zz5 = arg  # E: Too many assignment targets for variadic unpack
+    reveal_type(x5)  # N: Revealed type is "Any"
+    reveal_type(xx5)  # N: Revealed type is "Any"
+    reveal_type(y5)  # N: Revealed type is "builtins.list[Any]"
+    reveal_type(z5)  # N: Revealed type is "Any"
+    reveal_type(zz5)  # N: Revealed type is "Any"
+[builtins fixtures/tuple.pyi]
+
+[case testRepackingVariadicTuplesTypeVar]
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+def foo(arg: Tuple[int, Unpack[Ts], str]) -> None:
+    x1, *y1, z1 = *arg,
+    reveal_type(x1)  # N: Revealed type is "builtins.int"
+    reveal_type(y1)  # N: Revealed type is "builtins.list[builtins.object]"
+    reveal_type(z1)  # N: Revealed type is "builtins.str"
+    x2, *y2, z2 = 1, *arg, 2
+    reveal_type(x2)  # N: Revealed type is "builtins.int"
+    reveal_type(y2)  # N: Revealed type is "builtins.list[builtins.object]"
+    reveal_type(z2)  # N: Revealed type is "builtins.int"
+    x3, *y3 = *arg, 42
+    reveal_type(x3)  # N: Revealed type is "builtins.int"
+    reveal_type(y3)  # N: Revealed type is "builtins.list[builtins.object]"
+    *y4, z4 = 42, *arg
+    reveal_type(y4)  # N: Revealed type is "builtins.list[builtins.object]"
+    reveal_type(z4)  # N: Revealed type is "builtins.str"
+    x5, xx5, *y5, z5, zz5 = 1, *arg, 2
+    reveal_type(x5)  # N: Revealed type is "builtins.int"
+    reveal_type(xx5)  # N: Revealed type is "builtins.int"
+    reveal_type(y5)  # N: Revealed type is "builtins.list[builtins.object]"
+    reveal_type(z5)  # N: Revealed type is "builtins.str"
+    reveal_type(zz5)  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]
+
+[case testRepackingVariadicTuplesHomogeneous]
+from typing import Tuple
+from typing_extensions import Unpack
+
+def foo(arg: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None:
+    x1, *y1, z1 = *arg,
+    reveal_type(x1)  # N: Revealed type is "builtins.int"
+    reveal_type(y1)  # N: Revealed type is "builtins.list[builtins.float]"
+    reveal_type(z1)  # N: Revealed type is "builtins.str"
+    x2, *y2, z2 = 1, *arg, 2
+    reveal_type(x2)  # N: Revealed type is "builtins.int"
+    reveal_type(y2)  # N: Revealed type is "builtins.list[builtins.object]"
+    reveal_type(z2)  # N: Revealed type is "builtins.int"
+    x3, *y3 = *arg, 42
+    reveal_type(x3)  # N: Revealed type is "builtins.int"
+    reveal_type(y3)  # N: Revealed type is "builtins.list[builtins.object]"
+    *y4, z4 = 42, *arg
+    reveal_type(y4)  # N: Revealed type is "builtins.list[builtins.float]"
+    reveal_type(z4)  # N: Revealed type is "builtins.str"
+    x5, xx5, *y5, z5, zz5 = 1, *arg, 2
+    reveal_type(x5)  # N: Revealed type is "builtins.int"
+    reveal_type(xx5)  # N: Revealed type is "builtins.int"
+    reveal_type(y5)  # N: Revealed type is "builtins.list[builtins.float]"
+    reveal_type(z5)  # N: Revealed type is "builtins.str"
+    reveal_type(zz5)  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]
+
+[case testPackingVariadicTuplesTypeVar]
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+def foo(arg: Tuple[int, Unpack[Ts], str]) -> None:
+    x = *arg,
+    reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+    y = 1, *arg, 2
+    reveal_type(y)  # N: Revealed type is "Tuple[builtins.int, builtins.int, Unpack[Ts`-1], builtins.str, builtins.int]"
+    z = (*arg, *arg)
+    reveal_type(z)  # N: Revealed type is "builtins.tuple[builtins.object, ...]"
+[builtins fixtures/tuple.pyi]
+
+[case testPackingVariadicTuplesHomogeneous]
+from typing import Tuple
+from typing_extensions import Unpack
+
+a: Tuple[float, ...]
+b: Tuple[int, Unpack[Tuple[float, ...]], str]
+
+x = *a,
+reveal_type(x)  # N: Revealed type is "builtins.tuple[builtins.float, ...]"
+y = 1, *a, 2
+reveal_type(y)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.int]"
+z = (*a, *a)
+reveal_type(z)  # N: Revealed type is "builtins.tuple[builtins.float, ...]"
+
+x2 = *b,
+reveal_type(x2)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+y2 = 1, *b, 2
+reveal_type(y2)  # N: Revealed type is "Tuple[builtins.int, builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str, builtins.int]"
+z2 = (*b, *b)
+reveal_type(z2)  # N: Revealed type is "builtins.tuple[builtins.object, ...]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicTupleInListSetExpr]
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+vt: Tuple[int, Unpack[Tuple[float, ...]], int]
+reveal_type([1, *vt])  # N: Revealed type is "builtins.list[builtins.float]"
+reveal_type({1, *vt})  # N: Revealed type is "builtins.set[builtins.float]"
+
+Ts = TypeVarTuple("Ts")
+def foo(arg: Tuple[int, Unpack[Ts], str]) -> None:
+    reveal_type([1, *arg])  # N: Revealed type is "builtins.list[builtins.object]"
+    reveal_type({1, *arg})  # N: Revealed type is "builtins.set[builtins.object]"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testVariadicTupleInTupleContext]
+from typing import Tuple, Optional
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+def test(x: Optional[Tuple[Unpack[Ts]]] = None) -> Tuple[Unpack[Ts]]: ...
+
+vt: Tuple[int, Unpack[Tuple[float, ...]], int]
+vt = 1, *test(), 2  # OK, type context is used
+vt2 = 1, *test(), 2  # E: Need type annotation for "vt2"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicTupleConcatenation]
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+vtf: Tuple[float, ...]
+vt: Tuple[int, Unpack[Tuple[float, ...]], int]
+
+reveal_type(vt + (1, 2))  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.int, Literal[1]?, Literal[2]?]"
+reveal_type((1, 2) + vt)  # N: Revealed type is "Tuple[Literal[1]?, Literal[2]?, builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.int]"
+reveal_type(vt + vt)  # N: Revealed type is "builtins.tuple[builtins.float, ...]"
+reveal_type(vtf + (1, 2))  # N: Revealed type is "Tuple[Unpack[builtins.tuple[builtins.float, ...]], Literal[1]?, Literal[2]?]"
+reveal_type((1, 2) + vtf)  # N: Revealed type is "Tuple[Literal[1]?, Literal[2]?, Unpack[builtins.tuple[builtins.float, ...]]]"
+
+Ts = TypeVarTuple("Ts")
+def foo(arg: Tuple[int, Unpack[Ts], str]) -> None:
+    reveal_type(arg + (1, 2))  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str, Literal[1]?, Literal[2]?]"
+    reveal_type((1, 2) + arg)  # N: Revealed type is "Tuple[Literal[1]?, Literal[2]?, builtins.int, Unpack[Ts`-1], builtins.str]"
+    reveal_type(arg + arg)  # N: Revealed type is "builtins.tuple[builtins.object, ...]"
+[builtins fixtures/tuple.pyi]
+
 [case testTypeVarTupleAnyOverload]
 from typing import Any, Generic, overload, Tuple
 from typing_extensions import TypeVarTuple, Unpack

From 3c7bdb22407dea87039e9fd3c551df157794c9f0 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sun, 8 Oct 2023 21:36:58 +0300
Subject: [PATCH 166/288] Use SPDX license identifier (#16230)

It does not change the license itself, only its identifier in
`setup.py`, so external tools can read it better.

Full list: https://spdx.org/licenses/
Closes https://github.com/python/mypy/issues/16228
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 7e7793a406d0..5dba26fb10e0 100644
--- a/setup.py
+++ b/setup.py
@@ -202,7 +202,7 @@ def run(self):
     author="Jukka Lehtosalo",
     author_email="jukka.lehtosalo@iki.fi",
     url="https://www.mypy-lang.org/",
-    license="MIT License",
+    license="MIT",
     py_modules=[],
     ext_modules=ext_modules,
     packages=find_packages(),

From ff7ac75387d3b5c7d0eaa4573bf2a0723bf3a3fc Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sun, 8 Oct 2023 11:39:01 -0700
Subject: [PATCH 167/288] Add an extra for mypyc dependencies (#16229)

Fixes #15579
---
 mypyc/doc/getting_started.rst | 6 +++---
 setup.py                      | 1 +
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/mypyc/doc/getting_started.rst b/mypyc/doc/getting_started.rst
index 2db8aae149ec..adc617419ffa 100644
--- a/mypyc/doc/getting_started.rst
+++ b/mypyc/doc/getting_started.rst
@@ -38,17 +38,17 @@ Installation
 ------------
 
 Mypyc is shipped as part of the mypy distribution. Install mypy like
-this (you need Python 3.5 or later):
+this (you need Python 3.8 or later):
 
 .. code-block::
 
-    $ python3 -m pip install -U mypy
+    $ python3 -m pip install -U 'mypy[mypyc]'
 
 On some systems you need to use this instead:
 
 .. code-block::
 
-    $ python -m pip install -U mypy
+    $ python -m pip install -U 'mypy[mypyc]'
 
 Example program
 ---------------
diff --git a/setup.py b/setup.py
index 5dba26fb10e0..dcbdc96b3ccf 100644
--- a/setup.py
+++ b/setup.py
@@ -227,6 +227,7 @@ def run(self):
     # Same here.
     extras_require={
         "dmypy": "psutil >= 4.0",
+        "mypyc": "setuptools >= 50",
         "python2": "",
         "reports": "lxml",
         "install-types": "pip",

From e87b62fcda423a9cd6db9076f66459fe47491568 Mon Sep 17 00:00:00 2001
From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com>
Date: Tue, 10 Oct 2023 00:00:29 +1000
Subject: [PATCH 168/288] =?UTF-8?q?(=F0=9F=8E=81)=20drop=20'dev'=20from=20?=
 =?UTF-8?q?3.12=20in=20the=20CI=20(#16239)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: KotlinIsland <kotlinisland@users.noreply.github.com>
---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3bcd9e059589..afa5d5823ea9 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -64,7 +64,7 @@ jobs:
           tox_extra_args: "-n 2"
           test_mypyc: true
         - name: Test suite with py312-ubuntu, mypyc-compiled
-          python: '3.12-dev'
+          python: '3.12'
           arch: x64
           os: ubuntu-latest
           toxenv: py

From 8b6d21373f44959d8aa194723e871e5468ad5c71 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Mon, 9 Oct 2023 23:33:18 -0700
Subject: [PATCH 169/288] Fix partially defined in the case of missing type
 maps (#15995)

Thanks AlexWaygood for sending me on this adventure. This took me a
while to debug!

When we don't need to warn about unreachable code, we don't end up
calling `self.is_noop_for_reachability(s)` (which is meant to tell us
whether the code should be warned about or is `raise AssertionError` or
`typing.assert_never(never)` or something).
https://github.com/python/mypy/blob/6f650cff9ab21f81069e0ae30c92eae94219ea63/mypy/checker.py#L2748

This innocuous check has a side effect that turns out to be important
for the partially undefined checks. These checks work by reaching into
the type map populated by the checker. But if we never actually ended up
analysing the code, we never populate the type map.

This therefore changes things to assume that if we couldn't find the
expression in the type map, it's probably because it was unreachable.
---
 mypy/partially_defined.py                    |  2 +-
 test-data/unit/check-possibly-undefined.test | 17 +++++++++++++++++
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py
index 47cbd671f168..b7f577110fa8 100644
--- a/mypy/partially_defined.py
+++ b/mypy/partially_defined.py
@@ -506,7 +506,7 @@ def visit_break_stmt(self, o: BreakStmt) -> None:
         self.tracker.skip_branch()
 
     def visit_expression_stmt(self, o: ExpressionStmt) -> None:
-        if isinstance(self.type_map.get(o.expr, None), UninhabitedType):
+        if isinstance(self.type_map.get(o.expr, None), (UninhabitedType, type(None))):
             self.tracker.skip_branch()
         super().visit_expression_stmt(o)
 
diff --git a/test-data/unit/check-possibly-undefined.test b/test-data/unit/check-possibly-undefined.test
index ebceef88b537..ae277949c049 100644
--- a/test-data/unit/check-possibly-undefined.test
+++ b/test-data/unit/check-possibly-undefined.test
@@ -1026,3 +1026,20 @@ class B:
         else:
             # Same as above but in a loop.
             b = a  # E: Name "a" may be undefined
+
+[case testUnreachableCausingMissingTypeMap]
+# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def --no-warn-unreachable
+# Regression test for https://github.com/python/mypy/issues/15958
+from typing import Union, NoReturn
+
+def assert_never(__x: NoReturn) -> NoReturn: ...
+
+def foo(x: Union[int, str]) -> None:
+    if isinstance(x, str):
+        f = "foo"
+    elif isinstance(x, int):
+        f = "bar"
+    else:
+        assert_never(x)
+    f  # OK
+[builtins fixtures/tuple.pyi]

From 2c1009ed7cde0247c859bbccb852490b8c91bd97 Mon Sep 17 00:00:00 2001
From: Valentin Stanciu <250871+svalentin@users.noreply.github.com>
Date: Thu, 12 Oct 2023 11:58:18 +0100
Subject: [PATCH 170/288] show dmypy errors post serving (#16250)

After dmypy starts serving, stdout and stderr get captured. If we have
an error, we assume we can send it to the client. However, if we have an
error outside of client communication, that error is lost. The easiest
way to see this is to run dmypy in daemonize mode, run a check once,
then Control-C to send a KeyboardInterrupt. That exception is not
printed though it should be. After this change you can clearly see it.

```
term1$ python3 -m mypy.dmypy daemon

term2$ python3 -m mypy.dmypy check -v test.py
[... some output ...]

term1$ [Control-C]
^CTraceback (most recent call last):
  File "/home/svalentin/src/mypy-svalentin/mypy/dmypy_server.py", line 220, in serve
    with server:
  File "/home/svalentin/src/mypy-svalentin/mypy/ipc.py", line 232, in __enter__
    self.connection, _ = self.sock.accept()
  File "/usr/lib/python3.8/socket.py", line 292, in accept
    fd, addr = self._accept()
KeyboardInterrupt
Traceback (most recent call last):
  File "/usr/lib/python3.8/runpy.py", line 194, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/usr/lib/python3.8/runpy.py", line 87, in _run_code
    exec(code, run_globals)
  File "/home/svalentin/src/mypy-svalentin/mypy/dmypy/__main__.py", line 6, in <module>
    console_entry()
  File "/home/svalentin/src/mypy-svalentin/mypy/dmypy/client.py", line 748, in console_entry
    main(sys.argv[1:])
  File "/home/svalentin/src/mypy-svalentin/mypy/dmypy/client.py", line 275, in main
    args.action(args)
  File "/home/svalentin/src/mypy-svalentin/mypy/dmypy/client.py", line 629, in do_daemon
    Server(options, args.status_file, timeout=args.timeout).serve()
  File "/home/svalentin/src/mypy-svalentin/mypy/dmypy_server.py", line 220, in serve
    with server:
  File "/home/svalentin/src/mypy-svalentin/mypy/ipc.py", line 232, in __enter__
    self.connection, _ = self.sock.accept()
  File "/usr/lib/python3.8/socket.py", line 292, in accept
    fd, addr = self._accept()
KeyboardInterrupt
```
---
 mypy/dmypy_server.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py
index a50ebc5415ba..faa9a23fadfb 100644
--- a/mypy/dmypy_server.py
+++ b/mypy/dmypy_server.py
@@ -210,6 +210,8 @@ def serve(self) -> None:
         """Serve requests, synchronously (no thread or fork)."""
         command = None
         server = IPCServer(CONNECTION_NAME, self.timeout)
+        orig_stdout = sys.stdout
+        orig_stderr = sys.stderr
         try:
             with open(self.status_file, "w") as f:
                 json.dump({"pid": os.getpid(), "connection_name": server.connection_name}, f)
@@ -252,6 +254,10 @@ def serve(self) -> None:
                         reset_global_state()
                         sys.exit(0)
         finally:
+            # Revert stdout/stderr so we can see any errors.
+            sys.stdout = orig_stdout
+            sys.stderr = orig_stderr
+
             # If the final command is something other than a clean
             # stop, remove the status file. (We can't just
             # simplify the logic and always remove the file, since

From 72605dc12a89b9c12a502ebfad494b4b9d9b5160 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 12 Oct 2023 21:25:30 +0100
Subject: [PATCH 171/288] Fix crash on ParamSpec unification (#16251)

Fixes https://github.com/python/mypy/issues/16245
Fixes https://github.com/python/mypy/issues/16248

Unfortunately I was a bit reckless with parentheses, but in my defense
`unify_generic_callable()` is kind of broken for a long time, as it can
return "solutions" like ```{1: T`1}```. We need a more principled
approach there (IIRC there is already an issue about this in the scope
of `--new-type-inference`).

(The fix is quite trivial so I am not going to wait for review too long
to save time, unless there will be some issues in `mypy_primer` etc.)
---
 mypy/expandtype.py                            | 10 +++--
 mypy/types.py                                 | 10 -----
 .../unit/check-parameter-specification.test   | 37 +++++++++++++++++++
 3 files changed, 43 insertions(+), 14 deletions(-)

diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index b233561e19c2..4acb51e22268 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -241,7 +241,7 @@ def visit_param_spec(self, t: ParamSpecType) -> Type:
             return repl.copy_modified(
                 flavor=t.flavor,
                 prefix=t.prefix.copy_modified(
-                    arg_types=self.expand_types(t.prefix.arg_types + repl.prefix.arg_types),
+                    arg_types=self.expand_types(t.prefix.arg_types) + repl.prefix.arg_types,
                     arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds,
                     arg_names=t.prefix.arg_names + repl.prefix.arg_names,
                 ),
@@ -249,7 +249,7 @@ def visit_param_spec(self, t: ParamSpecType) -> Type:
         elif isinstance(repl, Parameters):
             assert t.flavor == ParamSpecFlavor.BARE
             return Parameters(
-                self.expand_types(t.prefix.arg_types + repl.arg_types),
+                self.expand_types(t.prefix.arg_types) + repl.arg_types,
                 t.prefix.arg_kinds + repl.arg_kinds,
                 t.prefix.arg_names + repl.arg_names,
                 variables=[*t.prefix.variables, *repl.variables],
@@ -333,12 +333,14 @@ def visit_callable_type(self, t: CallableType) -> CallableType:
             # the replacement is ignored.
             if isinstance(repl, Parameters):
                 # We need to expand both the types in the prefix and the ParamSpec itself
-                t = t.expand_param_spec(repl)
                 return t.copy_modified(
-                    arg_types=self.expand_types(t.arg_types),
+                    arg_types=self.expand_types(t.arg_types[:-2]) + repl.arg_types,
+                    arg_kinds=t.arg_kinds[:-2] + repl.arg_kinds,
+                    arg_names=t.arg_names[:-2] + repl.arg_names,
                     ret_type=t.ret_type.accept(self),
                     type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None),
                     imprecise_arg_kinds=(t.imprecise_arg_kinds or repl.imprecise_arg_kinds),
+                    variables=[*repl.variables, *t.variables],
                 )
             elif isinstance(repl, ParamSpecType):
                 # We're substituting one ParamSpec for another; this can mean that the prefix
diff --git a/mypy/types.py b/mypy/types.py
index 34ea96be25ee..09ba68aae88a 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -2069,16 +2069,6 @@ def param_spec(self) -> ParamSpecType | None:
         prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2])
         return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix)
 
-    def expand_param_spec(self, c: Parameters) -> CallableType:
-        variables = c.variables
-        return self.copy_modified(
-            arg_types=self.arg_types[:-2] + c.arg_types,
-            arg_kinds=self.arg_kinds[:-2] + c.arg_kinds,
-            arg_names=self.arg_names[:-2] + c.arg_names,
-            is_ellipsis_args=c.is_ellipsis_args,
-            variables=[*variables, *self.variables],
-        )
-
     def with_unpacked_kwargs(self) -> NormalizedCallableType:
         if not self.unpack_kwargs:
             return cast(NormalizedCallableType, self)
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index da831d29dd43..bb7859070f00 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1976,3 +1976,40 @@ g(cb, y=0, x='a')  # OK
 g(cb, y='a', x=0)  # E: Argument "y" to "g" has incompatible type "str"; expected "int" \
                    # E: Argument "x" to "g" has incompatible type "int"; expected "str"
 [builtins fixtures/paramspec.pyi]
+
+[case testParamSpecNoCrashOnUnificationAlias]
+import mod
+[file mod.pyi]
+from typing import Callable, Protocol, TypeVar, overload
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+R_co = TypeVar("R_co", covariant=True)
+Handler = Callable[P, R_co]
+
+class HandlerDecorator(Protocol):
+    def __call__(self, handler: Handler[P, R_co]) -> Handler[P, R_co]: ...
+
+@overload
+def event(event_handler: Handler[P, R_co]) -> Handler[P, R_co]: ...
+@overload
+def event(namespace: str, *args, **kwargs) -> HandlerDecorator: ...
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecNoCrashOnUnificationCallable]
+import mod
+[file mod.pyi]
+from typing import Callable, Protocol, TypeVar, overload
+from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+R_co = TypeVar("R_co", covariant=True)
+
+class HandlerDecorator(Protocol):
+    def __call__(self, handler: Callable[P, R_co]) -> Callable[P, R_co]: ...
+
+@overload
+def event(event_handler: Callable[P, R_co]) -> Callable[P, R_co]: ...
+@overload
+def event(namespace: str, *args, **kwargs) -> HandlerDecorator: ...
+[builtins fixtures/paramspec.pyi]

From fbc48afccdf47de43fba73f2bc0eaf43a3f7b310 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Fri, 13 Oct 2023 11:28:41 +0200
Subject: [PATCH 172/288] Fix `coverage` config (#16258)

fixes #16255
---
 pyproject.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index de32618f1a39..c43253fed982 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -109,13 +109,13 @@ xfail_strict = true
 
 [tool.coverage.run]
 branch = true
-source = "mypy"
+source = ["mypy"]
 parallel = true
 
 [tool.coverage.report]
 show_missing = true
 skip_covered = true
-omit = 'mypy/test/*'
+omit = ['mypy/test/*']
 exclude_lines = [
   '\#\s*pragma: no cover',
   '^\s*raise AssertionError\b',

From 2e52e98fd2873775a58616c097e93c96f58fc991 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 13 Oct 2023 11:30:54 +0100
Subject: [PATCH 173/288] Fix crash on ParamSpec unification (for real)
 (#16259)

Fixes https://github.com/python/mypy/issues/16257

Parentheses strike back. I hope this is the last place where I put
them wrong.
---
 mypy/expandtype.py                            |  3 +-
 .../unit/check-parameter-specification.test   | 33 +++++++++++++++++++
 2 files changed, 35 insertions(+), 1 deletion(-)

diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 4acb51e22268..44716e6da013 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -348,7 +348,8 @@ def visit_callable_type(self, t: CallableType) -> CallableType:
                 prefix = repl.prefix
                 clean_repl = repl.copy_modified(prefix=Parameters([], [], []))
                 return t.copy_modified(
-                    arg_types=self.expand_types(t.arg_types[:-2] + prefix.arg_types)
+                    arg_types=self.expand_types(t.arg_types[:-2])
+                    + prefix.arg_types
                     + [
                         clean_repl.with_flavor(ParamSpecFlavor.ARGS),
                         clean_repl.with_flavor(ParamSpecFlavor.KWARGS),
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index bb7859070f00..5b6024da687e 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -2013,3 +2013,36 @@ def event(event_handler: Callable[P, R_co]) -> Callable[P, R_co]: ...
 @overload
 def event(namespace: str, *args, **kwargs) -> HandlerDecorator: ...
 [builtins fixtures/paramspec.pyi]
+
+[case testParamSpecNoCrashOnUnificationPrefix]
+from typing import Any, Callable, TypeVar, overload
+from typing_extensions import ParamSpec, Concatenate
+
+T = TypeVar("T")
+U = TypeVar("U")
+V = TypeVar("V")
+W = TypeVar("W")
+P = ParamSpec("P")
+
+@overload
+def call(
+    func: Callable[Concatenate[T, P], U],
+    x: T,
+    *args: Any,
+    **kwargs: Any,
+) -> U: ...
+@overload
+def call(
+    func: Callable[Concatenate[T, U, P], V],
+    x: T,
+    y: U,
+    *args: Any,
+    **kwargs: Any,
+) -> V: ...
+def call(*args: Any, **kwargs: Any) -> Any: ...
+
+def test1(x: int) -> str: ...
+def test2(x: int, y: int) -> str: ...
+reveal_type(call(test1, 1))  # N: Revealed type is "builtins.str"
+reveal_type(call(test2, 1, 2))  # N: Revealed type is "builtins.str"
+[builtins fixtures/paramspec.pyi]

From feb0fa75ca7f3abb1217d94f6ffb55994b9a31c8 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
 <41898282+github-actions[bot]@users.noreply.github.com>
Date: Sun, 15 Oct 2023 00:33:28 -0700
Subject: [PATCH 174/288] Sync typeshed (#16266)

---
 mypy/typeshed/stdlib/VERSIONS                 |   1 +
 mypy/typeshed/stdlib/_ast.pyi                 |   2 +-
 mypy/typeshed/stdlib/_ctypes.pyi              |   2 +
 mypy/typeshed/stdlib/_curses.pyi              |   3 +-
 mypy/typeshed/stdlib/_locale.pyi              | 100 +++++++++
 mypy/typeshed/stdlib/_msi.pyi                 |   1 +
 mypy/typeshed/stdlib/_winapi.pyi              |   1 +
 mypy/typeshed/stdlib/argparse.pyi             |   6 +-
 mypy/typeshed/stdlib/asyncio/tasks.pyi        |  68 ++++++-
 .../stdlib/asyncio/windows_events.pyi         |  12 +-
 mypy/typeshed/stdlib/locale.pyi               | 191 +++++++++---------
 mypy/typeshed/stdlib/mimetypes.pyi            |   3 +-
 mypy/typeshed/stdlib/mmap.pyi                 |   2 +-
 mypy/typeshed/stdlib/msilib/text.pyi          |   2 +-
 mypy/typeshed/stdlib/msvcrt.pyi               |   6 +-
 mypy/typeshed/stdlib/os/__init__.pyi          |  19 +-
 mypy/typeshed/stdlib/posix.pyi                |   5 +-
 mypy/typeshed/stdlib/select.pyi               |   4 +-
 mypy/typeshed/stdlib/selectors.pyi            |  18 +-
 mypy/typeshed/stdlib/signal.pyi               |  10 +-
 mypy/typeshed/stdlib/socket.pyi               |   4 +-
 mypy/typeshed/stdlib/ssl.pyi                  |   6 +-
 mypy/typeshed/stdlib/subprocess.pyi           |   1 +
 mypy/typeshed/stdlib/winreg.pyi               |   2 +
 mypy/typeshed/stdlib/winsound.pyi             |   1 +
 mypy/typeshed/stdlib/zipfile.pyi              |  36 +++-
 26 files changed, 363 insertions(+), 143 deletions(-)
 create mode 100644 mypy/typeshed/stdlib/_locale.pyi

diff --git a/mypy/typeshed/stdlib/VERSIONS b/mypy/typeshed/stdlib/VERSIONS
index 49433e346765..9d4636a29a1d 100644
--- a/mypy/typeshed/stdlib/VERSIONS
+++ b/mypy/typeshed/stdlib/VERSIONS
@@ -35,6 +35,7 @@ _dummy_threading: 2.7-3.8
 _heapq: 2.7-
 _imp: 3.0-
 _json: 2.7-
+_locale: 2.7-
 _markupbase: 2.7-
 _msi: 2.7-
 _operator: 3.4-
diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi
index 05e2a08fdc88..402b770c0462 100644
--- a/mypy/typeshed/stdlib/_ast.pyi
+++ b/mypy/typeshed/stdlib/_ast.pyi
@@ -602,7 +602,7 @@ if sys.version_info >= (3, 12):
         name: _Identifier
 
     class TypeAlias(stmt):
-        __match_args__ = ("name", "typeparams", "value")
+        __match_args__ = ("name", "type_params", "value")
         name: Name
         type_params: list[type_param]
         value: expr
diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi
index b48b1f7d318c..8a891971e9f1 100644
--- a/mypy/typeshed/stdlib/_ctypes.pyi
+++ b/mypy/typeshed/stdlib/_ctypes.pyi
@@ -44,6 +44,8 @@ if sys.platform == "win32":
     def FormatError(code: int = ...) -> str: ...
     def get_last_error() -> int: ...
     def set_last_error(value: int) -> int: ...
+    def LoadLibrary(__name: str, __load_flags: int = 0) -> int: ...
+    def FreeLibrary(__handle: int) -> None: ...
 
 class _CDataMeta(type):
     # By default mypy complains about the following two methods, because strictly speaking cls
diff --git a/mypy/typeshed/stdlib/_curses.pyi b/mypy/typeshed/stdlib/_curses.pyi
index e2319a5fcc1f..3604f7abedb5 100644
--- a/mypy/typeshed/stdlib/_curses.pyi
+++ b/mypy/typeshed/stdlib/_curses.pyi
@@ -61,7 +61,8 @@ if sys.platform != "win32":
     A_DIM: int
     A_HORIZONTAL: int
     A_INVIS: int
-    A_ITALIC: int
+    if sys.platform != "darwin":
+        A_ITALIC: int
     A_LEFT: int
     A_LOW: int
     A_NORMAL: int
diff --git a/mypy/typeshed/stdlib/_locale.pyi b/mypy/typeshed/stdlib/_locale.pyi
new file mode 100644
index 000000000000..2b2fe03e4510
--- /dev/null
+++ b/mypy/typeshed/stdlib/_locale.pyi
@@ -0,0 +1,100 @@
+import sys
+from _typeshed import StrPath
+from collections.abc import Iterable, Mapping
+
+LC_CTYPE: int
+LC_COLLATE: int
+LC_TIME: int
+LC_MONETARY: int
+LC_NUMERIC: int
+LC_ALL: int
+CHAR_MAX: int
+
+def setlocale(category: int, locale: str | Iterable[str | None] | None = None) -> str: ...
+def localeconv() -> Mapping[str, int | str | list[int]]: ...
+
+if sys.version_info >= (3, 11):
+    def getencoding() -> str: ...
+
+def strcoll(__os1: str, __os2: str) -> int: ...
+def strxfrm(__string: str) -> str: ...
+
+# native gettext functions
+# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
+# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
+if sys.platform != "win32":
+    LC_MESSAGES: int
+
+    ABDAY_1: int
+    ABDAY_2: int
+    ABDAY_3: int
+    ABDAY_4: int
+    ABDAY_5: int
+    ABDAY_6: int
+    ABDAY_7: int
+
+    ABMON_1: int
+    ABMON_2: int
+    ABMON_3: int
+    ABMON_4: int
+    ABMON_5: int
+    ABMON_6: int
+    ABMON_7: int
+    ABMON_8: int
+    ABMON_9: int
+    ABMON_10: int
+    ABMON_11: int
+    ABMON_12: int
+
+    DAY_1: int
+    DAY_2: int
+    DAY_3: int
+    DAY_4: int
+    DAY_5: int
+    DAY_6: int
+    DAY_7: int
+
+    ERA: int
+    ERA_D_T_FMT: int
+    ERA_D_FMT: int
+    ERA_T_FMT: int
+
+    MON_1: int
+    MON_2: int
+    MON_3: int
+    MON_4: int
+    MON_5: int
+    MON_6: int
+    MON_7: int
+    MON_8: int
+    MON_9: int
+    MON_10: int
+    MON_11: int
+    MON_12: int
+
+    CODESET: int
+    D_T_FMT: int
+    D_FMT: int
+    T_FMT: int
+    T_FMT_AMPM: int
+    AM_STR: int
+    PM_STR: int
+
+    RADIXCHAR: int
+    THOUSEP: int
+    YESEXPR: int
+    NOEXPR: int
+    CRNCYSTR: int
+    ALT_DIGITS: int
+
+    def nl_langinfo(__key: int) -> str: ...
+
+    # This is dependent on `libintl.h` which is a part of `gettext`
+    # system dependency. These functions might be missing.
+    # But, we always say that they are present.
+    def gettext(__msg: str) -> str: ...
+    def dgettext(__domain: str | None, __msg: str) -> str: ...
+    def dcgettext(__domain: str | None, __msg: str, __category: int) -> str: ...
+    def textdomain(__domain: str | None) -> str: ...
+    def bindtextdomain(__domain: str, __dir: StrPath | None) -> str: ...
+    def bind_textdomain_codeset(__domain: str, __codeset: str | None) -> str | None: ...
diff --git a/mypy/typeshed/stdlib/_msi.pyi b/mypy/typeshed/stdlib/_msi.pyi
index 2fdbdfd0e9f4..160406a6d8d5 100644
--- a/mypy/typeshed/stdlib/_msi.pyi
+++ b/mypy/typeshed/stdlib/_msi.pyi
@@ -1,6 +1,7 @@
 import sys
 
 if sys.platform == "win32":
+    class MSIError(Exception): ...
     # Actual typename View, not exposed by the implementation
     class _View:
         def Execute(self, params: _Record | None = ...) -> None: ...
diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi
index b51d844701ac..e887fb38a7fa 100644
--- a/mypy/typeshed/stdlib/_winapi.pyi
+++ b/mypy/typeshed/stdlib/_winapi.pyi
@@ -255,3 +255,4 @@ if sys.platform == "win32":
 
     if sys.version_info >= (3, 12):
         def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ...
+        def NeedCurrentDirectoryForExePath(__exe_name: str) -> bool: ...
diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi
index 0004250b17a9..924cc8986114 100644
--- a/mypy/typeshed/stdlib/argparse.pyi
+++ b/mypy/typeshed/stdlib/argparse.pyi
@@ -342,11 +342,11 @@ if sys.version_info >= (3, 12):
             option_strings: Sequence[str],
             dest: str,
             default: _T | str | None = None,
-            type: Callable[[str], _T] | FileType | None = sentinel,  # noqa: Y011
-            choices: Iterable[_T] | None = sentinel,  # noqa: Y011
+            type: Callable[[str], _T] | FileType | None = sentinel,
+            choices: Iterable[_T] | None = sentinel,
             required: bool = False,
             help: str | None = None,
-            metavar: str | tuple[str, ...] | None = sentinel,  # noqa: Y011
+            metavar: str | tuple[str, ...] | None = sentinel,
         ) -> None: ...
 
 elif sys.version_info >= (3, 9):
diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi
index b6929deb0fae..366ac7fa35e3 100644
--- a/mypy/typeshed/stdlib/asyncio/tasks.pyi
+++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi
@@ -68,6 +68,7 @@ _T2 = TypeVar("_T2")
 _T3 = TypeVar("_T3")
 _T4 = TypeVar("_T4")
 _T5 = TypeVar("_T5")
+_T6 = TypeVar("_T6")
 _FT = TypeVar("_FT", bound=Future[Any])
 _FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T]
 _TaskYieldType: TypeAlias = Future[object] | None
@@ -131,6 +132,19 @@ if sys.version_info >= (3, 10):
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ...
     @overload
+    def gather(  # type: ignore[misc]
+        __coro_or_future1: _FutureLike[_T1],
+        __coro_or_future2: _FutureLike[_T2],
+        __coro_or_future3: _FutureLike[_T3],
+        __coro_or_future4: _FutureLike[_T4],
+        __coro_or_future5: _FutureLike[_T5],
+        __coro_or_future6: _FutureLike[_T6],
+        *,
+        return_exceptions: Literal[False] = False,
+    ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ...
+    @overload
+    def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: Literal[False] = False) -> Future[list[_T]]: ...  # type: ignore[misc]
+    @overload
     def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ...  # type: ignore[misc]
     @overload
     def gather(  # type: ignore[misc]
@@ -166,7 +180,27 @@ if sys.version_info >= (3, 10):
         tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException]
     ]: ...
     @overload
-    def gather(*coros_or_futures: _FutureLike[Any], return_exceptions: bool = False) -> Future[list[Any]]: ...
+    def gather(  # type: ignore[misc]
+        __coro_or_future1: _FutureLike[_T1],
+        __coro_or_future2: _FutureLike[_T2],
+        __coro_or_future3: _FutureLike[_T3],
+        __coro_or_future4: _FutureLike[_T4],
+        __coro_or_future5: _FutureLike[_T5],
+        __coro_or_future6: _FutureLike[_T6],
+        *,
+        return_exceptions: bool,
+    ) -> Future[
+        tuple[
+            _T1 | BaseException,
+            _T2 | BaseException,
+            _T3 | BaseException,
+            _T4 | BaseException,
+            _T5 | BaseException,
+            _T6 | BaseException,
+        ]
+    ]: ...
+    @overload
+    def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: bool) -> Future[list[_T | BaseException]]: ...
 
 else:
     @overload
@@ -212,6 +246,22 @@ else:
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ...
     @overload
+    def gather(  # type: ignore[misc]
+        __coro_or_future1: _FutureLike[_T1],
+        __coro_or_future2: _FutureLike[_T2],
+        __coro_or_future3: _FutureLike[_T3],
+        __coro_or_future4: _FutureLike[_T4],
+        __coro_or_future5: _FutureLike[_T5],
+        __coro_or_future6: _FutureLike[_T6],
+        *,
+        loop: AbstractEventLoop | None = None,
+        return_exceptions: Literal[False] = False,
+    ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ...
+    @overload
+    def gather(  # type: ignore[misc]
+        *coros_or_futures: _FutureLike[_T], loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False
+    ) -> Future[list[_T]]: ...
+    @overload
     def gather(  # type: ignore[misc]
         __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: bool
     ) -> Future[tuple[_T1 | BaseException]]: ...
@@ -249,16 +299,24 @@ else:
         __coro_or_future3: _FutureLike[_T3],
         __coro_or_future4: _FutureLike[_T4],
         __coro_or_future5: _FutureLike[_T5],
+        __coro_or_future6: _FutureLike[_T6],
         *,
         loop: AbstractEventLoop | None = None,
         return_exceptions: bool,
     ) -> Future[
-        tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException]
+        tuple[
+            _T1 | BaseException,
+            _T2 | BaseException,
+            _T3 | BaseException,
+            _T4 | BaseException,
+            _T5 | BaseException,
+            _T6 | BaseException,
+        ]
     ]: ...
     @overload
-    def gather(
-        *coros_or_futures: _FutureLike[Any], loop: AbstractEventLoop | None = None, return_exceptions: bool = False
-    ) -> Future[list[Any]]: ...
+    def gather(  # type: ignore[misc]
+        *coros_or_futures: _FutureLike[_T], loop: AbstractEventLoop | None = None, return_exceptions: bool
+    ) -> Future[list[_T | BaseException]]: ...
 
 def run_coroutine_threadsafe(coro: _FutureLike[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ...
 
diff --git a/mypy/typeshed/stdlib/asyncio/windows_events.pyi b/mypy/typeshed/stdlib/asyncio/windows_events.pyi
index 2942a25c0ac4..8e643dd4a3f2 100644
--- a/mypy/typeshed/stdlib/asyncio/windows_events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/windows_events.pyi
@@ -1,6 +1,6 @@
 import socket
 import sys
-from _typeshed import Incomplete, WriteableBuffer
+from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer
 from collections.abc import Callable
 from typing import IO, Any, ClassVar, NoReturn
 from typing_extensions import Literal
@@ -48,6 +48,12 @@ if sys.platform == "win32":
         def select(self, timeout: int | None = None) -> list[futures.Future[Any]]: ...
         def recv(self, conn: socket.socket, nbytes: int, flags: int = 0) -> futures.Future[bytes]: ...
         def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ...
+        def recvfrom(
+            self, conn: socket.socket, nbytes: int, flags: int = 0
+        ) -> futures.Future[tuple[bytes, socket._RetAddress]]: ...
+        def sendto(
+            self, conn: socket.socket, buf: ReadableBuffer, flags: int = 0, addr: socket._Address | None = None
+        ) -> futures.Future[int]: ...
         def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ...
         def accept(self, listener: socket.socket) -> futures.Future[Any]: ...
         def connect(
@@ -60,6 +66,10 @@ if sys.platform == "win32":
         async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ...
         def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: ...
         def close(self) -> None: ...
+        if sys.version_info >= (3, 11):
+            def recvfrom_into(
+                self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0
+            ) -> futures.Future[tuple[int, socket._RetAddress]]: ...
     SelectorEventLoop = _WindowsSelectorEventLoop
 
     class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy):
diff --git a/mypy/typeshed/stdlib/locale.pyi b/mypy/typeshed/stdlib/locale.pyi
index 3753700ea889..2e95c659dbcd 100644
--- a/mypy/typeshed/stdlib/locale.pyi
+++ b/mypy/typeshed/stdlib/locale.pyi
@@ -1,6 +1,95 @@
 import sys
-from _typeshed import StrPath
-from collections.abc import Callable, Iterable, Mapping
+from _locale import (
+    CHAR_MAX as CHAR_MAX,
+    LC_ALL as LC_ALL,
+    LC_COLLATE as LC_COLLATE,
+    LC_CTYPE as LC_CTYPE,
+    LC_MONETARY as LC_MONETARY,
+    LC_NUMERIC as LC_NUMERIC,
+    LC_TIME as LC_TIME,
+    localeconv as localeconv,
+    setlocale as setlocale,
+    strcoll as strcoll,
+    strxfrm as strxfrm,
+)
+
+# This module defines a function "str()", which is why "str" can't be used
+# as a type annotation or type alias.
+from builtins import str as _str
+from collections.abc import Callable
+from decimal import Decimal
+from typing import Any
+
+if sys.version_info >= (3, 11):
+    from _locale import getencoding as getencoding
+
+# Some parts of the `_locale` module are platform-specific:
+if sys.platform != "win32":
+    from _locale import (
+        ABDAY_1 as ABDAY_1,
+        ABDAY_2 as ABDAY_2,
+        ABDAY_3 as ABDAY_3,
+        ABDAY_4 as ABDAY_4,
+        ABDAY_5 as ABDAY_5,
+        ABDAY_6 as ABDAY_6,
+        ABDAY_7 as ABDAY_7,
+        ABMON_1 as ABMON_1,
+        ABMON_2 as ABMON_2,
+        ABMON_3 as ABMON_3,
+        ABMON_4 as ABMON_4,
+        ABMON_5 as ABMON_5,
+        ABMON_6 as ABMON_6,
+        ABMON_7 as ABMON_7,
+        ABMON_8 as ABMON_8,
+        ABMON_9 as ABMON_9,
+        ABMON_10 as ABMON_10,
+        ABMON_11 as ABMON_11,
+        ABMON_12 as ABMON_12,
+        ALT_DIGITS as ALT_DIGITS,
+        AM_STR as AM_STR,
+        CODESET as CODESET,
+        CRNCYSTR as CRNCYSTR,
+        D_FMT as D_FMT,
+        D_T_FMT as D_T_FMT,
+        DAY_1 as DAY_1,
+        DAY_2 as DAY_2,
+        DAY_3 as DAY_3,
+        DAY_4 as DAY_4,
+        DAY_5 as DAY_5,
+        DAY_6 as DAY_6,
+        DAY_7 as DAY_7,
+        ERA as ERA,
+        ERA_D_FMT as ERA_D_FMT,
+        ERA_D_T_FMT as ERA_D_T_FMT,
+        ERA_T_FMT as ERA_T_FMT,
+        LC_MESSAGES as LC_MESSAGES,
+        MON_1 as MON_1,
+        MON_2 as MON_2,
+        MON_3 as MON_3,
+        MON_4 as MON_4,
+        MON_5 as MON_5,
+        MON_6 as MON_6,
+        MON_7 as MON_7,
+        MON_8 as MON_8,
+        MON_9 as MON_9,
+        MON_10 as MON_10,
+        MON_11 as MON_11,
+        MON_12 as MON_12,
+        NOEXPR as NOEXPR,
+        PM_STR as PM_STR,
+        RADIXCHAR as RADIXCHAR,
+        T_FMT as T_FMT,
+        T_FMT_AMPM as T_FMT_AMPM,
+        THOUSEP as THOUSEP,
+        YESEXPR as YESEXPR,
+        bind_textdomain_codeset as bind_textdomain_codeset,
+        bindtextdomain as bindtextdomain,
+        dcgettext as dcgettext,
+        dgettext as dgettext,
+        gettext as gettext,
+        nl_langinfo as nl_langinfo,
+        textdomain as textdomain,
+    )
 
 __all__ = [
     "getlocale",
@@ -20,7 +109,6 @@ __all__ = [
     "normalize",
     "LC_CTYPE",
     "LC_COLLATE",
-    "LC_MESSAGES",
     "LC_TIME",
     "LC_MONETARY",
     "LC_NUMERIC",
@@ -34,88 +122,11 @@ if sys.version_info >= (3, 11):
 if sys.version_info < (3, 12):
     __all__ += ["format"]
 
-# This module defines a function "str()", which is why "str" can't be used
-# as a type annotation or type alias.
-from builtins import str as _str
-from decimal import Decimal
-from typing import Any
-
-CODESET: int
-D_T_FMT: int
-D_FMT: int
-T_FMT: int
-T_FMT_AMPM: int
-AM_STR: int
-PM_STR: int
-
-DAY_1: int
-DAY_2: int
-DAY_3: int
-DAY_4: int
-DAY_5: int
-DAY_6: int
-DAY_7: int
-ABDAY_1: int
-ABDAY_2: int
-ABDAY_3: int
-ABDAY_4: int
-ABDAY_5: int
-ABDAY_6: int
-ABDAY_7: int
-
-MON_1: int
-MON_2: int
-MON_3: int
-MON_4: int
-MON_5: int
-MON_6: int
-MON_7: int
-MON_8: int
-MON_9: int
-MON_10: int
-MON_11: int
-MON_12: int
-ABMON_1: int
-ABMON_2: int
-ABMON_3: int
-ABMON_4: int
-ABMON_5: int
-ABMON_6: int
-ABMON_7: int
-ABMON_8: int
-ABMON_9: int
-ABMON_10: int
-ABMON_11: int
-ABMON_12: int
-
-RADIXCHAR: int
-THOUSEP: int
-YESEXPR: int
-NOEXPR: int
-CRNCYSTR: int
-
-ERA: int
-ERA_D_T_FMT: int
-ERA_D_FMT: int
-ERA_T_FMT: int
-
-ALT_DIGITS: int
-
-LC_CTYPE: int
-LC_COLLATE: int
-LC_TIME: int
-LC_MONETARY: int
-LC_MESSAGES: int
-LC_NUMERIC: int
-LC_ALL: int
-
-CHAR_MAX: int
+if sys.platform != "win32":
+    __all__ += ["LC_MESSAGES"]
 
 class Error(Exception): ...
 
-def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ...
-def localeconv() -> Mapping[_str, int | _str | list[int]]: ...
-def nl_langinfo(__key: int) -> _str: ...
 def getdefaultlocale(
     envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE")
 ) -> tuple[_str | None, _str | None]: ...
@@ -123,8 +134,6 @@ def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ...
 def getpreferredencoding(do_setlocale: bool = True) -> _str: ...
 def normalize(localename: _str) -> _str: ...
 def resetlocale(category: int = ...) -> None: ...
-def strcoll(__os1: _str, __os2: _str) -> int: ...
-def strxfrm(__string: _str) -> _str: ...
 
 if sys.version_info < (3, 12):
     def format(
@@ -138,20 +147,6 @@ def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ...
 def atoi(string: _str) -> int: ...
 def str(val: float) -> _str: ...
 
-# native gettext functions
-# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
-# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
-if sys.platform == "linux" or sys.platform == "darwin":
-    def gettext(__msg: _str) -> _str: ...
-    def dgettext(__domain: _str | None, __msg: _str) -> _str: ...
-    def dcgettext(__domain: _str | None, __msg: _str, __category: int) -> _str: ...
-    def textdomain(__domain: _str | None) -> _str: ...
-    def bindtextdomain(__domain: _str, __dir: StrPath | None) -> _str: ...
-    def bind_textdomain_codeset(__domain: _str, __codeset: _str | None) -> _str | None: ...
-
-if sys.version_info >= (3, 11):
-    def getencoding() -> _str: ...
-
 locale_alias: dict[_str, _str]  # undocumented
 locale_encoding_alias: dict[_str, _str]  # undocumented
 windows_locale: dict[int, _str]  # undocumented
diff --git a/mypy/typeshed/stdlib/mimetypes.pyi b/mypy/typeshed/stdlib/mimetypes.pyi
index 128a05fa5752..532cc5e3ce39 100644
--- a/mypy/typeshed/stdlib/mimetypes.pyi
+++ b/mypy/typeshed/stdlib/mimetypes.pyi
@@ -53,5 +53,4 @@ class MimeTypes:
     def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ...
     def read(self, filename: str, strict: bool = True) -> None: ...
     def readfp(self, fp: IO[str], strict: bool = True) -> None: ...
-    if sys.platform == "win32":
-        def read_windows_registry(self, strict: bool = True) -> None: ...
+    def read_windows_registry(self, strict: bool = True) -> None: ...
diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi
index 09319980692f..9a213a8b8cf0 100644
--- a/mypy/typeshed/stdlib/mmap.pyi
+++ b/mypy/typeshed/stdlib/mmap.pyi
@@ -28,7 +28,7 @@ if sys.platform != "win32":
     PROT_READ: int
     PROT_WRITE: int
 
-    PAGESIZE: int
+PAGESIZE: int
 
 class mmap(Iterable[int], Sized):
     if sys.platform == "win32":
diff --git a/mypy/typeshed/stdlib/msilib/text.pyi b/mypy/typeshed/stdlib/msilib/text.pyi
index 1353cf8a2392..441c843ca6cf 100644
--- a/mypy/typeshed/stdlib/msilib/text.pyi
+++ b/mypy/typeshed/stdlib/msilib/text.pyi
@@ -3,5 +3,5 @@ import sys
 if sys.platform == "win32":
     ActionText: list[tuple[str, str, str | None]]
     UIText: list[tuple[str, str | None]]
-
+    dirname: str
     tables: list[str]
diff --git a/mypy/typeshed/stdlib/msvcrt.pyi b/mypy/typeshed/stdlib/msvcrt.pyi
index 5849b9b00ca0..768edbc18ab3 100644
--- a/mypy/typeshed/stdlib/msvcrt.pyi
+++ b/mypy/typeshed/stdlib/msvcrt.pyi
@@ -1,8 +1,9 @@
 import sys
-from typing_extensions import Literal
+from typing_extensions import Final, Literal
 
 # This module is only available on Windows
 if sys.platform == "win32":
+    CRT_ASSEMBLY_VERSION: Final[str]
     LK_UNLCK: Literal[0]
     LK_LOCK: Literal[1]
     LK_NBLCK: Literal[2]
@@ -26,3 +27,6 @@ if sys.platform == "win32":
     def ungetch(__char: bytes | bytearray) -> None: ...
     def ungetwch(__unicode_char: str) -> None: ...
     def heapmin() -> None: ...
+    def SetErrorMode(__mode: int) -> int: ...
+    if sys.version_info >= (3, 10):
+        def GetErrorMode() -> int: ...  # undocumented
diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi
index fa4c55011eba..7fd04218fd7c 100644
--- a/mypy/typeshed/stdlib/os/__init__.pyi
+++ b/mypy/typeshed/stdlib/os/__init__.pyi
@@ -70,9 +70,20 @@ if sys.platform != "win32":
         POSIX_FADV_WILLNEED: int
         POSIX_FADV_DONTNEED: int
 
-    SF_NODISKIO: int
-    SF_MNOWAIT: int
-    SF_SYNC: int
+    if sys.platform != "linux" and sys.platform != "darwin":
+        # In the os-module docs, these are marked as being available
+        # on "Unix, not Emscripten, not WASI."
+        # However, in the source code, a comment indicates they're "FreeBSD constants".
+        # sys.platform could have one of many values on a FreeBSD Python build,
+        # so the sys-module docs recommend doing `if sys.platform.startswith('freebsd')`
+        # to detect FreeBSD builds. Unfortunately that would be too dynamic
+        # for type checkers, however.
+        SF_NODISKIO: int
+        SF_MNOWAIT: int
+        SF_SYNC: int
+
+        if sys.version_info >= (3, 11):
+            SF_NOCACHE: int
 
     if sys.platform == "linux":
         XATTR_SIZE_MAX: int
@@ -282,6 +293,8 @@ if sys.platform != "win32":
     EX_PROTOCOL: int
     EX_NOPERM: int
     EX_CONFIG: int
+
+if sys.platform != "win32" and sys.platform != "darwin":
     EX_NOTFOUND: int
 
 P_NOWAIT: int
diff --git a/mypy/typeshed/stdlib/posix.pyi b/mypy/typeshed/stdlib/posix.pyi
index ab6bf2e63be5..81cc93c5aa66 100644
--- a/mypy/typeshed/stdlib/posix.pyi
+++ b/mypy/typeshed/stdlib/posix.pyi
@@ -14,7 +14,6 @@ if sys.platform != "win32":
         EX_NOHOST as EX_NOHOST,
         EX_NOINPUT as EX_NOINPUT,
         EX_NOPERM as EX_NOPERM,
-        EX_NOTFOUND as EX_NOTFOUND,
         EX_NOUSER as EX_NOUSER,
         EX_OK as EX_OK,
         EX_OSERR as EX_OSERR,
@@ -29,6 +28,7 @@ if sys.platform != "win32":
         F_TEST as F_TEST,
         F_TLOCK as F_TLOCK,
         F_ULOCK as F_ULOCK,
+        NGROUPS_MAX as NGROUPS_MAX,
         O_APPEND as O_APPEND,
         O_ASYNC as O_ASYNC,
         O_CREAT as O_CREAT,
@@ -222,6 +222,9 @@ if sys.platform != "win32":
         writev as writev,
     )
 
+    if sys.platform != "darwin":
+        from os import EX_NOTFOUND as EX_NOTFOUND
+
     if sys.platform == "linux":
         from os import (
             GRND_NONBLOCK as GRND_NONBLOCK,
diff --git a/mypy/typeshed/stdlib/select.pyi b/mypy/typeshed/stdlib/select.pyi
index c86d20c352e0..5e2828e42c30 100644
--- a/mypy/typeshed/stdlib/select.pyi
+++ b/mypy/typeshed/stdlib/select.pyi
@@ -15,7 +15,8 @@ if sys.platform != "win32":
     POLLOUT: int
     POLLPRI: int
     POLLRDBAND: int
-    POLLRDHUP: int
+    if sys.platform == "linux":
+        POLLRDHUP: int
     POLLRDNORM: int
     POLLWRBAND: int
     POLLWRNORM: int
@@ -136,7 +137,6 @@ if sys.platform == "linux":
     EPOLLRDNORM: int
     EPOLLWRBAND: int
     EPOLLWRNORM: int
-    EPOLL_RDHUP: int
     EPOLL_CLOEXEC: int
 
 if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32":
diff --git a/mypy/typeshed/stdlib/selectors.pyi b/mypy/typeshed/stdlib/selectors.pyi
index 90a923f09355..043df9253316 100644
--- a/mypy/typeshed/stdlib/selectors.pyi
+++ b/mypy/typeshed/stdlib/selectors.pyi
@@ -59,15 +59,21 @@ class DevpollSelector(BaseSelector):
     def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ...
     def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ...
 
-class KqueueSelector(BaseSelector):
-    def fileno(self) -> int: ...
-    def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ...
-    def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ...
-    def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ...
-    def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ...
+if sys.platform != "win32":
+    class KqueueSelector(BaseSelector):
+        def fileno(self) -> int: ...
+        def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ...
+        def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ...
+        def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ...
+        def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ...
 
+# Not a real class at runtime, it is just a conditional alias to other real selectors.
+# The runtime logic is more fine-grained than a `sys.platform` check;
+# not really expressible in the stubs
 class DefaultSelector(BaseSelector):
     def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ...
     def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ...
     def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ...
     def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ...
+    if sys.platform != "win32":
+        def fileno(self) -> int: ...
diff --git a/mypy/typeshed/stdlib/signal.pyi b/mypy/typeshed/stdlib/signal.pyi
index 72c78f1b69f5..906a6dabe192 100644
--- a/mypy/typeshed/stdlib/signal.pyi
+++ b/mypy/typeshed/stdlib/signal.pyi
@@ -10,10 +10,8 @@ NSIG: int
 
 class Signals(IntEnum):
     SIGABRT: int
-    SIGEMT: int
     SIGFPE: int
     SIGILL: int
-    SIGINFO: int
     SIGINT: int
     SIGSEGV: int
     SIGTERM: int
@@ -47,6 +45,9 @@ class Signals(IntEnum):
         SIGWINCH: int
         SIGXCPU: int
         SIGXFSZ: int
+        if sys.platform != "linux":
+            SIGEMT: int
+            SIGINFO: int
         if sys.platform != "darwin":
             SIGCLD: int
             SIGPOLL: int
@@ -77,10 +78,8 @@ else:
     def signal(__signalnum: _SIGNUM, __handler: _HANDLER) -> _HANDLER: ...
 
 SIGABRT: Signals
-SIGEMT: Signals
 SIGFPE: Signals
 SIGILL: Signals
-SIGINFO: Signals
 SIGINT: Signals
 SIGSEGV: Signals
 SIGTERM: Signals
@@ -90,6 +89,9 @@ if sys.platform == "win32":
     CTRL_C_EVENT: Signals
     CTRL_BREAK_EVENT: Signals
 else:
+    if sys.platform != "linux":
+        SIGINFO: Signals
+        SIGEMT: Signals
     SIGALRM: Signals
     SIGBUS: Signals
     SIGCHLD: Signals
diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi
index da06ce2c2b06..cc0cbe3709af 100644
--- a/mypy/typeshed/stdlib/socket.pyi
+++ b/mypy/typeshed/stdlib/socket.pyi
@@ -129,7 +129,9 @@ if sys.platform != "darwin" or sys.version_info >= (3, 9):
         IPV6_RTHDR as IPV6_RTHDR,
     )
 
-if sys.platform != "darwin":
+if sys.platform == "darwin":
+    from _socket import PF_SYSTEM as PF_SYSTEM, SYSPROTO_CONTROL as SYSPROTO_CONTROL
+else:
     from _socket import SO_EXCLUSIVEADDRUSE as SO_EXCLUSIVEADDRUSE
 
 if sys.version_info >= (3, 10):
diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi
index faf667afb475..d7f256d031ac 100644
--- a/mypy/typeshed/stdlib/ssl.pyi
+++ b/mypy/typeshed/stdlib/ssl.pyi
@@ -4,7 +4,7 @@ import sys
 from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer
 from collections.abc import Callable, Iterable
 from typing import Any, NamedTuple, overload
-from typing_extensions import Literal, Self, TypeAlias, TypedDict, final
+from typing_extensions import Literal, Never, Self, TypeAlias, TypedDict, final
 
 _PCTRTT: TypeAlias = tuple[tuple[str, str], ...]
 _PCTRTTT: TypeAlias = tuple[_PCTRTT, ...]
@@ -367,6 +367,10 @@ class SSLSocket(socket.socket):
     def pending(self) -> int: ...
     if sys.version_info >= (3, 8):
         def verify_client_post_handshake(self) -> None: ...
+    # These methods always raise `NotImplementedError`:
+    def recvmsg(self, *args: Never, **kwargs: Never) -> Never: ...  # type: ignore[override]
+    def recvmsg_into(self, *args: Never, **kwargs: Never) -> Never: ...  # type: ignore[override]
+    def sendmsg(self, *args: Never, **kwargs: Never) -> Never: ...  # type: ignore[override]
 
 class TLSVersion(enum.IntEnum):
     MINIMUM_SUPPORTED: int
diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi
index 346e4d5513d8..1013db7ee984 100644
--- a/mypy/typeshed/stdlib/subprocess.pyi
+++ b/mypy/typeshed/stdlib/subprocess.pyi
@@ -2600,6 +2600,7 @@ if sys.platform == "win32":
         hStdError: Any | None
         wShowWindow: int
         lpAttributeList: Mapping[str, Any]
+        def copy(self) -> STARTUPINFO: ...
     from _winapi import (
         ABOVE_NORMAL_PRIORITY_CLASS as ABOVE_NORMAL_PRIORITY_CLASS,
         BELOW_NORMAL_PRIORITY_CLASS as BELOW_NORMAL_PRIORITY_CLASS,
diff --git a/mypy/typeshed/stdlib/winreg.pyi b/mypy/typeshed/stdlib/winreg.pyi
index 337bd9706050..613b239ff663 100644
--- a/mypy/typeshed/stdlib/winreg.pyi
+++ b/mypy/typeshed/stdlib/winreg.pyi
@@ -99,3 +99,5 @@ if sys.platform == "win32":
         def Close(self) -> None: ...
         def Detach(self) -> int: ...
         def __hash__(self) -> int: ...
+        @property
+        def handle(self) -> int: ...
diff --git a/mypy/typeshed/stdlib/winsound.pyi b/mypy/typeshed/stdlib/winsound.pyi
index 9b2b57a38986..aa04fdc27a01 100644
--- a/mypy/typeshed/stdlib/winsound.pyi
+++ b/mypy/typeshed/stdlib/winsound.pyi
@@ -4,6 +4,7 @@ from typing import overload
 from typing_extensions import Literal
 
 if sys.platform == "win32":
+    SND_APPLICATION: Literal[128]
     SND_FILENAME: Literal[131072]
     SND_ALIAS: Literal[65536]
     SND_LOOP: Literal[8]
diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi
index dc07eb3f2a38..b7144f3ab528 100644
--- a/mypy/typeshed/stdlib/zipfile.pyi
+++ b/mypy/typeshed/stdlib/zipfile.pyi
@@ -2,9 +2,10 @@ import io
 import sys
 from _typeshed import SizedBuffer, StrOrBytesPath, StrPath
 from collections.abc import Callable, Iterable, Iterator
+from io import TextIOWrapper
 from os import PathLike
 from types import TracebackType
-from typing import IO, Any, Protocol, overload
+from typing import IO, Protocol, overload
 from typing_extensions import Literal, Self, TypeAlias
 
 __all__ = [
@@ -223,11 +224,18 @@ class ZipInfo:
     def FileHeader(self, zip64: bool | None = None) -> bytes: ...
 
 if sys.version_info >= (3, 8):
-    if sys.version_info < (3, 9):
-        class _PathOpenProtocol(Protocol):
-            def __call__(self, mode: _ReadWriteMode = "r", pwd: bytes | None = ..., *, force_zip64: bool = ...) -> IO[bytes]: ...
+    class CompleteDirs(ZipFile):
+        def resolve_dir(self, name: str) -> str: ...
+        @overload
+        @classmethod
+        def make(cls, source: ZipFile) -> CompleteDirs: ...
+        @overload
+        @classmethod
+        def make(cls: type[Self], source: StrPath | IO[bytes]) -> Self: ...
 
     class Path:
+        root: CompleteDirs
+        def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ...
         @property
         def name(self) -> str: ...
         @property
@@ -243,19 +251,25 @@ if sys.version_info >= (3, 8):
             @property
             def stem(self) -> str: ...
 
-        def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ...
         if sys.version_info >= (3, 9):
+            @overload
             def open(
                 self,
-                mode: _ReadWriteBinaryMode = "r",
+                mode: Literal["r", "w"] = "r",
                 encoding: str | None = None,
-                *args: Any,
+                errors: str | None = None,
+                newline: str | None = None,
+                line_buffering: bool = ...,
+                write_through: bool = ...,
+                *,
                 pwd: bytes | None = None,
-                **kwargs: Any,
-            ) -> IO[bytes]: ...
+            ) -> TextIOWrapper: ...
+            @overload
+            def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ...
         else:
-            @property
-            def open(self) -> _PathOpenProtocol: ...
+            def open(
+                self, mode: _ReadWriteBinaryMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False
+            ) -> IO[bytes]: ...
 
         if sys.version_info >= (3, 10):
             def iterdir(self) -> Iterator[Self]: ...

From 940fceb59a55513913c3c9c1eaf89a7f5ee774d6 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sun, 15 Oct 2023 12:12:44 -0700
Subject: [PATCH 175/288] [mypyc] Fix direct __dict__ access on inner functions
 in new Python (#16084)

Fixes #16077
---
 mypyc/codegen/emitclass.py         |  5 ++++-
 mypyc/test-data/run-functions.test | 30 ++++++++++++++++++++++++++++++
 2 files changed, 34 insertions(+), 1 deletion(-)

diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py
index 62e1b4b2dea1..8dcf7212b694 100644
--- a/mypyc/codegen/emitclass.py
+++ b/mypyc/codegen/emitclass.py
@@ -217,7 +217,7 @@ def generate_class(cl: ClassIR, module: str, emitter: Emitter) -> None:
     fields["tp_name"] = f'"{name}"'
 
     generate_full = not cl.is_trait and not cl.builtin_base
-    needs_getseters = cl.needs_getseters or not cl.is_generated
+    needs_getseters = cl.needs_getseters or not cl.is_generated or cl.has_dict
 
     if not cl.builtin_base:
         fields["tp_new"] = new_name
@@ -886,6 +886,9 @@ def generate_getseters_table(cl: ClassIR, name: str, emitter: Emitter) -> None:
         else:
             emitter.emit_line("NULL, NULL, NULL},")
 
+    if cl.has_dict:
+        emitter.emit_line('{"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},')
+
     emitter.emit_line("{NULL}  /* Sentinel */")
     emitter.emit_line("};")
 
diff --git a/mypyc/test-data/run-functions.test b/mypyc/test-data/run-functions.test
index 21993891c4e3..bd8f1a9197dd 100644
--- a/mypyc/test-data/run-functions.test
+++ b/mypyc/test-data/run-functions.test
@@ -1256,3 +1256,33 @@ def foo(**kwargs: Unpack[Person]) -> None:
 foo(name='Jennifer', age=38)
 [out]
 Jennifer
+
+[case testNestedFunctionDunderDict312]
+import sys
+
+def foo() -> None:
+    def inner() -> str: return "bar"
+    print(inner.__dict__)  # type: ignore[attr-defined]
+    inner.__dict__.update({"x": 1})  # type: ignore[attr-defined]
+    print(inner.__dict__)  # type: ignore[attr-defined]
+    print(inner.x)  # type: ignore[attr-defined]
+
+if sys.version_info >= (3, 12):  # type: ignore
+    foo()
+[out]
+[out version>=3.12]
+{}
+{'x': 1}
+1
+
+[case testFunctoolsUpdateWrapper]
+import functools
+
+def bar() -> None:
+    def inner() -> str: return "bar"
+    functools.update_wrapper(inner, bar)  # type: ignore
+    print(inner.__dict__)  # type: ignore
+
+bar()
+[out]
+{'__module__': 'native', '__name__': 'bar', '__qualname__': 'bar', '__doc__': None, '__wrapped__': <built-in function bar>}

From ff9deb3001d9c7cc84a1e2fed9125bf456b1d68b Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sun, 15 Oct 2023 21:44:02 +0100
Subject: [PATCH 176/288] Correctly handle runtime type applications of
 variadic types (#16240)

This adds some missing pieces to runtime type application handling for
both `TypeVarTuple` and `ParamSpec`. Everything is straightforward
(maybe a bit hacky, but we already import `typeanal` in `checkexpr` for
similar purposes, e.g. type aliases in runtime context).

Fixes https://github.com/python/mypy/issues/14799
---
 mypy/checkexpr.py                             | 34 ++++++++++++++++---
 .../unit/check-parameter-specification.test   | 13 +++++++
 test-data/unit/check-typevar-tuple.test       | 20 +++++++++++
 3 files changed, 62 insertions(+), 5 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index fd155ff87379..a1dd6d830758 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -28,7 +28,7 @@
 from mypy.maptype import map_instance_to_supertype
 from mypy.meet import is_overlapping_types, narrow_declared_type
 from mypy.message_registry import ErrorMessage
-from mypy.messages import MessageBuilder
+from mypy.messages import MessageBuilder, format_type
 from mypy.nodes import (
     ARG_NAMED,
     ARG_POS,
@@ -116,10 +116,12 @@
 from mypy.type_visitor import TypeTranslator
 from mypy.typeanal import (
     check_for_explicit_any,
+    fix_instance,
     has_any_from_unimported_type,
     instantiate_type_alias,
     make_optional_type,
     set_any_tvars,
+    validate_instance,
 )
 from mypy.typeops import (
     callable_type,
@@ -166,10 +168,12 @@
     TypeVarLikeType,
     TypeVarTupleType,
     TypeVarType,
+    UnboundType,
     UninhabitedType,
     UnionType,
     UnpackType,
     find_unpack_in_list,
+    flatten_nested_tuples,
     flatten_nested_unions,
     get_proper_type,
     get_proper_types,
@@ -4637,15 +4641,35 @@ class C(Generic[T, Unpack[Ts]]): ...
         similar to how it is done in other places using split_with_prefix_and_suffix().
         """
         vars = t.variables
+        args = flatten_nested_tuples(args)
+
+        # TODO: this logic is duplicated with semanal_typeargs.
+        for tv, arg in zip(t.variables, args):
+            if isinstance(tv, ParamSpecType):
+                if not isinstance(
+                    get_proper_type(arg), (Parameters, ParamSpecType, AnyType, UnboundType)
+                ):
+                    self.chk.fail(
+                        "Can only replace ParamSpec with a parameter types list or"
+                        f" another ParamSpec, got {format_type(arg, self.chk.options)}",
+                        ctx,
+                    )
+                    return [AnyType(TypeOfAny.from_error)] * len(vars)
+
         if not vars or not any(isinstance(v, TypeVarTupleType) for v in vars):
             return list(args)
+        assert t.is_type_obj()
+        info = t.type_object()
+        # We reuse the logic from semanal phase to reduce code duplication.
+        fake = Instance(info, args, line=ctx.line, column=ctx.column)
+        if not validate_instance(fake, self.chk.fail):
+            fix_instance(
+                fake, self.chk.fail, self.chk.note, disallow_any=False, options=self.chk.options
+            )
+            args = list(fake.args)
 
         prefix = next(i for (i, v) in enumerate(vars) if isinstance(v, TypeVarTupleType))
         suffix = len(vars) - prefix - 1
-        if len(args) < len(vars) - 1:
-            self.msg.incompatible_type_application(len(vars), len(args), ctx)
-            return [AnyType(TypeOfAny.from_error)] * len(vars)
-
         tvt = vars[prefix]
         assert isinstance(tvt, TypeVarTupleType)
         start, middle, end = split_with_prefix_and_suffix(tuple(args), prefix, suffix)
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index 5b6024da687e..48fadbc96c90 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1977,6 +1977,19 @@ g(cb, y='a', x=0)  # E: Argument "y" to "g" has incompatible type "str"; expecte
                    # E: Argument "x" to "g" has incompatible type "int"; expected "str"
 [builtins fixtures/paramspec.pyi]
 
+[case testParamSpecBadRuntimeTypeApplication]
+from typing import ParamSpec, TypeVar, Generic, Callable
+
+R = TypeVar("R")
+P = ParamSpec("P")
+class C(Generic[P, R]):
+    x: Callable[P, R]
+
+bad = C[int, str]()  # E: Can only replace ParamSpec with a parameter types list or another ParamSpec, got "int"
+reveal_type(bad)  # N: Revealed type is "__main__.C[Any, Any]"
+reveal_type(bad.x)  # N: Revealed type is "def (*Any, **Any) -> Any"
+[builtins fixtures/paramspec.pyi]
+
 [case testParamSpecNoCrashOnUnificationAlias]
 import mod
 [file mod.pyi]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 0212518bdec0..22a30432d098 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -1845,3 +1845,23 @@ def foo2(func: Callable[[Unpack[Args]], T], *args: Unpack[Args2]) -> T:
 def foo3(func: Callable[[int, Unpack[Args2]], T], *args: Unpack[Args2]) -> T:
    return submit(func, 1, *args)
 [builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleRuntimeTypeApplication]
+from typing import Generic, TypeVar, Tuple
+from typing_extensions import Unpack, TypeVarTuple
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+class C(Generic[T, Unpack[Ts], S]): ...
+
+Ints = Tuple[int, int]
+x = C[Unpack[Ints]]()
+reveal_type(x)  # N: Revealed type is "__main__.C[builtins.int, builtins.int]"
+
+y = C[Unpack[Tuple[int, ...]]]()
+reveal_type(y)  # N: Revealed type is "__main__.C[builtins.int, Unpack[builtins.tuple[builtins.int, ...]], builtins.int]"
+
+z = C[int]()  # E: Bad number of arguments, expected: at least 2, given: 1
+reveal_type(z)  # N: Revealed type is "__main__.C[Any, Unpack[builtins.tuple[Any, ...]], Any]"
+[builtins fixtures/tuple.pyi]

From e4355948d797600c7b76da0a916fc5f29d10448e Mon Sep 17 00:00:00 2001
From: Chad Dombrova <chadrik@gmail.com>
Date: Sun, 15 Oct 2023 15:35:20 -0700
Subject: [PATCH 177/288] stubgen: unify C extension and pure python stub
 generators with object oriented design (#15770)

This MR is a major overhaul to `stubgen`. It has been tested extensively
in the process of creating stubs for multiple large and varied libraries
(detailed below).

## User story

The impetus of this change is as follows: as a maintainer of third-party
stubs I do _not_ want to use `stubgen` as a starting point for
hand-editing stub files, I want a framework to regenerate stubs against
upstream changes to a library.

## Summary of Changes

- Introduces an object-oriented design for C extension stub generation,
including a common base class that is shared between inspection-based
and parsing-based stub generation.
- Generally unifies and harmonizes the behavior between inspection and
parsing approaches. For example, function formatting, import tracking,
signature generators, and attribute filtering are now handled with the
same code.
- Adds support for `--include-private` and `--export-less` to
c-extensions (inspection-based generation).
- Adds support for force enabling inspection-based stub generation (the
approach used for C extensions) on pure python code using a new
`--inspect-mode` flag. Useful for packages that employ dynamic function
or class factories. Also makes it possible to generate stubs for
pyc-only modules (yes, this is a real use case)
- Adds an alias `--no-analysis` for `--parse-only` to clarify the
purpose of this option.
- Removes filtering of `__version__` attribute from modules: I've
encountered a number of cases in real-world code that utilize this
attribute.
- Adds a number of tests for inspection mode. Even though these run on
pure python code they increase coverage of the C extension code since it
shares much of the same code base.

Below I've compiled some basic information about each stub library that
I've created using my changes, and a link to the specialized code for
procedurally generating the stubs.

| Library | code type | other notes |
| --- | --- | --- |
|
[USD](https://github.com/LumaPictures/cg-stubs/blob/master/usd/stubgen_usd.py)
| boost-python | integrates types from doxygen |
|
[katana](https://github.com/LumaPictures/cg-stubs/blob/master/katana/stubgen_katana.py)
| pyc and C extensions | uses epydoc docstrings. has pyi-only packages |
|
[mari](https://github.com/LumaPictures/cg-stubs/blob/master/mari/stubgen_mari.py)
| pure python and C extensions | uses epydoc docstrings |
|
[opencolorio](https://github.com/LumaPictures/cg-stubs/blob/master/ocio/stubgen_ocio.py)
| pybind11 | |
|
[pyside2](https://github.com/LumaPictures/cg-stubs/blob/master/pyside/stubgen_pyside.py)
| shiboken | |
| substance_painter | pure python | basic / non-custom. reads types from
annotations |
| pymel | pure python | integrates types parsed from custom docs |

I know that this is a pretty big PR, and I know it's a lot to go
through, but I've spent a huge amount of time on it and I believe this
makes mypy's stubgen tool the absolute best available. If it helps, I
also have 13 merged mypy PRs under my belt and I'll be around to fix any
issues if they come up.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Jelle Zijlstra <jelle.zijlstra@gmail.com>
---
 docs/source/stubgen.rst                       |   14 +-
 mypy/moduleinspect.py                         |    4 +
 mypy/stubdoc.py                               |  100 +-
 mypy/stubgen.py                               |  814 ++++-------
 mypy/stubgenc.py                              | 1267 ++++++++++-------
 mypy/stubutil.py                              |  604 +++++++-
 mypy/test/teststubgen.py                      |  455 ++----
 mypy/traverser.py                             |    3 +-
 setup.py                                      |    1 -
 .../stubgen/pybind11_mypy_demo/__init__.pyi   |    1 +
 .../stubgen/pybind11_mypy_demo/basics.pyi     |    8 +-
 test-data/unit/stubgen.test                   |  296 +++-
 12 files changed, 2125 insertions(+), 1442 deletions(-)

diff --git a/docs/source/stubgen.rst b/docs/source/stubgen.rst
index 2de0743572e7..c9e52956379a 100644
--- a/docs/source/stubgen.rst
+++ b/docs/source/stubgen.rst
@@ -127,12 +127,22 @@ alter the default behavior:
     unwanted side effects, such as the running of tests. Stubgen tries to skip test
     modules even without this option, but this does not always work.
 
-.. option:: --parse-only
+.. option:: --no-analysis
 
     Don't perform semantic analysis of source files. This may generate
     worse stubs -- in particular, some module, class, and function aliases may
     be represented as variables with the ``Any`` type. This is generally only
-    useful if semantic analysis causes a critical mypy error.
+    useful if semantic analysis causes a critical mypy error.  Does not apply to
+    C extension modules.  Incompatible with :option:`--inspect-mode`.
+
+.. option:: --inspect-mode
+
+    Import and inspect modules instead of parsing source code. This is the default
+    behavior for C modules and pyc-only packages.  The flag is useful to force
+    inspection for pure Python modules that make use of dynamically generated
+    members that would otherwise be omitted when using the default behavior of
+    code parsing.  Implies :option:`--no-analysis` as analysis requires source
+    code.
 
 .. option:: --doc-dir PATH
 
diff --git a/mypy/moduleinspect.py b/mypy/moduleinspect.py
index b383fc9dc145..580b31fb4107 100644
--- a/mypy/moduleinspect.py
+++ b/mypy/moduleinspect.py
@@ -39,6 +39,10 @@ def is_c_module(module: ModuleType) -> bool:
     return os.path.splitext(module.__dict__["__file__"])[-1] in [".so", ".pyd", ".dll"]
 
 
+def is_pyc_only(file: str | None) -> bool:
+    return bool(file and file.endswith(".pyc") and not os.path.exists(file[:-1]))
+
+
 class InspectError(Exception):
     pass
 
diff --git a/mypy/stubdoc.py b/mypy/stubdoc.py
index 145f57fd7751..c277573f0b59 100644
--- a/mypy/stubdoc.py
+++ b/mypy/stubdoc.py
@@ -8,11 +8,14 @@
 
 import contextlib
 import io
+import keyword
 import re
 import tokenize
 from typing import Any, Final, MutableMapping, MutableSequence, NamedTuple, Sequence, Tuple
 from typing_extensions import TypeAlias as _TypeAlias
 
+import mypy.util
+
 # Type alias for signatures strings in format ('func_name', '(arg, opt_arg=False)').
 Sig: _TypeAlias = Tuple[str, str]
 
@@ -35,12 +38,16 @@ class ArgSig:
 
     def __init__(self, name: str, type: str | None = None, default: bool = False):
         self.name = name
-        if type and not is_valid_type(type):
-            raise ValueError("Invalid type: " + type)
         self.type = type
         # Does this argument have a default value?
         self.default = default
 
+    def is_star_arg(self) -> bool:
+        return self.name.startswith("*") and not self.name.startswith("**")
+
+    def is_star_kwarg(self) -> bool:
+        return self.name.startswith("**")
+
     def __repr__(self) -> str:
         return "ArgSig(name={}, type={}, default={})".format(
             repr(self.name), repr(self.type), repr(self.default)
@@ -59,7 +66,80 @@ def __eq__(self, other: Any) -> bool:
 class FunctionSig(NamedTuple):
     name: str
     args: list[ArgSig]
-    ret_type: str
+    ret_type: str | None
+
+    def is_special_method(self) -> bool:
+        return bool(
+            self.name.startswith("__")
+            and self.name.endswith("__")
+            and self.args
+            and self.args[0].name in ("self", "cls")
+        )
+
+    def has_catchall_args(self) -> bool:
+        """Return if this signature has catchall args: (*args, **kwargs)"""
+        if self.args and self.args[0].name in ("self", "cls"):
+            args = self.args[1:]
+        else:
+            args = self.args
+        return (
+            len(args) == 2
+            and all(a.type in (None, "object", "Any", "typing.Any") for a in args)
+            and args[0].is_star_arg()
+            and args[1].is_star_kwarg()
+        )
+
+    def is_catchall_signature(self) -> bool:
+        """Return if this signature is the catchall identity: (*args, **kwargs) -> Any"""
+        return self.has_catchall_args() and self.ret_type in (None, "Any", "typing.Any")
+
+    def format_sig(
+        self,
+        indent: str = "",
+        is_async: bool = False,
+        any_val: str | None = None,
+        docstring: str | None = None,
+    ) -> str:
+        args: list[str] = []
+        for arg in self.args:
+            arg_def = arg.name
+
+            if arg_def in keyword.kwlist:
+                arg_def = "_" + arg_def
+
+            if (
+                arg.type is None
+                and any_val is not None
+                and arg.name not in ("self", "cls")
+                and not arg.name.startswith("*")
+            ):
+                arg_type: str | None = any_val
+            else:
+                arg_type = arg.type
+            if arg_type:
+                arg_def += ": " + arg_type
+                if arg.default:
+                    arg_def += " = ..."
+
+            elif arg.default:
+                arg_def += "=..."
+
+            args.append(arg_def)
+
+        retfield = ""
+        ret_type = self.ret_type if self.ret_type else any_val
+        if ret_type is not None:
+            retfield = " -> " + ret_type
+
+        prefix = "async " if is_async else ""
+        sig = "{indent}{prefix}def {name}({args}){ret}:".format(
+            indent=indent, prefix=prefix, name=self.name, args=", ".join(args), ret=retfield
+        )
+        if docstring:
+            suffix = f"\n{indent}    {mypy.util.quote_docstring(docstring)}"
+        else:
+            suffix = " ..."
+        return f"{sig}{suffix}"
 
 
 # States of the docstring parser.
@@ -176,17 +256,17 @@ def add_token(self, token: tokenize.TokenInfo) -> None:
 
             # arg_name is empty when there are no args. e.g. func()
             if self.arg_name:
-                try:
+                if self.arg_type and not is_valid_type(self.arg_type):
+                    # wrong type, use Any
+                    self.args.append(
+                        ArgSig(name=self.arg_name, type=None, default=bool(self.arg_default))
+                    )
+                else:
                     self.args.append(
                         ArgSig(
                             name=self.arg_name, type=self.arg_type, default=bool(self.arg_default)
                         )
                     )
-                except ValueError:
-                    # wrong type, use Any
-                    self.args.append(
-                        ArgSig(name=self.arg_name, type=None, default=bool(self.arg_default))
-                    )
             self.arg_name = ""
             self.arg_type = None
             self.arg_default = None
@@ -240,7 +320,7 @@ def args_kwargs(signature: FunctionSig) -> bool:
 
 
 def infer_sig_from_docstring(docstr: str | None, name: str) -> list[FunctionSig] | None:
-    """Convert function signature to list of TypedFunctionSig
+    """Convert function signature to list of FunctionSig
 
     Look for function signatures of function in docstring. Signature is a string of
     the format <function_name>(<signature>) -> <return type> or perhaps without
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index e8c12ee4d99b..395a49fa4e08 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -7,7 +7,7 @@
   - or use mypy's mechanisms, if importing is prohibited
 * (optionally) semantically analysing the sources using mypy (as a single set)
 * emitting the stubs text:
-  - for Python modules: from ASTs using StubGenerator
+  - for Python modules: from ASTs using ASTStubGenerator
   - for C modules using runtime introspection and (optionally) Sphinx docs
 
 During first and third steps some problematic files can be skipped, but any
@@ -42,14 +42,12 @@
 from __future__ import annotations
 
 import argparse
-import glob
 import keyword
 import os
 import os.path
 import sys
 import traceback
-from collections import defaultdict
-from typing import Final, Iterable, Mapping
+from typing import Final, Iterable
 
 import mypy.build
 import mypy.mixedtraverser
@@ -66,7 +64,7 @@
     SearchPaths,
     default_lib_path,
 )
-from mypy.moduleinspect import ModuleInspect
+from mypy.moduleinspect import ModuleInspect, is_pyc_only
 from mypy.nodes import (
     ARG_NAMED,
     ARG_POS,
@@ -85,6 +83,7 @@
     DictExpr,
     EllipsisExpr,
     Expression,
+    ExpressionStmt,
     FloatExpr,
     FuncBase,
     FuncDef,
@@ -109,20 +108,19 @@
     Var,
 )
 from mypy.options import Options as MypyOptions
-from mypy.stubdoc import Sig, find_unique_signatures, parse_all_signatures
-from mypy.stubgenc import (
-    DocstringSignatureGenerator,
-    ExternalSignatureGenerator,
-    FallbackSignatureGenerator,
-    SignatureGenerator,
-    generate_stub_for_c_module,
-)
+from mypy.stubdoc import ArgSig, FunctionSig
+from mypy.stubgenc import InspectionStubGenerator, generate_stub_for_c_module
 from mypy.stubutil import (
+    BaseStubGenerator,
     CantImport,
+    ClassInfo,
+    FunctionContext,
     common_dir_prefix,
     fail_missing,
     find_module_path_and_all_py3,
     generate_guarded,
+    infer_method_arg_types,
+    infer_method_ret_type,
     remove_misplaced_type_comments,
     report_missing,
     walk_packages,
@@ -140,19 +138,13 @@
     AnyType,
     CallableType,
     Instance,
-    NoneType,
     TupleType,
     Type,
-    TypeList,
-    TypeStrVisitor,
     UnboundType,
-    UnionType,
     get_proper_type,
 )
 from mypy.visitor import NodeVisitor
 
-TYPING_MODULE_NAMES: Final = ("typing", "typing_extensions")
-
 # Common ways of naming package containing vendored modules.
 VENDOR_PACKAGES: Final = ["packages", "vendor", "vendored", "_vendor", "_vendored_packages"]
 
@@ -165,32 +157,6 @@
     "/_vendored_packages/",
 ]
 
-# Special-cased names that are implicitly exported from the stub (from m import y as y).
-EXTRA_EXPORTED: Final = {
-    "pyasn1_modules.rfc2437.univ",
-    "pyasn1_modules.rfc2459.char",
-    "pyasn1_modules.rfc2459.univ",
-}
-
-# These names should be omitted from generated stubs.
-IGNORED_DUNDERS: Final = {
-    "__all__",
-    "__author__",
-    "__version__",
-    "__about__",
-    "__copyright__",
-    "__email__",
-    "__license__",
-    "__summary__",
-    "__title__",
-    "__uri__",
-    "__str__",
-    "__repr__",
-    "__getstate__",
-    "__setstate__",
-    "__slots__",
-}
-
 # These methods are expected to always return a non-trivial value.
 METHODS_WITH_RETURN_VALUE: Final = {
     "__ne__",
@@ -203,22 +169,6 @@
     "__iter__",
 }
 
-# These magic methods always return the same type.
-KNOWN_MAGIC_METHODS_RETURN_TYPES: Final = {
-    "__len__": "int",
-    "__length_hint__": "int",
-    "__init__": "None",
-    "__del__": "None",
-    "__bool__": "bool",
-    "__bytes__": "bytes",
-    "__format__": "str",
-    "__contains__": "bool",
-    "__complex__": "complex",
-    "__int__": "int",
-    "__float__": "float",
-    "__index__": "int",
-}
-
 
 class Options:
     """Represents stubgen options.
@@ -230,6 +180,7 @@ def __init__(
         self,
         pyversion: tuple[int, int],
         no_import: bool,
+        inspect: bool,
         doc_dir: str,
         search_path: list[str],
         interpreter: str,
@@ -248,6 +199,7 @@ def __init__(
         # See parse_options for descriptions of the flags.
         self.pyversion = pyversion
         self.no_import = no_import
+        self.inspect = inspect
         self.doc_dir = doc_dir
         self.search_path = search_path
         self.interpreter = interpreter
@@ -279,6 +231,9 @@ def __init__(
         self.runtime_all = runtime_all
         self.ast: MypyFile | None = None
 
+    def __repr__(self) -> str:
+        return f"StubSource({self.source})"
+
     @property
     def module(self) -> str:
         return self.source.module
@@ -303,71 +258,13 @@ def path(self) -> str | None:
 ERROR_MARKER: Final = "<ERROR>"
 
 
-class AnnotationPrinter(TypeStrVisitor):
-    """Visitor used to print existing annotations in a file.
-
-    The main difference from TypeStrVisitor is a better treatment of
-    unbound types.
-
-    Notes:
-    * This visitor doesn't add imports necessary for annotations, this is done separately
-      by ImportTracker.
-    * It can print all kinds of types, but the generated strings may not be valid (notably
-      callable types) since it prints the same string that reveal_type() does.
-    * For Instance types it prints the fully qualified names.
-    """
-
-    # TODO: Generate valid string representation for callable types.
-    # TODO: Use short names for Instances.
-    def __init__(self, stubgen: StubGenerator) -> None:
-        super().__init__(options=mypy.options.Options())
-        self.stubgen = stubgen
-
-    def visit_any(self, t: AnyType) -> str:
-        s = super().visit_any(t)
-        self.stubgen.import_tracker.require_name(s)
-        return s
-
-    def visit_unbound_type(self, t: UnboundType) -> str:
-        s = t.name
-        self.stubgen.import_tracker.require_name(s)
-        if t.args:
-            s += f"[{self.args_str(t.args)}]"
-        return s
-
-    def visit_none_type(self, t: NoneType) -> str:
-        return "None"
-
-    def visit_type_list(self, t: TypeList) -> str:
-        return f"[{self.list_str(t.items)}]"
-
-    def visit_union_type(self, t: UnionType) -> str:
-        return " | ".join([item.accept(self) for item in t.items])
-
-    def args_str(self, args: Iterable[Type]) -> str:
-        """Convert an array of arguments to strings and join the results with commas.
-
-        The main difference from list_str is the preservation of quotes for string
-        arguments
-        """
-        types = ["builtins.bytes", "builtins.str"]
-        res = []
-        for arg in args:
-            arg_str = arg.accept(self)
-            if isinstance(arg, UnboundType) and arg.original_str_fallback in types:
-                res.append(f"'{arg_str}'")
-            else:
-                res.append(arg_str)
-        return ", ".join(res)
-
-
 class AliasPrinter(NodeVisitor[str]):
     """Visitor used to collect type aliases _and_ type variable definitions.
 
     Visit r.h.s of the definition to get the string representation of type alias.
     """
 
-    def __init__(self, stubgen: StubGenerator) -> None:
+    def __init__(self, stubgen: ASTStubGenerator) -> None:
         self.stubgen = stubgen
         super().__init__()
 
@@ -435,124 +332,6 @@ def visit_op_expr(self, o: OpExpr) -> str:
         return f"{o.left.accept(self)} {o.op} {o.right.accept(self)}"
 
 
-class ImportTracker:
-    """Record necessary imports during stub generation."""
-
-    def __init__(self) -> None:
-        # module_for['foo'] has the module name where 'foo' was imported from, or None if
-        # 'foo' is a module imported directly; examples
-        #     'from pkg.m import f as foo' ==> module_for['foo'] == 'pkg.m'
-        #     'from m import f' ==> module_for['f'] == 'm'
-        #     'import m' ==> module_for['m'] == None
-        #     'import pkg.m' ==> module_for['pkg.m'] == None
-        #                    ==> module_for['pkg'] == None
-        self.module_for: dict[str, str | None] = {}
-
-        # direct_imports['foo'] is the module path used when the name 'foo' was added to the
-        # namespace.
-        #   import foo.bar.baz  ==> direct_imports['foo'] == 'foo.bar.baz'
-        #                       ==> direct_imports['foo.bar'] == 'foo.bar.baz'
-        #                       ==> direct_imports['foo.bar.baz'] == 'foo.bar.baz'
-        self.direct_imports: dict[str, str] = {}
-
-        # reverse_alias['foo'] is the name that 'foo' had originally when imported with an
-        # alias; examples
-        #     'import numpy as np' ==> reverse_alias['np'] == 'numpy'
-        #     'import foo.bar as bar' ==> reverse_alias['bar'] == 'foo.bar'
-        #     'from decimal import Decimal as D' ==> reverse_alias['D'] == 'Decimal'
-        self.reverse_alias: dict[str, str] = {}
-
-        # required_names is the set of names that are actually used in a type annotation
-        self.required_names: set[str] = set()
-
-        # Names that should be reexported if they come from another module
-        self.reexports: set[str] = set()
-
-    def add_import_from(self, module: str, names: list[tuple[str, str | None]]) -> None:
-        for name, alias in names:
-            if alias:
-                # 'from {module} import {name} as {alias}'
-                self.module_for[alias] = module
-                self.reverse_alias[alias] = name
-            else:
-                # 'from {module} import {name}'
-                self.module_for[name] = module
-                self.reverse_alias.pop(name, None)
-            self.direct_imports.pop(alias or name, None)
-
-    def add_import(self, module: str, alias: str | None = None) -> None:
-        if alias:
-            # 'import {module} as {alias}'
-            self.module_for[alias] = None
-            self.reverse_alias[alias] = module
-        else:
-            # 'import {module}'
-            name = module
-            # add module and its parent packages
-            while name:
-                self.module_for[name] = None
-                self.direct_imports[name] = module
-                self.reverse_alias.pop(name, None)
-                name = name.rpartition(".")[0]
-
-    def require_name(self, name: str) -> None:
-        while name not in self.direct_imports and "." in name:
-            name = name.rsplit(".", 1)[0]
-        self.required_names.add(name)
-
-    def reexport(self, name: str) -> None:
-        """Mark a given non qualified name as needed in __all__.
-
-        This means that in case it comes from a module, it should be
-        imported with an alias even is the alias is the same as the name.
-        """
-        self.require_name(name)
-        self.reexports.add(name)
-
-    def import_lines(self) -> list[str]:
-        """The list of required import lines (as strings with python code)."""
-        result = []
-
-        # To summarize multiple names imported from a same module, we collect those
-        # in the `module_map` dictionary, mapping a module path to the list of names that should
-        # be imported from it. the names can also be alias in the form 'original as alias'
-        module_map: Mapping[str, list[str]] = defaultdict(list)
-
-        for name in sorted(
-            self.required_names,
-            key=lambda n: (self.reverse_alias[n], n) if n in self.reverse_alias else (n, ""),
-        ):
-            # If we haven't seen this name in an import statement, ignore it
-            if name not in self.module_for:
-                continue
-
-            m = self.module_for[name]
-            if m is not None:
-                # This name was found in a from ... import ...
-                # Collect the name in the module_map
-                if name in self.reverse_alias:
-                    name = f"{self.reverse_alias[name]} as {name}"
-                elif name in self.reexports:
-                    name = f"{name} as {name}"
-                module_map[m].append(name)
-            else:
-                # This name was found in an import ...
-                # We can already generate the import line
-                if name in self.reverse_alias:
-                    source = self.reverse_alias[name]
-                    result.append(f"import {source} as {name}\n")
-                elif name in self.reexports:
-                    assert "." not in name  # Because reexports only has nonqualified names
-                    result.append(f"import {name} as {name}\n")
-                else:
-                    result.append(f"import {name}\n")
-
-        # Now generate all the from ... import ... lines collected in module_map
-        for module, names in sorted(module_map.items()):
-            result.append(f"from {module} import {', '.join(sorted(names))}\n")
-        return result
-
-
 def find_defined_names(file: MypyFile) -> set[str]:
     finder = DefinitionFinder()
     file.accept(finder)
@@ -583,6 +362,10 @@ def find_referenced_names(file: MypyFile) -> set[str]:
     return finder.refs
 
 
+def is_none_expr(expr: Expression) -> bool:
+    return isinstance(expr, NameExpr) and expr.name == "None"
+
+
 class ReferenceFinder(mypy.mixedtraverser.MixedTraverserVisitor):
     """Find all name references (both local and global)."""
 
@@ -625,74 +408,37 @@ def add_ref(self, fullname: str) -> None:
             self.refs.add(fullname)
 
 
-class StubGenerator(mypy.traverser.TraverserVisitor):
+class ASTStubGenerator(BaseStubGenerator, mypy.traverser.TraverserVisitor):
     """Generate stub text from a mypy AST."""
 
     def __init__(
         self,
-        _all_: list[str] | None,
+        _all_: list[str] | None = None,
         include_private: bool = False,
         analyzed: bool = False,
         export_less: bool = False,
         include_docstrings: bool = False,
     ) -> None:
-        # Best known value of __all__.
-        self._all_ = _all_
-        self._output: list[str] = []
+        super().__init__(_all_, include_private, export_less, include_docstrings)
         self._decorators: list[str] = []
-        self._import_lines: list[str] = []
-        # Current indent level (indent is hardcoded to 4 spaces).
-        self._indent = ""
         # Stack of defined variables (per scope).
         self._vars: list[list[str]] = [[]]
         # What was generated previously in the stub file.
         self._state = EMPTY
-        self._toplevel_names: list[str] = []
-        self._include_private = include_private
-        self._include_docstrings = include_docstrings
         self._current_class: ClassDef | None = None
-        self.import_tracker = ImportTracker()
         # Was the tree semantically analysed before?
         self.analyzed = analyzed
-        # Disable implicit exports of package-internal imports?
-        self.export_less = export_less
-        # Add imports that could be implicitly generated
-        self.import_tracker.add_import_from("typing", [("NamedTuple", None)])
-        # Names in __all__ are required
-        for name in _all_ or ():
-            if name not in IGNORED_DUNDERS:
-                self.import_tracker.reexport(name)
-        self.defined_names: set[str] = set()
         # Short names of methods defined in the body of the current class
         self.method_names: set[str] = set()
         self.processing_dataclass = False
 
     def visit_mypy_file(self, o: MypyFile) -> None:
-        self.module = o.fullname  # Current module being processed
+        self.module_name = o.fullname  # Current module being processed
         self.path = o.path
-        self.defined_names = find_defined_names(o)
+        self.set_defined_names(find_defined_names(o))
         self.referenced_names = find_referenced_names(o)
-        known_imports = {
-            "_typeshed": ["Incomplete"],
-            "typing": ["Any", "TypeVar", "NamedTuple"],
-            "collections.abc": ["Generator"],
-            "typing_extensions": ["TypedDict", "ParamSpec", "TypeVarTuple"],
-        }
-        for pkg, imports in known_imports.items():
-            for t in imports:
-                if t not in self.defined_names:
-                    alias = None
-                else:
-                    alias = "_" + t
-                self.import_tracker.add_import_from(pkg, [(t, alias)])
         super().visit_mypy_file(o)
-        undefined_names = [name for name in self._all_ or [] if name not in self._toplevel_names]
-        if undefined_names:
-            if self._state != EMPTY:
-                self.add("\n")
-            self.add("# Names in __all__ with no definition:\n")
-            for name in sorted(undefined_names):
-                self.add(f"#   {name}\n")
+        self.check_undefined_names()
 
     def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
         """@property with setters and getters, @overload chain and some others."""
@@ -714,38 +460,14 @@ def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None:
                 # skip the overload implementation and clear the decorator we just processed
                 self.clear_decorators()
 
-    def visit_func_def(self, o: FuncDef) -> None:
-        is_dataclass_generated = (
-            self.analyzed and self.processing_dataclass and o.info.names[o.name].plugin_generated
-        )
-        if is_dataclass_generated and o.name != "__init__":
-            # Skip methods generated by the @dataclass decorator (except for __init__)
-            return
-        if (
-            self.is_private_name(o.name, o.fullname)
-            or self.is_not_in_all(o.name)
-            or (self.is_recorded_name(o.name) and not o.is_overload)
-        ):
-            self.clear_decorators()
-            return
-        if not self._indent and self._state not in (EMPTY, FUNC) and not o.is_awaitable_coroutine:
-            self.add("\n")
-        if not self.is_top_level():
-            self_inits = find_self_initializers(o)
-            for init, value in self_inits:
-                if init in self.method_names:
-                    # Can't have both an attribute and a method/property with the same name.
-                    continue
-                init_code = self.get_init(init, value)
-                if init_code:
-                    self.add(init_code)
-        # dump decorators, just before "def ..."
-        for s in self._decorators:
-            self.add(s)
-        self.clear_decorators()
-        self.add(f"{self._indent}{'async ' if o.is_coroutine else ''}def {o.name}(")
-        self.record_name(o.name)
-        args: list[str] = []
+    def get_default_function_sig(self, func_def: FuncDef, ctx: FunctionContext) -> FunctionSig:
+        args = self._get_func_args(func_def, ctx)
+        retname = self._get_func_return(func_def, ctx)
+        return FunctionSig(func_def.name, args, retname)
+
+    def _get_func_args(self, o: FuncDef, ctx: FunctionContext) -> list[ArgSig]:
+        args: list[ArgSig] = []
+
         for i, arg_ in enumerate(o.arguments):
             var = arg_.variable
             kind = arg_.kind
@@ -759,87 +481,146 @@ def visit_func_def(self, o: FuncDef) -> None:
             # name their 0th argument other than self/cls
             is_self_arg = i == 0 and name == "self"
             is_cls_arg = i == 0 and name == "cls"
-            annotation = ""
+            typename: str | None = None
             if annotated_type and not is_self_arg and not is_cls_arg:
                 # Luckily, an argument explicitly annotated with "Any" has
                 # type "UnboundType" and will not match.
                 if not isinstance(get_proper_type(annotated_type), AnyType):
-                    annotation = f": {self.print_annotation(annotated_type)}"
+                    typename = self.print_annotation(annotated_type)
 
-            if kind.is_named() and not any(arg.startswith("*") for arg in args):
-                args.append("*")
+            if kind.is_named() and not any(arg.name.startswith("*") for arg in args):
+                args.append(ArgSig("*"))
 
             if arg_.initializer:
-                if not annotation:
+                if not typename:
                     typename = self.get_str_type_of_node(arg_.initializer, True, False)
-                    if typename == "":
-                        annotation = "=..."
-                    else:
-                        annotation = f": {typename} = ..."
-                else:
-                    annotation += " = ..."
-                arg = name + annotation
             elif kind == ARG_STAR:
-                arg = f"*{name}{annotation}"
+                name = f"*{name}"
             elif kind == ARG_STAR2:
-                arg = f"**{name}{annotation}"
-            else:
-                arg = name + annotation
-            args.append(arg)
-        if o.name == "__init__" and is_dataclass_generated and "**" in args:
-            # The dataclass plugin generates invalid nameless "*" and "**" arguments
-            new_name = "".join(a.split(":", 1)[0] for a in args).replace("*", "")
-            args[args.index("*")] = f"*{new_name}_"  # this name is guaranteed to be unique
-            args[args.index("**")] = f"**{new_name}__"  # same here
+                name = f"**{name}"
+
+            args.append(ArgSig(name, typename, default=bool(arg_.initializer)))
+
+        if ctx.class_info is not None and all(
+            arg.type is None and arg.default is False for arg in args
+        ):
+            new_args = infer_method_arg_types(
+                ctx.name, ctx.class_info.self_var, [arg.name for arg in args]
+            )
+            if new_args is not None:
+                args = new_args
 
-        retname = None
+        is_dataclass_generated = (
+            self.analyzed and self.processing_dataclass and o.info.names[o.name].plugin_generated
+        )
+        if o.name == "__init__" and is_dataclass_generated and "**" in [a.name for a in args]:
+            # The dataclass plugin generates invalid nameless "*" and "**" arguments
+            new_name = "".join(a.name.strip("*") for a in args)
+            for arg in args:
+                if arg.name == "*":
+                    arg.name = f"*{new_name}_"  # this name is guaranteed to be unique
+                elif arg.name == "**":
+                    arg.name = f"**{new_name}__"  # same here
+        return args
+
+    def _get_func_return(self, o: FuncDef, ctx: FunctionContext) -> str | None:
         if o.name != "__init__" and isinstance(o.unanalyzed_type, CallableType):
             if isinstance(get_proper_type(o.unanalyzed_type.ret_type), AnyType):
                 # Luckily, a return type explicitly annotated with "Any" has
                 # type "UnboundType" and will enter the else branch.
-                retname = None  # implicit Any
+                return None  # implicit Any
             else:
-                retname = self.print_annotation(o.unanalyzed_type.ret_type)
-        elif o.abstract_status == IS_ABSTRACT or o.name in METHODS_WITH_RETURN_VALUE:
+                return self.print_annotation(o.unanalyzed_type.ret_type)
+        if o.abstract_status == IS_ABSTRACT or o.name in METHODS_WITH_RETURN_VALUE:
             # Always assume abstract methods return Any unless explicitly annotated. Also
             # some dunder methods should not have a None return type.
-            retname = None  # implicit Any
-        elif o.name in KNOWN_MAGIC_METHODS_RETURN_TYPES:
-            retname = KNOWN_MAGIC_METHODS_RETURN_TYPES[o.name]
-        elif has_yield_expression(o) or has_yield_from_expression(o):
-            generator_name = self.add_typing_import("Generator")
+            return None  # implicit Any
+        retname = infer_method_ret_type(o.name)
+        if retname is not None:
+            return retname
+        if has_yield_expression(o) or has_yield_from_expression(o):
+            generator_name = self.add_name("collections.abc.Generator")
             yield_name = "None"
             send_name = "None"
             return_name = "None"
             if has_yield_from_expression(o):
-                yield_name = send_name = self.add_typing_import("Incomplete")
+                yield_name = send_name = self.add_name("_typeshed.Incomplete")
             else:
                 for expr, in_assignment in all_yield_expressions(o):
-                    if expr.expr is not None and not self.is_none_expr(expr.expr):
-                        yield_name = self.add_typing_import("Incomplete")
+                    if expr.expr is not None and not is_none_expr(expr.expr):
+                        yield_name = self.add_name("_typeshed.Incomplete")
                     if in_assignment:
-                        send_name = self.add_typing_import("Incomplete")
+                        send_name = self.add_name("_typeshed.Incomplete")
             if has_return_statement(o):
-                return_name = self.add_typing_import("Incomplete")
-            retname = f"{generator_name}[{yield_name}, {send_name}, {return_name}]"
-        elif not has_return_statement(o) and o.abstract_status == NOT_ABSTRACT:
-            retname = "None"
-        retfield = ""
-        if retname is not None:
-            retfield = " -> " + retname
+                return_name = self.add_name("_typeshed.Incomplete")
+            return f"{generator_name}[{yield_name}, {send_name}, {return_name}]"
+        if not has_return_statement(o) and o.abstract_status == NOT_ABSTRACT:
+            return "None"
+        return None
+
+    def _get_func_docstring(self, node: FuncDef) -> str | None:
+        if not node.body.body:
+            return None
+        expr = node.body.body[0]
+        if isinstance(expr, ExpressionStmt) and isinstance(expr.expr, StrExpr):
+            return expr.expr.value
+        return None
 
-        self.add(", ".join(args))
-        self.add(f"){retfield}:")
-        if self._include_docstrings and o.docstring:
-            docstring = mypy.util.quote_docstring(o.docstring)
-            self.add(f"\n{self._indent}    {docstring}\n")
+    def visit_func_def(self, o: FuncDef) -> None:
+        is_dataclass_generated = (
+            self.analyzed and self.processing_dataclass and o.info.names[o.name].plugin_generated
+        )
+        if is_dataclass_generated and o.name != "__init__":
+            # Skip methods generated by the @dataclass decorator (except for __init__)
+            return
+        if (
+            self.is_private_name(o.name, o.fullname)
+            or self.is_not_in_all(o.name)
+            or (self.is_recorded_name(o.name) and not o.is_overload)
+        ):
+            self.clear_decorators()
+            return
+        if self.is_top_level() and self._state not in (EMPTY, FUNC):
+            self.add("\n")
+        if not self.is_top_level():
+            self_inits = find_self_initializers(o)
+            for init, value in self_inits:
+                if init in self.method_names:
+                    # Can't have both an attribute and a method/property with the same name.
+                    continue
+                init_code = self.get_init(init, value)
+                if init_code:
+                    self.add(init_code)
+
+        if self._current_class is not None:
+            if len(o.arguments):
+                self_var = o.arguments[0].variable.name
+            else:
+                self_var = "self"
+            class_info = ClassInfo(self._current_class.name, self_var)
         else:
-            self.add(" ...\n")
+            class_info = None
+
+        ctx = FunctionContext(
+            module_name=self.module_name,
+            name=o.name,
+            docstring=self._get_func_docstring(o),
+            is_abstract=o.abstract_status != NOT_ABSTRACT,
+            class_info=class_info,
+        )
 
-        self._state = FUNC
+        self.record_name(o.name)
 
-    def is_none_expr(self, expr: Expression) -> bool:
-        return isinstance(expr, NameExpr) and expr.name == "None"
+        default_sig = self.get_default_function_sig(o, ctx)
+        sigs = self.get_signatures(default_sig, self.sig_generators, ctx)
+
+        for output in self.format_func_def(
+            sigs, is_coroutine=o.is_coroutine, decorators=self._decorators, docstring=ctx.docstring
+        ):
+            self.add(output + "\n")
+
+        self.clear_decorators()
+        self._state = FUNC
 
     def visit_decorator(self, o: Decorator) -> None:
         if self.is_private_name(o.func.name, o.func.fullname):
@@ -917,13 +698,12 @@ def visit_class_def(self, o: ClassDef) -> None:
         self._current_class = o
         self.method_names = find_method_names(o.defs.body)
         sep: int | None = None
-        if not self._indent and self._state != EMPTY:
+        if self.is_top_level() and self._state != EMPTY:
             sep = len(self._output)
             self.add("\n")
         decorators = self.get_class_decorators(o)
         for d in decorators:
             self.add(f"{self._indent}@{d}\n")
-        self.add(f"{self._indent}class {o.name}")
         self.record_name(o.name)
         base_types = self.get_base_types(o)
         if base_types:
@@ -936,17 +716,16 @@ def visit_class_def(self, o: ClassDef) -> None:
             base_types.append("metaclass=abc.ABCMeta")
             self.import_tracker.add_import("abc")
             self.import_tracker.require_name("abc")
-        if base_types:
-            self.add(f"({', '.join(base_types)})")
-        self.add(":\n")
-        self._indent += "    "
+        bases = f"({', '.join(base_types)})" if base_types else ""
+        self.add(f"{self._indent}class {o.name}{bases}:\n")
+        self.indent()
         if self._include_docstrings and o.docstring:
             docstring = mypy.util.quote_docstring(o.docstring)
             self.add(f"{self._indent}{docstring}\n")
         n = len(self._output)
         self._vars.append([])
         super().visit_class_def(o)
-        self._indent = self._indent[:-4]
+        self.dedent()
         self._vars.pop()
         self._vars[-1].append(o.name)
         if len(self._output) == n:
@@ -987,17 +766,17 @@ def get_base_types(self, cdef: ClassDef) -> list[str]:
                     typename = base.args[0].value
                     if nt_fields is None:
                         # Invalid namedtuple() call, cannot determine fields
-                        base_types.append(self.add_typing_import("Incomplete"))
+                        base_types.append(self.add_name("_typeshed.Incomplete"))
                         continue
                     fields_str = ", ".join(f"({f!r}, {t})" for f, t in nt_fields)
-                    namedtuple_name = self.add_typing_import("NamedTuple")
+                    namedtuple_name = self.add_name("typing.NamedTuple")
                     base_types.append(f"{namedtuple_name}({typename!r}, [{fields_str}])")
                 elif self.is_typed_namedtuple(base):
                     base_types.append(base.accept(p))
                 else:
                     # At this point, we don't know what the base class is, so we
                     # just use Incomplete as the base class.
-                    base_types.append(self.add_typing_import("Incomplete"))
+                    base_types.append(self.add_name("_typeshed.Incomplete"))
         for name, value in cdef.keywords.items():
             if name == "metaclass":
                 continue  # handled separately
@@ -1063,7 +842,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
                     init = self.get_init(item.name, o.rvalue, annotation)
                     if init:
                         found = True
-                        if not sep and not self._indent and self._state not in (EMPTY, VAR):
+                        if not sep and self.is_top_level() and self._state not in (EMPTY, VAR):
                             init = "\n" + init
                             sep = True
                         self.add(init)
@@ -1092,10 +871,12 @@ def _get_namedtuple_fields(self, call: CallExpr) -> list[tuple[str, str]] | None
                     field_names.append(field.value)
             else:
                 return None  # Invalid namedtuple fields type
-            if not field_names:
+            if field_names:
+                incomplete = self.add_name("_typeshed.Incomplete")
+                return [(field_name, incomplete) for field_name in field_names]
+            else:
                 return []
-            incomplete = self.add_typing_import("Incomplete")
-            return [(field_name, incomplete) for field_name in field_names]
+
         elif self.is_typed_namedtuple(call):
             fields_arg = call.args[1]
             if not isinstance(fields_arg, (ListExpr, TupleExpr)):
@@ -1125,7 +906,7 @@ def process_namedtuple(self, lvalue: NameExpr, rvalue: CallExpr) -> None:
         if fields is None:
             self.annotate_as_incomplete(lvalue)
             return
-        bases = self.add_typing_import("NamedTuple")
+        bases = self.add_name("typing.NamedTuple")
         # TODO: Add support for generic NamedTuples. Requires `Generic` as base class.
         class_def = f"{self._indent}class {lvalue.name}({bases}):"
         if len(fields) == 0:
@@ -1175,13 +956,13 @@ def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None:
                     total = arg
                 else:
                     items.append((arg_name, arg))
-        bases = self.add_typing_import("TypedDict")
         p = AliasPrinter(self)
         if any(not key.isidentifier() or keyword.iskeyword(key) for key, _ in items):
             # Keep the call syntax if there are non-identifier or reserved keyword keys.
             self.add(f"{self._indent}{lvalue.name} = {rvalue.accept(p)}\n")
             self._state = VAR
         else:
+            bases = self.add_name("typing_extensions.TypedDict")
             # TODO: Add support for generic TypedDicts. Requires `Generic` as base class.
             if total is not None:
                 bases += f", total={total.accept(p)}"
@@ -1198,7 +979,8 @@ def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None:
                 self._state = CLASS
 
     def annotate_as_incomplete(self, lvalue: NameExpr) -> None:
-        self.add(f"{self._indent}{lvalue.name}: {self.add_typing_import('Incomplete')}\n")
+        incomplete = self.add_name("_typeshed.Incomplete")
+        self.add(f"{self._indent}{lvalue.name}: {incomplete}\n")
         self._state = VAR
 
     def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool:
@@ -1280,9 +1062,9 @@ def visit_import_from(self, o: ImportFrom) -> None:
         exported_names: set[str] = set()
         import_names = []
         module, relative = translate_module_name(o.id, o.relative)
-        if self.module:
+        if self.module_name:
             full_module, ok = mypy.util.correct_relative_import(
-                self.module, relative, module, self.path.endswith(".__init__.py")
+                self.module_name, relative, module, self.path.endswith(".__init__.py")
             )
             if not ok:
                 full_module = module
@@ -1295,37 +1077,7 @@ def visit_import_from(self, o: ImportFrom) -> None:
                 # Vendored six -- translate into plain 'import six'.
                 self.visit_import(Import([("six", None)]))
                 continue
-            exported = False
-            if as_name is None and self.module and (self.module + "." + name) in EXTRA_EXPORTED:
-                # Special case certain names that should be exported, against our general rules.
-                exported = True
-            is_private = self.is_private_name(name, full_module + "." + name)
-            if (
-                as_name is None
-                and name not in self.referenced_names
-                and not any(n.startswith(name + ".") for n in self.referenced_names)
-                and (not self._all_ or name in IGNORED_DUNDERS)
-                and not is_private
-                and module not in ("abc", "asyncio") + TYPING_MODULE_NAMES
-            ):
-                # An imported name that is never referenced in the module is assumed to be
-                # exported, unless there is an explicit __all__. Note that we need to special
-                # case 'abc' since some references are deleted during semantic analysis.
-                exported = True
-            top_level = full_module.split(".", 1)[0]
-            self_top_level = self.module.split(".", 1)[0]
-            if (
-                as_name is None
-                and not self.export_less
-                and (not self._all_ or name in IGNORED_DUNDERS)
-                and self.module
-                and not is_private
-                and top_level in (self_top_level, "_" + self_top_level)
-            ):
-                # Export imports from the same package, since we can't reliably tell whether they
-                # are part of the public API.
-                exported = True
-            if exported:
+            if self.should_reexport(name, full_module, as_name is not None):
                 self.import_tracker.reexport(name)
                 as_name = name
             import_names.append((name, as_name))
@@ -1339,7 +1091,7 @@ def visit_import_from(self, o: ImportFrom) -> None:
             names = [
                 name
                 for name, alias in o.names
-                if name in self._all_ and alias is None and name not in IGNORED_DUNDERS
+                if name in self._all_ and alias is None and name not in self.IGNORED_DUNDERS
             ]
             exported_names.update(names)
 
@@ -1373,7 +1125,7 @@ def get_init(
                 isinstance(annotation, UnboundType)
                 and not annotation.args
                 and annotation.name == "Final"
-                and self.import_tracker.module_for.get("Final") in TYPING_MODULE_NAMES
+                and self.import_tracker.module_for.get("Final") in self.TYPING_MODULE_NAMES
             ):
                 # Final without type argument is invalid in stubs.
                 final_arg = self.get_str_type_of_node(rvalue)
@@ -1406,67 +1158,14 @@ def get_assign_initializer(self, rvalue: Expression) -> str:
         # By default, no initializer is required:
         return ""
 
-    def add(self, string: str) -> None:
-        """Add text to generated stub."""
-        self._output.append(string)
-
     def add_decorator(self, name: str, require_name: bool = False) -> None:
         if require_name:
             self.import_tracker.require_name(name)
-        if not self._indent and self._state not in (EMPTY, FUNC):
-            self._decorators.append("\n")
-        self._decorators.append(f"{self._indent}@{name}\n")
+        self._decorators.append(f"@{name}")
 
     def clear_decorators(self) -> None:
         self._decorators.clear()
 
-    def typing_name(self, name: str) -> str:
-        if name in self.defined_names:
-            # Avoid name clash between name from typing and a name defined in stub.
-            return "_" + name
-        else:
-            return name
-
-    def add_typing_import(self, name: str) -> str:
-        """Add a name to be imported for typing, unless it's imported already.
-
-        The import will be internal to the stub.
-        """
-        name = self.typing_name(name)
-        self.import_tracker.require_name(name)
-        return name
-
-    def add_import_line(self, line: str) -> None:
-        """Add a line of text to the import section, unless it's already there."""
-        if line not in self._import_lines:
-            self._import_lines.append(line)
-
-    def output(self) -> str:
-        """Return the text for the stub."""
-        imports = ""
-        if self._import_lines:
-            imports += "".join(self._import_lines)
-        imports += "".join(self.import_tracker.import_lines())
-        if imports and self._output:
-            imports += "\n"
-        return imports + "".join(self._output)
-
-    def is_not_in_all(self, name: str) -> bool:
-        if self.is_private_name(name):
-            return False
-        if self._all_:
-            return self.is_top_level() and name not in self._all_
-        return False
-
-    def is_private_name(self, name: str, fullname: str | None = None) -> bool:
-        if self._include_private:
-            return False
-        if fullname in EXTRA_EXPORTED:
-            return False
-        if name == "_":
-            return False
-        return name.startswith("_") and (not name.endswith("__") or name in IGNORED_DUNDERS)
-
     def is_private_member(self, fullname: str) -> bool:
         parts = fullname.split(".")
         return any(self.is_private_name(part) for part in parts)
@@ -1494,9 +1193,9 @@ def get_str_type_of_node(
         if isinstance(rvalue, NameExpr) and rvalue.name in ("True", "False"):
             return "bool"
         if can_infer_optional and isinstance(rvalue, NameExpr) and rvalue.name == "None":
-            return f"{self.add_typing_import('Incomplete')} | None"
+            return f"{self.add_name('_typeshed.Incomplete')} | None"
         if can_be_any:
-            return self.add_typing_import("Incomplete")
+            return self.add_name("_typeshed.Incomplete")
         else:
             return ""
 
@@ -1534,25 +1233,20 @@ def maybe_unwrap_unary_expr(self, expr: Expression) -> Expression:
         # This is some other unary expr, we cannot do anything with it (yet?).
         return expr
 
-    def print_annotation(self, t: Type) -> str:
-        printer = AnnotationPrinter(self)
-        return t.accept(printer)
-
-    def is_top_level(self) -> bool:
-        """Are we processing the top level of a file?"""
-        return self._indent == ""
-
-    def record_name(self, name: str) -> None:
-        """Mark a name as defined.
-
-        This only does anything if at the top level of a module.
-        """
-        if self.is_top_level():
-            self._toplevel_names.append(name)
-
-    def is_recorded_name(self, name: str) -> bool:
-        """Has this name been recorded previously?"""
-        return self.is_top_level() and name in self._toplevel_names
+    def should_reexport(self, name: str, full_module: str, name_is_alias: bool) -> bool:
+        is_private = self.is_private_name(name, full_module + "." + name)
+        if (
+            not name_is_alias
+            and name not in self.referenced_names
+            and (not self._all_ or name in self.IGNORED_DUNDERS)
+            and not is_private
+            and full_module not in ("abc", "asyncio") + self.TYPING_MODULE_NAMES
+        ):
+            # An imported name that is never referenced in the module is assumed to be
+            # exported, unless there is an explicit __all__. Note that we need to special
+            # case 'abc' since some references are deleted during semantic analysis.
+            return True
+        return super().should_reexport(name, full_module, name_is_alias)
 
 
 def find_method_names(defs: list[Statement]) -> set[str]:
@@ -1608,6 +1302,17 @@ def remove_blacklisted_modules(modules: list[StubSource]) -> list[StubSource]:
     ]
 
 
+def split_pyc_from_py(modules: list[StubSource]) -> tuple[list[StubSource], list[StubSource]]:
+    py_modules = []
+    pyc_modules = []
+    for mod in modules:
+        if is_pyc_only(mod.path):
+            pyc_modules.append(mod)
+        else:
+            py_modules.append(mod)
+    return pyc_modules, py_modules
+
+
 def is_blacklisted_path(path: str) -> bool:
     return any(substr in (normalize_path_separators(path) + "\n") for substr in BLACKLIST)
 
@@ -1620,10 +1325,10 @@ def normalize_path_separators(path: str) -> str:
 
 def collect_build_targets(
     options: Options, mypy_opts: MypyOptions
-) -> tuple[list[StubSource], list[StubSource]]:
+) -> tuple[list[StubSource], list[StubSource], list[StubSource]]:
     """Collect files for which we need to generate stubs.
 
-    Return list of Python modules and C modules.
+    Return list of py modules, pyc modules, and C modules.
     """
     if options.packages or options.modules:
         if options.no_import:
@@ -1646,8 +1351,8 @@ def collect_build_targets(
         c_modules = []
 
     py_modules = remove_blacklisted_modules(py_modules)
-
-    return py_modules, c_modules
+    pyc_mod, py_mod = split_pyc_from_py(py_modules)
+    return py_mod, pyc_mod, c_modules
 
 
 def find_module_paths_using_imports(
@@ -1826,98 +1531,90 @@ def generate_asts_for_modules(
             mod.runtime_all = res.manager.semantic_analyzer.export_map[mod.module]
 
 
-def generate_stub_from_ast(
+def generate_stub_for_py_module(
     mod: StubSource,
     target: str,
+    *,
     parse_only: bool = False,
+    inspect: bool = False,
     include_private: bool = False,
     export_less: bool = False,
     include_docstrings: bool = False,
+    doc_dir: str = "",
+    all_modules: list[str],
 ) -> None:
     """Use analysed (or just parsed) AST to generate type stub for single file.
 
     If directory for target doesn't exist it will created. Existing stub
     will be overwritten.
     """
-    gen = StubGenerator(
-        mod.runtime_all,
-        include_private=include_private,
-        analyzed=not parse_only,
-        export_less=export_less,
-        include_docstrings=include_docstrings,
-    )
-    assert mod.ast is not None, "This function must be used only with analyzed modules"
-    mod.ast.accept(gen)
+    if inspect:
+        ngen = InspectionStubGenerator(
+            module_name=mod.module,
+            known_modules=all_modules,
+            _all_=mod.runtime_all,
+            doc_dir=doc_dir,
+            include_private=include_private,
+            export_less=export_less,
+            include_docstrings=include_docstrings,
+        )
+        ngen.generate_module()
+        output = ngen.output()
+
+    else:
+        gen = ASTStubGenerator(
+            mod.runtime_all,
+            include_private=include_private,
+            analyzed=not parse_only,
+            export_less=export_less,
+            include_docstrings=include_docstrings,
+        )
+        assert mod.ast is not None, "This function must be used only with analyzed modules"
+        mod.ast.accept(gen)
+        output = gen.output()
 
     # Write output to file.
     subdir = os.path.dirname(target)
     if subdir and not os.path.isdir(subdir):
         os.makedirs(subdir)
     with open(target, "w") as file:
-        file.write("".join(gen.output()))
-
-
-def get_sig_generators(options: Options) -> list[SignatureGenerator]:
-    sig_generators: list[SignatureGenerator] = [
-        DocstringSignatureGenerator(),
-        FallbackSignatureGenerator(),
-    ]
-    if options.doc_dir:
-        # Collect info from docs (if given). Always check these first.
-        sigs, class_sigs = collect_docs_signatures(options.doc_dir)
-        sig_generators.insert(0, ExternalSignatureGenerator(sigs, class_sigs))
-    return sig_generators
-
-
-def collect_docs_signatures(doc_dir: str) -> tuple[dict[str, str], dict[str, str]]:
-    """Gather all function and class signatures in the docs.
-
-    Return a tuple (function signatures, class signatures).
-    Currently only used for C modules.
-    """
-    all_sigs: list[Sig] = []
-    all_class_sigs: list[Sig] = []
-    for path in glob.glob(f"{doc_dir}/*.rst"):
-        with open(path) as f:
-            loc_sigs, loc_class_sigs = parse_all_signatures(f.readlines())
-        all_sigs += loc_sigs
-        all_class_sigs += loc_class_sigs
-    sigs = dict(find_unique_signatures(all_sigs))
-    class_sigs = dict(find_unique_signatures(all_class_sigs))
-    return sigs, class_sigs
+        file.write(output)
 
 
 def generate_stubs(options: Options) -> None:
     """Main entry point for the program."""
     mypy_opts = mypy_options(options)
-    py_modules, c_modules = collect_build_targets(options, mypy_opts)
-    sig_generators = get_sig_generators(options)
+    py_modules, pyc_modules, c_modules = collect_build_targets(options, mypy_opts)
+    all_modules = py_modules + pyc_modules + c_modules
+    all_module_names = sorted(m.module for m in all_modules)
     # Use parsed sources to generate stubs for Python modules.
     generate_asts_for_modules(py_modules, options.parse_only, mypy_opts, options.verbose)
     files = []
-    for mod in py_modules:
+    for mod in py_modules + pyc_modules:
         assert mod.path is not None, "Not found module was not skipped"
         target = mod.module.replace(".", "/")
-        if os.path.basename(mod.path) == "__init__.py":
+        if os.path.basename(mod.path) in ["__init__.py", "__init__.pyc"]:
             target += "/__init__.pyi"
         else:
             target += ".pyi"
         target = os.path.join(options.output_dir, target)
         files.append(target)
         with generate_guarded(mod.module, target, options.ignore_errors, options.verbose):
-            generate_stub_from_ast(
+            generate_stub_for_py_module(
                 mod,
                 target,
-                options.parse_only,
-                options.include_private,
-                options.export_less,
+                parse_only=options.parse_only,
+                inspect=options.inspect or mod in pyc_modules,
+                include_private=options.include_private,
+                export_less=options.export_less,
                 include_docstrings=options.include_docstrings,
+                doc_dir=options.doc_dir,
+                all_modules=all_module_names,
             )
 
     # Separately analyse C modules using different logic.
-    all_modules = sorted(m.module for m in (py_modules + c_modules))
     for mod in c_modules:
-        if any(py_mod.module.startswith(mod.module + ".") for py_mod in py_modules + c_modules):
+        if any(py_mod.module.startswith(mod.module + ".") for py_mod in all_modules):
             target = mod.module.replace(".", "/") + "/__init__.pyi"
         else:
             target = mod.module.replace(".", "/") + ".pyi"
@@ -1927,11 +1624,12 @@ def generate_stubs(options: Options) -> None:
             generate_stub_for_c_module(
                 mod.module,
                 target,
-                known_modules=all_modules,
-                sig_generators=sig_generators,
-                include_docstrings=options.include_docstrings,
+                known_modules=all_module_names,
+                doc_dir=options.doc_dir,
+                include_private=options.include_private,
+                export_less=options.export_less,
             )
-    num_modules = len(py_modules) + len(c_modules)
+    num_modules = len(all_modules)
     if not options.quiet and num_modules > 0:
         print("Processed %d modules" % num_modules)
         if len(files) == 1:
@@ -1967,10 +1665,21 @@ def parse_options(args: list[str]) -> Options:
         "respect __all__)",
     )
     parser.add_argument(
+        "--no-analysis",
         "--parse-only",
+        dest="parse_only",
         action="store_true",
         help="don't perform semantic analysis of sources, just parse them "
-        "(only applies to Python modules, might affect quality of stubs)",
+        "(only applies to Python modules, might affect quality of stubs. "
+        "Not compatible with --inspect-mode)",
+    )
+    parser.add_argument(
+        "--inspect-mode",
+        dest="inspect",
+        action="store_true",
+        help="import and inspect modules instead of parsing source code. "
+        "This is the default behavior for C modules and pyc-only packages, but "
+        "it is also useful for pure Python modules with dynamically generated members.",
     )
     parser.add_argument(
         "--include-private",
@@ -2047,6 +1756,8 @@ def parse_options(args: list[str]) -> Options:
         parser.error("May only specify one of: modules/packages or files.")
     if ns.quiet and ns.verbose:
         parser.error("Cannot specify both quiet and verbose messages")
+    if ns.inspect and ns.parse_only:
+        parser.error("Cannot specify both --parse-only/--no-analysis and --inspect-mode")
 
     # Create the output folder if it doesn't already exist.
     if not os.path.exists(ns.output_dir):
@@ -2055,6 +1766,7 @@ def parse_options(args: list[str]) -> Options:
     return Options(
         pyversion=pyversion,
         no_import=ns.no_import,
+        inspect=ns.inspect,
         doc_dir=ns.doc_dir,
         search_path=ns.search_path.split(":"),
         interpreter=ns.interpreter,
diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py
index 31487f9d0dcf..0ad79a4265b3 100755
--- a/mypy/stubgenc.py
+++ b/mypy/stubgenc.py
@@ -6,68 +6,38 @@
 
 from __future__ import annotations
 
+import glob
 import importlib
 import inspect
+import keyword
 import os.path
-import re
-from abc import abstractmethod
-from types import ModuleType
-from typing import Any, Final, Iterable, Mapping
+from types import FunctionType, ModuleType
+from typing import Any, Mapping
 
-import mypy.util
+from mypy.fastparse import parse_type_comment
 from mypy.moduleinspect import is_c_module
 from mypy.stubdoc import (
     ArgSig,
     FunctionSig,
+    Sig,
+    find_unique_signatures,
     infer_arg_sig_from_anon_docstring,
     infer_prop_type_from_docstring,
     infer_ret_type_sig_from_anon_docstring,
     infer_ret_type_sig_from_docstring,
     infer_sig_from_docstring,
+    parse_all_signatures,
 )
-
-# Members of the typing module to consider for importing by default.
-_DEFAULT_TYPING_IMPORTS: Final = (
-    "Any",
-    "Callable",
-    "ClassVar",
-    "Dict",
-    "Iterable",
-    "Iterator",
-    "List",
-    "Optional",
-    "Tuple",
-    "Union",
+from mypy.stubutil import (
+    BaseStubGenerator,
+    ClassInfo,
+    FunctionContext,
+    SignatureGenerator,
+    infer_method_arg_types,
+    infer_method_ret_type,
 )
 
 
-class SignatureGenerator:
-    """Abstract base class for extracting a list of FunctionSigs for each function."""
-
-    def remove_self_type(
-        self, inferred: list[FunctionSig] | None, self_var: str
-    ) -> list[FunctionSig] | None:
-        """Remove type annotation from self/cls argument"""
-        if inferred:
-            for signature in inferred:
-                if signature.args:
-                    if signature.args[0].name == self_var:
-                        signature.args[0].type = None
-        return inferred
-
-    @abstractmethod
-    def get_function_sig(
-        self, func: object, module_name: str, name: str
-    ) -> list[FunctionSig] | None:
-        pass
-
-    @abstractmethod
-    def get_method_sig(
-        self, cls: type, func: object, module_name: str, class_name: str, name: str, self_var: str
-    ) -> list[FunctionSig] | None:
-        pass
-
-
 class ExternalSignatureGenerator(SignatureGenerator):
     def __init__(
         self, func_sigs: dict[str, str] | None = None, class_sigs: dict[str, str] | None = None
@@ -79,97 +49,104 @@ class signatures (usually corresponds to __init__).
         self.func_sigs = func_sigs or {}
         self.class_sigs = class_sigs or {}
 
-    def get_function_sig(
-        self, func: object, module_name: str, name: str
-    ) -> list[FunctionSig] | None:
-        if name in self.func_sigs:
-            return [
-                FunctionSig(
-                    name=name,
-                    args=infer_arg_sig_from_anon_docstring(self.func_sigs[name]),
-                    ret_type="Any",
-                )
-            ]
-        else:
-            return None
+    @classmethod
+    def from_doc_dir(cls, doc_dir: str) -> ExternalSignatureGenerator:
+        """Instantiate from a directory of .rst files."""
+        all_sigs: list[Sig] = []
+        all_class_sigs: list[Sig] = []
+        for path in glob.glob(f"{doc_dir}/*.rst"):
+            with open(path) as f:
+                loc_sigs, loc_class_sigs = parse_all_signatures(f.readlines())
+            all_sigs += loc_sigs
+            all_class_sigs += loc_class_sigs
+        sigs = dict(find_unique_signatures(all_sigs))
+        class_sigs = dict(find_unique_signatures(all_class_sigs))
+        return ExternalSignatureGenerator(sigs, class_sigs)
 
-    def get_method_sig(
-        self, cls: type, func: object, module_name: str, class_name: str, name: str, self_var: str
+    def get_function_sig(
+        self, default_sig: FunctionSig, ctx: FunctionContext
     ) -> list[FunctionSig] | None:
+        # method:
         if (
-            name in ("__new__", "__init__")
-            and name not in self.func_sigs
-            and class_name in self.class_sigs
+            ctx.class_info
+            and ctx.name in ("__new__", "__init__")
+            and ctx.name not in self.func_sigs
+            and ctx.class_info.name in self.class_sigs
         ):
             return [
                 FunctionSig(
-                    name=name,
-                    args=infer_arg_sig_from_anon_docstring(self.class_sigs[class_name]),
-                    ret_type=infer_method_ret_type(name),
+                    name=ctx.name,
+                    args=infer_arg_sig_from_anon_docstring(self.class_sigs[ctx.class_info.name]),
+                    ret_type=infer_method_ret_type(ctx.name),
                 )
             ]
-        inferred = self.get_function_sig(func, module_name, name)
-        return self.remove_self_type(inferred, self_var)
+
+        # function:
+        if ctx.name not in self.func_sigs:
+            return None
+
+        inferred = [
+            FunctionSig(
+                name=ctx.name,
+                args=infer_arg_sig_from_anon_docstring(self.func_sigs[ctx.name]),
+                ret_type=None,
+            )
+        ]
+        if ctx.class_info:
+            return self.remove_self_type(inferred, ctx.class_info.self_var)
+        else:
+            return inferred
+
+    def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> str | None:
+        return None
 
 
 class DocstringSignatureGenerator(SignatureGenerator):
     def get_function_sig(
-        self, func: object, module_name: str, name: str
+        self, default_sig: FunctionSig, ctx: FunctionContext
     ) -> list[FunctionSig] | None:
-        docstr = getattr(func, "__doc__", None)
-        inferred = infer_sig_from_docstring(docstr, name)
+        inferred = infer_sig_from_docstring(ctx.docstring, ctx.name)
         if inferred:
-            assert docstr is not None
-            if is_pybind11_overloaded_function_docstring(docstr, name):
+            assert ctx.docstring is not None
+            if is_pybind11_overloaded_function_docstring(ctx.docstring, ctx.name):
                 # Remove pybind11 umbrella (*args, **kwargs) for overloaded functions
                 del inferred[-1]
-        return inferred
 
-    def get_method_sig(
-        self,
-        cls: type,
-        func: object,
-        module_name: str,
-        class_name: str,
-        func_name: str,
-        self_var: str,
-    ) -> list[FunctionSig] | None:
-        inferred = self.get_function_sig(func, module_name, func_name)
-        if not inferred and func_name == "__init__":
-            # look for class-level constructor signatures of the form <class_name>(<signature>)
-            inferred = self.get_function_sig(cls, module_name, class_name)
-        return self.remove_self_type(inferred, self_var)
+        if ctx.class_info:
+            if not inferred and ctx.name == "__init__":
+                # look for class-level constructor signatures of the form <class_name>(<signature>)
+                inferred = infer_sig_from_docstring(ctx.class_info.docstring, ctx.class_info.name)
+                if inferred:
+                    inferred = [sig._replace(name="__init__") for sig in inferred]
+            return self.remove_self_type(inferred, ctx.class_info.self_var)
+        else:
+            return inferred
 
+    def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> str | None:
+        """Infer property type from docstring or docstring signature."""
+        if ctx.docstring is not None:
+            inferred = infer_ret_type_sig_from_anon_docstring(ctx.docstring)
+            if not inferred:
+                inferred = infer_ret_type_sig_from_docstring(ctx.docstring, ctx.name)
+            if not inferred:
+                inferred = infer_prop_type_from_docstring(ctx.docstring)
+            return inferred
+        else:
+            return None
 
-class FallbackSignatureGenerator(SignatureGenerator):
-    def get_function_sig(
-        self, func: object, module_name: str, name: str
-    ) -> list[FunctionSig] | None:
-        return [
-            FunctionSig(
-                name=name,
-                args=infer_arg_sig_from_anon_docstring("(*args, **kwargs)"),
-                ret_type="Any",
-            )
-        ]
 
-    def get_method_sig(
-        self, cls: type, func: object, module_name: str, class_name: str, name: str, self_var: str
-    ) -> list[FunctionSig] | None:
-        return [
-            FunctionSig(
-                name=name,
-                args=infer_method_args(name, self_var),
-                ret_type=infer_method_ret_type(name),
-            )
-        ]
+def is_pybind11_overloaded_function_docstring(docstring: str, name: str) -> bool:
+    return docstring.startswith(f"{name}(*args, **kwargs)\nOverloaded function.\n\n")
 
 
 def generate_stub_for_c_module(
     module_name: str,
     target: str,
     known_modules: list[str],
-    sig_generators: Iterable[SignatureGenerator],
+    doc_dir: str = "",
+    *,
+    include_private: bool = False,
+    export_less: bool = False,
     include_docstrings: bool = False,
 ) -> None:
     """Generate stub for C module.
@@ -184,452 +161,664 @@ def generate_stub_for_c_module(
     If directory for target doesn't exist it will be created. Existing stub
     will be overwritten.
     """
-    module = importlib.import_module(module_name)
-    assert is_c_module(module), f"{module_name} is not a C module"
     subdir = os.path.dirname(target)
     if subdir and not os.path.isdir(subdir):
         os.makedirs(subdir)
-    imports: list[str] = []
-    functions: list[str] = []
-    done = set()
-    items = sorted(get_members(module), key=lambda x: x[0])
-    for name, obj in items:
-        if is_c_function(obj):
-            generate_c_function_stub(
-                module,
-                name,
-                obj,
-                output=functions,
-                known_modules=known_modules,
-                imports=imports,
-                sig_generators=sig_generators,
-                include_docstrings=include_docstrings,
-            )
-            done.add(name)
-    types: list[str] = []
-    for name, obj in items:
-        if name.startswith("__") and name.endswith("__"):
-            continue
-        if is_c_type(obj):
-            generate_c_type_stub(
-                module,
-                name,
-                obj,
-                output=types,
-                known_modules=known_modules,
-                imports=imports,
-                sig_generators=sig_generators,
-                include_docstrings=include_docstrings,
-            )
-            done.add(name)
-    variables = []
-    for name, obj in items:
-        if name.startswith("__") and name.endswith("__"):
-            continue
-        if name not in done and not inspect.ismodule(obj):
-            type_str = strip_or_import(
-                get_type_fullname(type(obj)), module, known_modules, imports
-            )
-            variables.append(f"{name}: {type_str}")
-    output = sorted(set(imports))
-    for line in variables:
-        output.append(line)
-    for line in types:
-        if line.startswith("class") and output and output[-1]:
-            output.append("")
-        output.append(line)
-    if output and functions:
-        output.append("")
-    for line in functions:
-        output.append(line)
-    output = add_typing_import(output)
+
+    gen = InspectionStubGenerator(
+        module_name,
+        known_modules,
+        doc_dir,
+        include_private=include_private,
+        export_less=export_less,
+        include_docstrings=include_docstrings,
+    )
+    gen.generate_module()
+    output = gen.output()
+
     with open(target, "w") as file:
-        for line in output:
-            file.write(f"{line}\n")
-
-
-def add_typing_import(output: list[str]) -> list[str]:
-    """Add typing imports for collections/types that occur in the generated stub."""
-    names = []
-    for name in _DEFAULT_TYPING_IMPORTS:
-        if any(re.search(r"\b%s\b" % name, line) for line in output):
-            names.append(name)
-    if names:
-        return [f"from typing import {', '.join(names)}", ""] + output
-    else:
-        return output.copy()
-
-
-def get_members(obj: object) -> list[tuple[str, Any]]:
-    obj_dict: Mapping[str, Any] = getattr(obj, "__dict__")  # noqa: B009
-    results = []
-    for name in obj_dict:
-        if is_skipped_attribute(name):
-            continue
-        # Try to get the value via getattr
-        try:
-            value = getattr(obj, name)
-        except AttributeError:
-            continue
-        else:
-            results.append((name, value))
-    return results
+        file.write(output)
 
 
-def is_c_function(obj: object) -> bool:
-    return inspect.isbuiltin(obj) or type(obj) is type(ord)
+class CFunctionStub:
+    """
+    Class that mimics a C function in order to provide parseable docstrings.
+    """
 
+    def __init__(self, name: str, doc: str, is_abstract: bool = False):
+        self.__name__ = name
+        self.__doc__ = doc
+        self.__abstractmethod__ = is_abstract
 
-def is_c_method(obj: object) -> bool:
-    return inspect.ismethoddescriptor(obj) or type(obj) in (
-        type(str.index),
-        type(str.__add__),
-        type(str.__new__),
-    )
+    @classmethod
+    def _from_sig(cls, sig: FunctionSig, is_abstract: bool = False) -> CFunctionStub:
+        return CFunctionStub(sig.name, sig.format_sig()[:-4], is_abstract)
 
+    @classmethod
+    def _from_sigs(cls, sigs: list[FunctionSig], is_abstract: bool = False) -> CFunctionStub:
+        return CFunctionStub(
+            sigs[0].name, "\n".join(sig.format_sig()[:-4] for sig in sigs), is_abstract
+        )
 
-def is_c_classmethod(obj: object) -> bool:
-    return inspect.isbuiltin(obj) or type(obj).__name__ in (
-        "classmethod",
-        "classmethod_descriptor",
-    )
+    def __get__(self) -> None:
+        """
+        This exists to make this object look like a method descriptor and thus
+        return true for CStubGenerator.ismethod()
+        """
+        pass
 
 
-def is_c_property(obj: object) -> bool:
-    return inspect.isdatadescriptor(obj) or hasattr(obj, "fget")
+class InspectionStubGenerator(BaseStubGenerator):
+    """Stub generator that does not parse code.
 
+    Generation is performed by inspecting the module's contents, and thus works
+    for highly dynamic modules, pyc files, and C modules (via the CStubGenerator
+    subclass).
+    """
 
-def is_c_property_readonly(prop: Any) -> bool:
-    return hasattr(prop, "fset") and prop.fset is None
+    def __init__(
+        self,
+        module_name: str,
+        known_modules: list[str],
+        doc_dir: str = "",
+        _all_: list[str] | None = None,
+        include_private: bool = False,
+        export_less: bool = False,
+        include_docstrings: bool = False,
+        module: ModuleType | None = None,
+    ) -> None:
+        self.doc_dir = doc_dir
+        if module is None:
+            self.module = importlib.import_module(module_name)
+        else:
+            self.module = module
+        self.is_c_module = is_c_module(self.module)
+        self.known_modules = known_modules
+        self.resort_members = self.is_c_module
+        super().__init__(_all_, include_private, export_less, include_docstrings)
+        self.module_name = module_name
+
+    def get_default_function_sig(self, func: object, ctx: FunctionContext) -> FunctionSig:
+        argspec = None
+        if not self.is_c_module:
+            # Get the full argument specification of the function
+            try:
+                argspec = inspect.getfullargspec(func)
+            except TypeError:
+                # some callables cannot be inspected, e.g. functools.partial
+                pass
+        if argspec is None:
+            if ctx.class_info is not None:
+                # method:
+                return FunctionSig(
+                    name=ctx.name,
+                    args=infer_c_method_args(ctx.name, ctx.class_info.self_var),
+                    ret_type=infer_method_ret_type(ctx.name),
+                )
+            else:
+                # function:
+                return FunctionSig(
+                    name=ctx.name,
+                    args=[ArgSig(name="*args"), ArgSig(name="**kwargs")],
+                    ret_type=None,
+                )
 
+        # Extract the function arguments, defaults, and varargs
+        args = argspec.args
+        defaults = argspec.defaults
+        varargs = argspec.varargs
+        kwargs = argspec.varkw
+        annotations = argspec.annotations
+
+        def get_annotation(key: str) -> str | None:
+            if key not in annotations:
+                return None
+            argtype = annotations[key]
+            if argtype is None:
+                return "None"
+            if not isinstance(argtype, str):
+                return self.get_type_fullname(argtype)
+            return argtype
+
+        arglist: list[ArgSig] = []
+        # Add the arguments to the signature
+        for i, arg in enumerate(args):
+            # Check if the argument has a default value
+            if defaults and i >= len(args) - len(defaults):
+                default_value = defaults[i - (len(args) - len(defaults))]
+                if arg in annotations:
+                    argtype = annotations[arg]
+                else:
+                    argtype = self.get_type_annotation(default_value)
+                    if argtype == "None":
+                        # None is not a useful annotation, but we can infer that the arg
+                        # is optional
+                        incomplete = self.add_name("_typeshed.Incomplete")
+                        argtype = f"{incomplete} | None"
+                arglist.append(ArgSig(arg, argtype, default=True))
+            else:
+                arglist.append(ArgSig(arg, get_annotation(arg), default=False))
 
-def is_c_type(obj: object) -> bool:
-    return inspect.isclass(obj) or type(obj) is type(int)
+        # Add *args if present
+        if varargs:
+            arglist.append(ArgSig(f"*{varargs}", get_annotation(varargs)))
 
+        # Add **kwargs if present
+        if kwargs:
+            arglist.append(ArgSig(f"**{kwargs}", get_annotation(kwargs)))
 
-def is_pybind11_overloaded_function_docstring(docstr: str, name: str) -> bool:
-    return docstr.startswith(f"{name}(*args, **kwargs)\n" + "Overloaded function.\n\n")
+        # add types for known special methods
+        if ctx.class_info is not None and all(
+            arg.type is None and arg.default is False for arg in arglist
+        ):
+            new_args = infer_method_arg_types(
+                ctx.name, ctx.class_info.self_var, [arg.name for arg in arglist if arg.name]
+            )
+            if new_args is not None:
+                arglist = new_args
 
+        ret_type = get_annotation("return") or infer_method_ret_type(ctx.name)
+        return FunctionSig(ctx.name, arglist, ret_type)
 
-def generate_c_function_stub(
-    module: ModuleType,
-    name: str,
-    obj: object,
-    *,
-    known_modules: list[str],
-    sig_generators: Iterable[SignatureGenerator],
-    output: list[str],
-    imports: list[str],
-    self_var: str | None = None,
-    cls: type | None = None,
-    class_name: str | None = None,
-    include_docstrings: bool = False,
-) -> None:
-    """Generate stub for a single function or method.
+    def get_sig_generators(self) -> list[SignatureGenerator]:
+        if not self.is_c_module:
+            return []
+        else:
+            sig_generators: list[SignatureGenerator] = [DocstringSignatureGenerator()]
+            if self.doc_dir:
+                # Collect info from docs (if given). Always check these first.
+                sig_generators.insert(0, ExternalSignatureGenerator.from_doc_dir(self.doc_dir))
+            return sig_generators
 
-    The result will be appended to 'output'.
-    If necessary, any required names will be added to 'imports'.
-    The 'class_name' is used to find signature of __init__ or __new__ in
-    'class_sigs'.
-    """
-    inferred: list[FunctionSig] | None = None
-    docstr: str | None = None
-    if class_name:
-        # method:
-        assert cls is not None, "cls should be provided for methods"
-        assert self_var is not None, "self_var should be provided for methods"
-        for sig_gen in sig_generators:
-            inferred = sig_gen.get_method_sig(
-                cls, obj, module.__name__, class_name, name, self_var
+    def strip_or_import(self, type_name: str) -> str:
+        """Strips unnecessary module names from typ.
+
+        If typ represents a type that is inside module or is a type coming from builtins, remove
+        module declaration from it. Return stripped name of the type.
+
+        Arguments:
+            typ: name of the type
+        """
+        local_modules = ["builtins", self.module_name]
+        parsed_type = parse_type_comment(type_name, 0, 0, None)[1]
+        assert parsed_type is not None, type_name
+        return self.print_annotation(parsed_type, self.known_modules, local_modules)
+
+    def get_obj_module(self, obj: object) -> str | None:
+        """Return module name of the object."""
+        return getattr(obj, "__module__", None)
+
+    def is_defined_in_module(self, obj: object) -> bool:
+        """Check if object is considered defined in the current module."""
+        module = self.get_obj_module(obj)
+        return module is None or module == self.module_name
+
+    def generate_module(self) -> None:
+        all_items = self.get_members(self.module)
+        if self.resort_members:
+            all_items = sorted(all_items, key=lambda x: x[0])
+        items = []
+        for name, obj in all_items:
+            if inspect.ismodule(obj) and obj.__name__ in self.known_modules:
+                module_name = obj.__name__
+                if module_name.startswith(self.module_name + "."):
+                    # from {.rel_name} import {mod_name} as {name}
+                    pkg_name, mod_name = module_name.rsplit(".", 1)
+                    rel_module = pkg_name[len(self.module_name) :] or "."
+                    self.import_tracker.add_import_from(rel_module, [(mod_name, name)])
+                    self.import_tracker.reexport(name)
+                else:
+                    # import {module_name} as {name}
+                    self.import_tracker.add_import(module_name, name)
+                    self.import_tracker.reexport(name)
+            elif self.is_defined_in_module(obj) and not inspect.ismodule(obj):
+                # process this below
+                items.append((name, obj))
+            else:
+                # from {obj_module} import {obj_name}
+                obj_module_name = self.get_obj_module(obj)
+                if obj_module_name:
+                    self.import_tracker.add_import_from(obj_module_name, [(name, None)])
+                    if self.should_reexport(name, obj_module_name, name_is_alias=False):
+                        self.import_tracker.reexport(name)
+
+        self.set_defined_names(set([name for name, obj in all_items if not inspect.ismodule(obj)]))
+
+        if self.resort_members:
+            functions: list[str] = []
+            types: list[str] = []
+            variables: list[str] = []
+        else:
+            output: list[str] = []
+            functions = types = variables = output
+
+        for name, obj in items:
+            if self.is_function(obj):
+                self.generate_function_stub(name, obj, output=functions)
+            elif inspect.isclass(obj):
+                self.generate_class_stub(name, obj, output=types)
+            else:
+                self.generate_variable_stub(name, obj, output=variables)
+
+        self._output = []
+
+        if self.resort_members:
+            for line in variables:
+                self._output.append(line + "\n")
+            for line in types:
+                if line.startswith("class") and self._output and self._output[-1]:
+                    self._output.append("\n")
+                self._output.append(line + "\n")
+            if self._output and functions:
+                self._output.append("\n")
+            for line in functions:
+                self._output.append(line + "\n")
+        else:
+            for i, line in enumerate(output):
+                if (
+                    self._output
+                    and line.startswith("class")
+                    and (
+                        not self._output[-1].startswith("class")
+                        or (len(output) > i + 1 and output[i + 1].startswith("    "))
+                    )
+                ) or (
+                    self._output
+                    and self._output[-1].startswith("def")
+                    and not line.startswith("def")
+                ):
+                    self._output.append("\n")
+                self._output.append(line + "\n")
+        self.check_undefined_names()
+
+    def is_skipped_attribute(self, attr: str) -> bool:
+        return (
+            attr
+            in (
+                "__class__",
+                "__getattribute__",
+                "__str__",
+                "__repr__",
+                "__doc__",
+                "__dict__",
+                "__module__",
+                "__weakref__",
+                "__annotations__",
             )
-            if inferred:
-                # add self/cls var, if not present
-                for sig in inferred:
-                    if not sig.args or sig.args[0].name not in ("self", "cls"):
-                        sig.args.insert(0, ArgSig(name=self_var))
-                break
-    else:
-        # function:
-        for sig_gen in sig_generators:
-            inferred = sig_gen.get_function_sig(obj, module.__name__, name)
-            if inferred:
-                break
-
-    if not inferred:
-        raise ValueError(
-            "No signature was found. This should never happen "
-            "if FallbackSignatureGenerator is provided"
+            or attr in self.IGNORED_DUNDERS
+            or is_pybind_skipped_attribute(attr)  # For pickling
+            or keyword.iskeyword(attr)
         )
 
-    is_overloaded = len(inferred) > 1 if inferred else False
-    if is_overloaded:
-        imports.append("from typing import overload")
-    if inferred:
-        for signature in inferred:
-            args: list[str] = []
-            for arg in signature.args:
-                arg_def = arg.name
-                if arg_def == "None":
-                    arg_def = "_none"  # None is not a valid argument name
-
-                if arg.type:
-                    arg_def += ": " + strip_or_import(arg.type, module, known_modules, imports)
-
-                if arg.default:
-                    arg_def += " = ..."
-
-                args.append(arg_def)
-
-            if is_overloaded:
-                output.append("@overload")
-            # a sig generator indicates @classmethod by specifying the cls arg
-            if class_name and signature.args and signature.args[0].name == "cls":
-                output.append("@classmethod")
-            output_signature = "def {function}({args}) -> {ret}:".format(
-                function=name,
-                args=", ".join(args),
-                ret=strip_or_import(signature.ret_type, module, known_modules, imports),
-            )
-            if include_docstrings and docstr:
-                docstr_quoted = mypy.util.quote_docstring(docstr.strip())
-                docstr_indented = "\n    ".join(docstr_quoted.split("\n"))
-                output.append(output_signature)
-                output.extend(f"    {docstr_indented}".split("\n"))
+    def get_members(self, obj: object) -> list[tuple[str, Any]]:
+        obj_dict: Mapping[str, Any] = getattr(obj, "__dict__")  # noqa: B009
+        results = []
+        for name in obj_dict:
+            if self.is_skipped_attribute(name):
+                continue
+            # Try to get the value via getattr
+            try:
+                value = getattr(obj, name)
+            except AttributeError:
+                continue
             else:
-                output_signature += " ..."
-                output.append(output_signature)
-
+                results.append((name, value))
+        return results
 
-def strip_or_import(
-    typ: str, module: ModuleType, known_modules: list[str], imports: list[str]
-) -> str:
-    """Strips unnecessary module names from typ.
+    def get_type_annotation(self, obj: object) -> str:
+        """
+        Given an instance, return a string representation of its type that is valid
+        to use as a type annotation.
+        """
+        if obj is None or obj is type(None):
+            return "None"
+        elif inspect.isclass(obj):
+            return "type[{}]".format(self.get_type_fullname(obj))
+        elif isinstance(obj, FunctionType):
+            return self.add_name("typing.Callable")
+        elif isinstance(obj, ModuleType):
+            return self.add_name("types.ModuleType", require=False)
+        else:
+            return self.get_type_fullname(type(obj))
 
-    If typ represents a type that is inside module or is a type coming from builtins, remove
-    module declaration from it. Return stripped name of the type.
+    def is_function(self, obj: object) -> bool:
+        if self.is_c_module:
+            return inspect.isbuiltin(obj)
+        else:
+            return inspect.isfunction(obj)
+
+    def is_method(self, class_info: ClassInfo, name: str, obj: object) -> bool:
+        if self.is_c_module:
+            return inspect.ismethoddescriptor(obj) or type(obj) in (
+                type(str.index),
+                type(str.__add__),
+                type(str.__new__),
+            )
+        else:
+            # this is valid because it is only called on members of a class
+            return inspect.isfunction(obj)
+
+    def is_classmethod(self, class_info: ClassInfo, name: str, obj: object) -> bool:
+        if self.is_c_module:
+            return inspect.isbuiltin(obj) or type(obj).__name__ in (
+                "classmethod",
+                "classmethod_descriptor",
+            )
+        else:
+            return inspect.ismethod(obj)
 
-    Arguments:
-        typ: name of the type
-        module: in which this type is used
-        known_modules: other modules being processed
-        imports: list of import statements (may be modified during the call)
-    """
-    local_modules = ["builtins"]
-    if module:
-        local_modules.append(module.__name__)
-
-    stripped_type = typ
-    if any(c in typ for c in "[,"):
-        for subtyp in re.split(r"[\[,\]]", typ):
-            stripped_subtyp = strip_or_import(subtyp.strip(), module, known_modules, imports)
-            if stripped_subtyp != subtyp:
-                stripped_type = re.sub(
-                    r"(^|[\[, ]+)" + re.escape(subtyp) + r"($|[\], ]+)",
-                    r"\1" + stripped_subtyp + r"\2",
-                    stripped_type,
-                )
-    elif "." in typ:
-        for module_name in local_modules + list(reversed(known_modules)):
-            if typ.startswith(module_name + "."):
-                if module_name in local_modules:
-                    stripped_type = typ[len(module_name) + 1 :]
-                arg_module = module_name
-                break
+    def is_staticmethod(self, class_info: ClassInfo | None, name: str, obj: object) -> bool:
+        if self.is_c_module:
+            return False
         else:
-            arg_module = typ[: typ.rindex(".")]
-        if arg_module not in local_modules:
-            imports.append(f"import {arg_module}")
-    if stripped_type == "NoneType":
-        stripped_type = "None"
-    return stripped_type
-
-
-def is_static_property(obj: object) -> bool:
-    return type(obj).__name__ == "pybind11_static_property"
-
-
-def generate_c_property_stub(
-    name: str,
-    obj: object,
-    static_properties: list[str],
-    rw_properties: list[str],
-    ro_properties: list[str],
-    readonly: bool,
-    module: ModuleType | None = None,
-    known_modules: list[str] | None = None,
-    imports: list[str] | None = None,
-) -> None:
-    """Generate property stub using introspection of 'obj'.
+            return class_info is not None and isinstance(
+                inspect.getattr_static(class_info.cls, name), staticmethod
+            )
 
-    Try to infer type from docstring, append resulting lines to 'output'.
-    """
+    @staticmethod
+    def is_abstract_method(obj: object) -> bool:
+        return getattr(obj, "__abstractmethod__", False)
 
-    def infer_prop_type(docstr: str | None) -> str | None:
-        """Infer property type from docstring or docstring signature."""
-        if docstr is not None:
-            inferred = infer_ret_type_sig_from_anon_docstring(docstr)
-            if not inferred:
-                inferred = infer_ret_type_sig_from_docstring(docstr, name)
-            if not inferred:
-                inferred = infer_prop_type_from_docstring(docstr)
-            return inferred
-        else:
-            return None
+    @staticmethod
+    def is_property(class_info: ClassInfo, name: str, obj: object) -> bool:
+        return inspect.isdatadescriptor(obj) or hasattr(obj, "fget")
 
-    inferred = infer_prop_type(getattr(obj, "__doc__", None))
-    if not inferred:
-        fget = getattr(obj, "fget", None)
-        inferred = infer_prop_type(getattr(fget, "__doc__", None))
-    if not inferred:
-        inferred = "Any"
-
-    if module is not None and imports is not None and known_modules is not None:
-        inferred = strip_or_import(inferred, module, known_modules, imports)
-
-    if is_static_property(obj):
-        trailing_comment = "  # read-only" if readonly else ""
-        static_properties.append(f"{name}: ClassVar[{inferred}] = ...{trailing_comment}")
-    else:  # regular property
-        if readonly:
-            ro_properties.append("@property")
-            ro_properties.append(f"def {name}(self) -> {inferred}: ...")
+    @staticmethod
+    def is_property_readonly(prop: Any) -> bool:
+        return hasattr(prop, "fset") and prop.fset is None
+
+    def is_static_property(self, obj: object) -> bool:
+        """For c-modules, whether the property behaves like an attribute"""
+        if self.is_c_module:
+            # StaticProperty is from boost-python
+            return type(obj).__name__ in ("pybind11_static_property", "StaticProperty")
         else:
-            rw_properties.append(f"{name}: {inferred}")
+            return False
+
+    def process_inferred_sigs(self, inferred: list[FunctionSig]) -> None:
+        for i, sig in enumerate(inferred):
+            for arg in sig.args:
+                if arg.type is not None:
+                    arg.type = self.strip_or_import(arg.type)
+            if sig.ret_type is not None:
+                inferred[i] = sig._replace(ret_type=self.strip_or_import(sig.ret_type))
+
+    def generate_function_stub(
+        self, name: str, obj: object, *, output: list[str], class_info: ClassInfo | None = None
+    ) -> None:
+        """Generate stub for a single function or method.
+
+        The result (always a single line) will be appended to 'output'.
+        If necessary, any required names will be added to 'imports'.
+        The 'class_name' is used to find signature of __init__ or __new__ in
+        'class_sigs'.
+        """
+        docstring: Any = getattr(obj, "__doc__", None)
+        if not isinstance(docstring, str):
+            docstring = None
+
+        ctx = FunctionContext(
+            self.module_name,
+            name,
+            docstring=docstring,
+            is_abstract=self.is_abstract_method(obj),
+            class_info=class_info,
+        )
+        if self.is_private_name(name, ctx.fullname) or self.is_not_in_all(name):
+            return
 
+        self.record_name(ctx.name)
+        default_sig = self.get_default_function_sig(obj, ctx)
+        inferred = self.get_signatures(default_sig, self.sig_generators, ctx)
+        self.process_inferred_sigs(inferred)
 
-def generate_c_type_stub(
-    module: ModuleType,
-    class_name: str,
-    obj: type,
-    output: list[str],
-    known_modules: list[str],
-    imports: list[str],
-    sig_generators: Iterable[SignatureGenerator],
-    include_docstrings: bool = False,
-) -> None:
-    """Generate stub for a single class using runtime introspection.
+        decorators = []
+        if len(inferred) > 1:
+            decorators.append("@{}".format(self.add_name("typing.overload")))
 
-    The result lines will be appended to 'output'. If necessary, any
-    required names will be added to 'imports'.
-    """
-    raw_lookup = getattr(obj, "__dict__")  # noqa: B009
-    items = sorted(get_members(obj), key=lambda x: method_name_sort_key(x[0]))
-    names = {x[0] for x in items}
-    methods: list[str] = []
-    types: list[str] = []
-    static_properties: list[str] = []
-    rw_properties: list[str] = []
-    ro_properties: list[str] = []
-    attrs: list[tuple[str, Any]] = []
-    for attr, value in items:
-        # use unevaluated descriptors when dealing with property inspection
-        raw_value = raw_lookup.get(attr, value)
-        if is_c_method(value) or is_c_classmethod(value):
-            if attr == "__new__":
-                # TODO: We should support __new__.
-                if "__init__" in names:
-                    # Avoid duplicate functions if both are present.
-                    # But is there any case where .__new__() has a
-                    # better signature than __init__() ?
-                    continue
-                attr = "__init__"
-            if is_c_classmethod(value):
-                self_var = "cls"
+        if ctx.is_abstract:
+            decorators.append("@{}".format(self.add_name("abc.abstractmethod")))
+
+        if class_info is not None:
+            if self.is_staticmethod(class_info, name, obj):
+                decorators.append("@staticmethod")
             else:
-                self_var = "self"
-            generate_c_function_stub(
-                module,
-                attr,
-                value,
-                output=methods,
-                known_modules=known_modules,
-                imports=imports,
-                self_var=self_var,
-                cls=obj,
-                class_name=class_name,
-                sig_generators=sig_generators,
-                include_docstrings=include_docstrings,
-            )
-        elif is_c_property(raw_value):
-            generate_c_property_stub(
-                attr,
-                raw_value,
-                static_properties,
-                rw_properties,
-                ro_properties,
-                is_c_property_readonly(raw_value),
-                module=module,
-                known_modules=known_modules,
-                imports=imports,
-            )
-        elif is_c_type(value):
-            generate_c_type_stub(
-                module,
-                attr,
-                value,
-                types,
-                imports=imports,
-                known_modules=known_modules,
-                sig_generators=sig_generators,
-                include_docstrings=include_docstrings,
+                for sig in inferred:
+                    if not sig.args or sig.args[0].name not in ("self", "cls"):
+                        sig.args.insert(0, ArgSig(name=class_info.self_var))
+                # a sig generator indicates @classmethod by specifying the cls arg.
+                if inferred[0].args and inferred[0].args[0].name == "cls":
+                    decorators.append("@classmethod")
+
+        output.extend(self.format_func_def(inferred, decorators=decorators, docstring=docstring))
+        self._fix_iter(ctx, inferred, output)
+
+    def _fix_iter(
+        self, ctx: FunctionContext, inferred: list[FunctionSig], output: list[str]
+    ) -> None:
+        """Ensure that objects which implement old-style iteration via __getitem__
+        are considered iterable.
+        """
+        if (
+            ctx.class_info
+            and ctx.class_info.cls is not None
+            and ctx.name == "__getitem__"
+            and "__iter__" not in ctx.class_info.cls.__dict__
+        ):
+            item_type: str | None = None
+            for sig in inferred:
+                if sig.args and sig.args[-1].type == "int":
+                    item_type = sig.ret_type
+                    break
+            if item_type is None:
+                return
+            obj = CFunctionStub(
+                "__iter__", f"def __iter__(self) -> typing.Iterator[{item_type}]\n"
             )
+            self.generate_function_stub("__iter__", obj, output=output, class_info=ctx.class_info)
+
+    def generate_property_stub(
+        self,
+        name: str,
+        raw_obj: object,
+        obj: object,
+        static_properties: list[str],
+        rw_properties: list[str],
+        ro_properties: list[str],
+        class_info: ClassInfo | None = None,
+    ) -> None:
+        """Generate property stub using introspection of 'obj'.
+
+        Try to infer type from docstring, append resulting lines to 'output'.
+
+        raw_obj : object before evaluation of descriptor (if any)
+        obj : object after evaluation of descriptor
+        """
+
+        docstring = getattr(raw_obj, "__doc__", None)
+        fget = getattr(raw_obj, "fget", None)
+        if fget:
+            alt_docstr = getattr(fget, "__doc__", None)
+            if alt_docstr and docstring:
+                docstring += alt_docstr
+            elif alt_docstr:
+                docstring = alt_docstr
+
+        ctx = FunctionContext(
+            self.module_name, name, docstring=docstring, is_abstract=False, class_info=class_info
+        )
+
+        if self.is_private_name(name, ctx.fullname) or self.is_not_in_all(name):
+            return
+
+        self.record_name(ctx.name)
+        static = self.is_static_property(raw_obj)
+        readonly = self.is_property_readonly(raw_obj)
+        if static:
+            ret_type: str | None = self.strip_or_import(self.get_type_annotation(obj))
         else:
-            attrs.append((attr, value))
+            default_sig = self.get_default_function_sig(raw_obj, ctx)
+            ret_type = default_sig.ret_type
+
+        inferred_type = self.get_property_type(ret_type, self.sig_generators, ctx)
+        if inferred_type is not None:
+            inferred_type = self.strip_or_import(inferred_type)
 
-    for attr, value in attrs:
-        static_properties.append(
-            "{}: ClassVar[{}] = ...".format(
-                attr,
-                strip_or_import(get_type_fullname(type(value)), module, known_modules, imports),
+        if static:
+            classvar = self.add_name("typing.ClassVar")
+            trailing_comment = "  # read-only" if readonly else ""
+            if inferred_type is None:
+                inferred_type = self.add_name("_typeshed.Incomplete")
+
+            static_properties.append(
+                f"{self._indent}{name}: {classvar}[{inferred_type}] = ...{trailing_comment}"
             )
-        )
-    all_bases = type.mro(obj)
-    if all_bases[-1] is object:
-        # TODO: Is this always object?
-        del all_bases[-1]
-    # remove pybind11_object. All classes generated by pybind11 have pybind11_object in their MRO,
-    # which only overrides a few functions in object type
-    if all_bases and all_bases[-1].__name__ == "pybind11_object":
-        del all_bases[-1]
-    # remove the class itself
-    all_bases = all_bases[1:]
-    # Remove base classes of other bases as redundant.
-    bases: list[type] = []
-    for base in all_bases:
-        if not any(issubclass(b, base) for b in bases):
-            bases.append(base)
-    if bases:
-        bases_str = "(%s)" % ", ".join(
-            strip_or_import(get_type_fullname(base), module, known_modules, imports)
-            for base in bases
-        )
-    else:
-        bases_str = ""
-    if types or static_properties or rw_properties or methods or ro_properties:
-        output.append(f"class {class_name}{bases_str}:")
-        for line in types:
-            if (
-                output
-                and output[-1]
-                and not output[-1].startswith("class")
-                and line.startswith("class")
+        else:  # regular property
+            if readonly:
+                ro_properties.append(f"{self._indent}@property")
+                sig = FunctionSig(name, [ArgSig("self")], inferred_type)
+                ro_properties.append(sig.format_sig(indent=self._indent))
+            else:
+                if inferred_type is None:
+                    inferred_type = self.add_name("_typeshed.Incomplete")
+
+                rw_properties.append(f"{self._indent}{name}: {inferred_type}")
+
+    def get_type_fullname(self, typ: type) -> str:
+        """Given a type, return a string representation"""
+        if typ is Any:
+            return "Any"
+        typename = getattr(typ, "__qualname__", typ.__name__)
+        module_name = self.get_obj_module(typ)
+        assert module_name is not None, typ
+        if module_name != "builtins":
+            typename = f"{module_name}.{typename}"
+        return typename
+
+    def get_base_types(self, obj: type) -> list[str]:
+        all_bases = type.mro(obj)
+        if all_bases[-1] is object:
+            # TODO: Is this always object?
+            del all_bases[-1]
+        # remove pybind11_object. All classes generated by pybind11 have pybind11_object in their MRO,
+        # which only overrides a few functions in object type
+        if all_bases and all_bases[-1].__name__ == "pybind11_object":
+            del all_bases[-1]
+        # remove the class itself
+        all_bases = all_bases[1:]
+        # Remove base classes of other bases as redundant.
+        bases: list[type] = []
+        for base in all_bases:
+            if not any(issubclass(b, base) for b in bases):
+                bases.append(base)
+        return [self.strip_or_import(self.get_type_fullname(base)) for base in bases]
+
+    def generate_class_stub(self, class_name: str, cls: type, output: list[str]) -> None:
+        """Generate stub for a single class using runtime introspection.
+
+        The result lines will be appended to 'output'. If necessary, any
+        required names will be added to 'imports'.
+        """
+        raw_lookup = getattr(cls, "__dict__")  # noqa: B009
+        items = self.get_members(cls)
+        if self.resort_members:
+            items = sorted(items, key=lambda x: method_name_sort_key(x[0]))
+        names = set(x[0] for x in items)
+        methods: list[str] = []
+        types: list[str] = []
+        static_properties: list[str] = []
+        rw_properties: list[str] = []
+        ro_properties: list[str] = []
+        attrs: list[tuple[str, Any]] = []
+
+        self.record_name(class_name)
+        self.indent()
+
+        class_info = ClassInfo(class_name, "", getattr(cls, "__doc__", None), cls)
+
+        for attr, value in items:
+            # use unevaluated descriptors when dealing with property inspection
+            raw_value = raw_lookup.get(attr, value)
+            if self.is_method(class_info, attr, value) or self.is_classmethod(
+                class_info, attr, value
             ):
-                output.append("")
-            output.append("    " + line)
-        for line in static_properties:
-            output.append(f"    {line}")
-        for line in rw_properties:
-            output.append(f"    {line}")
-        for line in methods:
-            output.append(f"    {line}")
-        for line in ro_properties:
-            output.append(f"    {line}")
-    else:
-        output.append(f"class {class_name}{bases_str}: ...")
+                if attr == "__new__":
+                    # TODO: We should support __new__.
+                    if "__init__" in names:
+                        # Avoid duplicate functions if both are present.
+                        # But is there any case where .__new__() has a
+                        # better signature than __init__() ?
+                        continue
+                    attr = "__init__"
+                # FIXME: make this nicer
+                if self.is_classmethod(class_info, attr, value):
+                    class_info.self_var = "cls"
+                else:
+                    class_info.self_var = "self"
+                self.generate_function_stub(attr, value, output=methods, class_info=class_info)
+            elif self.is_property(class_info, attr, raw_value):
+                self.generate_property_stub(
+                    attr,
+                    raw_value,
+                    value,
+                    static_properties,
+                    rw_properties,
+                    ro_properties,
+                    class_info,
+                )
+            elif inspect.isclass(value) and self.is_defined_in_module(value):
+                self.generate_class_stub(attr, value, types)
+            else:
+                attrs.append((attr, value))
 
+        for attr, value in attrs:
+            if attr == "__hash__" and value is None:
+                # special case for __hash__
+                continue
+            prop_type_name = self.strip_or_import(self.get_type_annotation(value))
+            classvar = self.add_name("typing.ClassVar")
+            static_properties.append(f"{self._indent}{attr}: {classvar}[{prop_type_name}] = ...")
 
-def get_type_fullname(typ: type) -> str:
-    return f"{typ.__module__}.{getattr(typ, '__qualname__', typ.__name__)}"
+        self.dedent()
+
+        bases = self.get_base_types(cls)
+        if bases:
+            bases_str = "(%s)" % ", ".join(bases)
+        else:
+            bases_str = ""
+        if types or static_properties or rw_properties or methods or ro_properties:
+            output.append(f"{self._indent}class {class_name}{bases_str}:")
+            for line in types:
+                if (
+                    output
+                    and output[-1]
+                    and not output[-1].strip().startswith("class")
+                    and line.strip().startswith("class")
+                ):
+                    output.append("")
+                output.append(line)
+            for line in static_properties:
+                output.append(line)
+            for line in rw_properties:
+                output.append(line)
+            for line in methods:
+                output.append(line)
+            for line in ro_properties:
+                output.append(line)
+        else:
+            output.append(f"{self._indent}class {class_name}{bases_str}: ...")
+
+    def generate_variable_stub(self, name: str, obj: object, output: list[str]) -> None:
+        """Generate stub for a single variable using runtime introspection.
+
+        The result lines will be appended to 'output'. If necessary, any
+        required names will be added to 'imports'.
+        """
+        if self.is_private_name(name, f"{self.module_name}.{name}") or self.is_not_in_all(name):
+            return
+        self.record_name(name)
+        type_str = self.strip_or_import(self.get_type_annotation(obj))
+        output.append(f"{name}: {type_str}")
 
 
 def method_name_sort_key(name: str) -> tuple[int, str]:
@@ -648,22 +837,9 @@ def is_pybind_skipped_attribute(attr: str) -> bool:
     return attr.startswith("__pybind11_module_local_")
 
 
-def is_skipped_attribute(attr: str) -> bool:
-    return attr in (
-        "__class__",
-        "__getattribute__",
-        "__str__",
-        "__repr__",
-        "__doc__",
-        "__dict__",
-        "__module__",
-        "__weakref__",
-    ) or is_pybind_skipped_attribute(  # For pickling
-        attr
-    )
-
-
-def infer_method_args(name: str, self_var: str | None = None) -> list[ArgSig]:
+def infer_c_method_args(
+    name: str, self_var: str = "self", arg_names: list[str] | None = None
+) -> list[ArgSig]:
     args: list[ArgSig] | None = None
     if name.startswith("__") and name.endswith("__"):
         name = name[2:-2]
@@ -703,13 +879,9 @@ def infer_method_args(name: str, self_var: str | None = None) -> list[ArgSig]:
             args = []
         elif name == "setstate":
             args = [ArgSig(name="state")]
+        elif name in ("eq", "ne", "lt", "le", "gt", "ge"):
+            args = [ArgSig(name="other", type="object")]
         elif name in (
-            "eq",
-            "ne",
-            "lt",
-            "le",
-            "gt",
-            "ge",
             "add",
             "radd",
             "sub",
@@ -761,22 +933,15 @@ def infer_method_args(name: str, self_var: str | None = None) -> list[ArgSig]:
         elif name == "reduce_ex":
             args = [ArgSig(name="protocol")]
         elif name == "exit":
-            args = [ArgSig(name="type"), ArgSig(name="value"), ArgSig(name="traceback")]
+            args = [
+                ArgSig(name="type", type="type[BaseException] | None"),
+                ArgSig(name="value", type="BaseException | None"),
+                ArgSig(name="traceback", type="types.TracebackType | None"),
+            ]
+    if args is None:
+        args = infer_method_arg_types(name, self_var, arg_names)
+    else:
+        args = [ArgSig(name=self_var)] + args
     if args is None:
         args = [ArgSig(name="*args"), ArgSig(name="**kwargs")]
-    return [ArgSig(name=self_var or "self")] + args
-
-
-def infer_method_ret_type(name: str) -> str:
-    if name.startswith("__") and name.endswith("__"):
-        name = name[2:-2]
-        if name in ("float", "bool", "bytes", "int"):
-            return name
-        # Note: __eq__ and co may return arbitrary types, but bool is good enough for stubgen.
-        elif name in ("eq", "ne", "lt", "le", "gt", "ge", "contains"):
-            return "bool"
-        elif name in ("len", "hash", "sizeof", "trunc", "floor", "ceil"):
-            return "int"
-        elif name in ("init", "setitem"):
-            return "None"
-    return "Any"
+    return args
diff --git a/mypy/stubutil.py b/mypy/stubutil.py
index e15766b66cb3..22e525c14e7c 100644
--- a/mypy/stubutil.py
+++ b/mypy/stubutil.py
@@ -5,19 +5,26 @@
 import os.path
 import re
 import sys
+from abc import abstractmethod
+from collections import defaultdict
 from contextlib import contextmanager
-from typing import Iterator
+from typing import Final, Iterable, Iterator, Mapping
 from typing_extensions import overload
 
+from mypy_extensions import mypyc_attr
+
+import mypy.options
 from mypy.modulefinder import ModuleNotFoundReason
 from mypy.moduleinspect import InspectError, ModuleInspect
+from mypy.stubdoc import ArgSig, FunctionSig
+from mypy.types import AnyType, NoneType, Type, TypeList, TypeStrVisitor, UnboundType, UnionType
 
 # Modules that may fail when imported, or that may have side effects (fully qualified).
 NOT_IMPORTABLE_MODULES = ()
 
 
 class CantImport(Exception):
-    def __init__(self, module: str, message: str):
+    def __init__(self, module: str, message: str) -> None:
         self.module = module
         self.message = message
 
@@ -70,8 +77,9 @@ def find_module_path_and_all_py3(
 ) -> tuple[str | None, list[str] | None] | None:
     """Find module and determine __all__ for a Python 3 module.
 
-    Return None if the module is a C module. Return (module_path, __all__) if
-    it is a Python module. Raise CantImport if import failed.
+    Return None if the module is a C or pyc-only module.
+    Return (module_path, __all__) if it is a Python module.
+    Raise CantImport if import failed.
     """
     if module in NOT_IMPORTABLE_MODULES:
         raise CantImport(module, "")
@@ -182,3 +190,591 @@ def common_dir_prefix(paths: list[str]) -> str:
                 cur = path
                 break
     return cur or "."
+
+
+class AnnotationPrinter(TypeStrVisitor):
+    """Visitor used to print existing annotations in a file.
+
+    The main difference from TypeStrVisitor is a better treatment of
+    unbound types.
+
+    Notes:
+    * This visitor doesn't add imports necessary for annotations, this is done separately
+      by ImportTracker.
+    * It can print all kinds of types, but the generated strings may not be valid (notably
+      callable types) since it prints the same string that reveal_type() does.
+    * For Instance types it prints the fully qualified names.
+    """
+
+    # TODO: Generate valid string representation for callable types.
+    # TODO: Use short names for Instances.
+    def __init__(
+        self,
+        stubgen: BaseStubGenerator,
+        known_modules: list[str] | None = None,
+        local_modules: list[str] | None = None,
+    ) -> None:
+        super().__init__(options=mypy.options.Options())
+        self.stubgen = stubgen  # used to record imports required by printed annotations
+        self.known_modules = known_modules  # modules currently being processed, if known
+        self.local_modules = local_modules or ["builtins"]  # names from these print unqualified
+
+    def visit_any(self, t: AnyType) -> str:
+        s = super().visit_any(t)
+        self.stubgen.import_tracker.require_name(s)  # the printed name needs an import
+        return s
+
+    def visit_unbound_type(self, t: UnboundType) -> str:
+        s = t.name
+        if self.known_modules is not None and "." in s:
+            # See if this object is from any of the modules that we're currently processing.
+            # Reverse sort so that subpackages come before parents: e.g. "foo.bar" before "foo".
+            for module_name in self.local_modules + sorted(self.known_modules, reverse=True):
+                if s.startswith(module_name + "."):
+                    if module_name in self.local_modules:
+                        s = s[len(module_name) + 1 :]
+                    arg_module = module_name
+                    break
+            else:
+                arg_module = s[: s.rindex(".")]
+            if arg_module not in self.local_modules:
+                self.stubgen.import_tracker.add_import(arg_module, require=True)
+        elif s == "NoneType":
+            # When called without analysis all types are unbound, so this won't hit
+            # visit_none_type().
+            s = "None"
+        else:
+            self.stubgen.import_tracker.require_name(s)
+        if t.args:
+            s += f"[{self.args_str(t.args)}]"
+        return s
+
+    def visit_none_type(self, t: NoneType) -> str:
+        return "None"
+
+    def visit_type_list(self, t: TypeList) -> str:
+        return f"[{self.list_str(t.items)}]"
+
+    def visit_union_type(self, t: UnionType) -> str:
+        return " | ".join([item.accept(self) for item in t.items])
+
+    def args_str(self, args: Iterable[Type]) -> str:
+        """Convert an array of arguments to strings and join the results with commas.
+
+        The main difference from list_str is the preservation of quotes for string
+        arguments.
+        """
+        types = ["builtins.bytes", "builtins.str"]
+        res = []
+        for arg in args:
+            arg_str = arg.accept(self)
+            if isinstance(arg, UnboundType) and arg.original_str_fallback in types:
+                res.append(f"'{arg_str}'")  # keep quotes so the forward reference stays a string
+            else:
+                res.append(arg_str)
+        return ", ".join(res)
+
+
+class ClassInfo:
+    def __init__(
+        self, name: str, self_var: str, docstring: str | None = None, cls: type | None = None
+    ) -> None:
+        self.name = name  # class name as written in the stub
+        self.self_var = self_var  # name used for the self/cls argument of methods
+        self.docstring = docstring  # class docstring, if available
+        self.cls = cls  # runtime class object, if available
+
+
+class FunctionContext:
+    def __init__(
+        self,
+        module_name: str,
+        name: str,
+        docstring: str | None = None,
+        is_abstract: bool = False,
+        class_info: ClassInfo | None = None,
+    ) -> None:
+        self.module_name = module_name  # fully qualified module containing the function
+        self.name = name  # unqualified function name
+        self.docstring = docstring  # function docstring, if available
+        self.is_abstract = is_abstract
+        self.class_info = class_info  # enclosing class, or None for module-level functions
+        self._fullname: str | None = None  # cache for the fullname property
+
+    @property
+    def fullname(self) -> str:
+        if self._fullname is None:  # computed once, then cached
+            if self.class_info:
+                self._fullname = f"{self.module_name}.{self.class_info.name}.{self.name}"
+            else:
+                self._fullname = f"{self.module_name}.{self.name}"
+        return self._fullname
+
+
+def infer_method_ret_type(name: str) -> str | None:
+    """Infer return types for known special methods; return None if unknown."""
+    if name.startswith("__") and name.endswith("__"):
+        name = name[2:-2]
+        if name in ("float", "bool", "bytes", "int", "complex", "str"):
+            return name  # conversion dunders return their namesake builtin type
+        # Note: __eq__ and co may return arbitrary types, but bool is good enough for stubgen.
+        elif name in ("eq", "ne", "lt", "le", "gt", "ge", "contains"):
+            return "bool"
+        elif name in ("len", "length_hint", "index", "hash", "sizeof", "trunc", "floor", "ceil"):
+            return "int"
+        elif name in ("format", "repr"):
+            return "str"
+        elif name in ("init", "setitem", "del", "delitem"):
+            return "None"
+    return None
+
+
+def infer_method_arg_types(
+    name: str, self_var: str = "self", arg_names: list[str] | None = None
+) -> list[ArgSig] | None:
+    """Infer argument types for known special methods; return None if unknown."""
+    args: list[ArgSig] | None = None
+    if name.startswith("__") and name.endswith("__"):
+        if arg_names and len(arg_names) >= 1 and arg_names[0] == "self":
+            arg_names = arg_names[1:]  # drop self here; it is re-added below via self_var
+
+        name = name[2:-2]
+        if name == "exit":
+            if arg_names is None:
+                arg_names = ["type", "value", "traceback"]
+            if len(arg_names) == 3:  # only the standard three-argument form is handled
+                arg_types = [
+                    "type[BaseException] | None",
+                    "BaseException | None",
+                    "types.TracebackType | None",
+                ]
+                args = [
+                    ArgSig(name=arg_name, type=arg_type)
+                    for arg_name, arg_type in zip(arg_names, arg_types)
+                ]
+    if args is not None:
+        return [ArgSig(name=self_var)] + args
+    return None
+
+
+@mypyc_attr(allow_interpreted_subclasses=True)
+class SignatureGenerator:
+    """Abstract base class for extracting a list of FunctionSigs for each function."""
+
+    def remove_self_type(
+        self, inferred: list[FunctionSig] | None, self_var: str
+    ) -> list[FunctionSig] | None:
+        """Remove the type annotation from the self/cls argument of each signature."""
+        if inferred:
+            for signature in inferred:
+                if signature.args:
+                    if signature.args[0].name == self_var:
+                        signature.args[0].type = None  # mutates the signatures in place
+        return inferred
+
+    @abstractmethod
+    def get_function_sig(
+        self, default_sig: FunctionSig, ctx: FunctionContext
+    ) -> list[FunctionSig] | None:
+        """Return a list of signatures for the given function.
+
+        If no signature can be found, return None. If all of the registered SignatureGenerators
+        for the stub generator return None, then the default_sig will be used.
+        """
+        pass
+
+    @abstractmethod
+    def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> str | None:
+        """Return the type of the given property."""
+        pass
+
+
+class ImportTracker:
+    """Record necessary imports during stub generation."""
+
+    def __init__(self) -> None:
+        # module_for['foo'] has the module name where 'foo' was imported from, or None if
+        # 'foo' is a module imported directly;
+        # direct_imports['foo'] is the module path used when the name 'foo' was added to the
+        # namespace.
+        # reverse_alias['foo'] is the name that 'foo' had originally when imported with an
+        # alias; examples:
+        #     'from pkg import mod'      ==> module_for['mod'] == 'pkg'
+        #     'from pkg import mod as m' ==> module_for['m'] == 'pkg'
+        #                                ==> reverse_alias['m'] == 'mod'
+        #     'import pkg.mod as m'      ==> module_for['m'] == None
+        #                                ==> reverse_alias['m'] == 'pkg.mod'
+        #     'import pkg.mod'           ==> module_for['pkg'] == None
+        #                                ==> module_for['pkg.mod'] == None
+        #                                ==> direct_imports['pkg'] == 'pkg.mod'
+        #                                ==> direct_imports['pkg.mod'] == 'pkg.mod'
+        self.module_for: dict[str, str | None] = {}
+        self.direct_imports: dict[str, str] = {}
+        self.reverse_alias: dict[str, str] = {}
+
+        # required_names is the set of names that are actually used in a type annotation
+        self.required_names: set[str] = set()
+
+        # Names that should be reexported if they come from another module
+        self.reexports: set[str] = set()
+
+    def add_import_from(
+        self, module: str, names: list[tuple[str, str | None]], require: bool = False
+    ) -> None:
+        for name, alias in names:
+            if alias:
+                # 'from {module} import {name} as {alias}'
+                self.module_for[alias] = module
+                self.reverse_alias[alias] = name
+            else:
+                # 'from {module} import {name}'
+                self.module_for[name] = module
+                self.reverse_alias.pop(name, None)
+            if require:
+                self.require_name(alias or name)
+            self.direct_imports.pop(alias or name, None)  # a from-import replaces a direct import
+
+    def add_import(self, module: str, alias: str | None = None, require: bool = False) -> None:
+        if alias:
+            # 'import {module} as {alias}'
+            assert "." not in alias  # invalid syntax
+            self.module_for[alias] = None
+            self.reverse_alias[alias] = module
+            if require:
+                self.required_names.add(alias)
+        else:
+            # 'import {module}'
+            name = module
+            if require:
+                self.required_names.add(name)
+            # add module and its parent packages
+            while name:
+                self.module_for[name] = None
+                self.direct_imports[name] = module
+                self.reverse_alias.pop(name, None)
+                name = name.rpartition(".")[0]
+
+    def require_name(self, name: str) -> None:
+        while name not in self.direct_imports and "." in name:
+            name = name.rsplit(".", 1)[0]  # fall back to the closest directly imported parent
+        self.required_names.add(name)
+
+    def reexport(self, name: str) -> None:
+        """Mark a given non qualified name as needed in __all__.
+
+        This means that in case it comes from a module, it should be
+        imported with an alias even if the alias is the same as the name.
+        """
+        self.require_name(name)
+        self.reexports.add(name)
+
+    def import_lines(self) -> list[str]:
+        """The list of required import lines (as strings with python code).
+
+        In order for a module to be included in this output, an identifier must be both
+        'required' via require_name() and 'imported' via add_import_from()
+        or add_import().
+        """
+        result = []
+
+        # To summarize multiple names imported from the same module, we collect those
+        # in the `module_map` dictionary, mapping a module path to the list of names that should
+        # be imported from it. The names can also be aliases in the form 'original as alias'.
+        module_map: Mapping[str, list[str]] = defaultdict(list)
+
+        for name in sorted(
+            self.required_names,
+            key=lambda n: (self.reverse_alias[n], n) if n in self.reverse_alias else (n, ""),
+        ):
+            # If we haven't seen this name in an import statement, ignore it
+            if name not in self.module_for:
+                continue
+
+            m = self.module_for[name]
+            if m is not None:
+                # This name was found in a from ... import ...
+                # Collect the name in the module_map
+                if name in self.reverse_alias:
+                    name = f"{self.reverse_alias[name]} as {name}"
+                elif name in self.reexports:
+                    name = f"{name} as {name}"
+                module_map[m].append(name)
+            else:
+                # This name was found in an import ...
+                # We can already generate the import line
+                if name in self.reverse_alias:
+                    source = self.reverse_alias[name]
+                    result.append(f"import {source} as {name}\n")
+                elif name in self.reexports:
+                    assert "." not in name  # Because reexports only has nonqualified names
+                    result.append(f"import {name} as {name}\n")
+                else:
+                    result.append(f"import {name}\n")
+
+        # Now generate all the from ... import ... lines collected in module_map
+        for module, names in sorted(module_map.items()):
+            result.append(f"from {module} import {', '.join(sorted(names))}\n")
+        return result
+
+
+@mypyc_attr(allow_interpreted_subclasses=True)
+class BaseStubGenerator:
+    """Shared state and helpers for stub generator implementations."""
+
+    # These names should be omitted from generated stubs.
+    IGNORED_DUNDERS: Final = {
+        "__all__",
+        "__author__",
+        "__about__",
+        "__copyright__",
+        "__email__",
+        "__license__",
+        "__summary__",
+        "__title__",
+        "__uri__",
+        "__str__",
+        "__repr__",
+        "__getstate__",
+        "__setstate__",
+        "__slots__",
+        "__builtins__",
+        "__cached__",
+        "__file__",
+        "__name__",
+        "__package__",
+        "__path__",
+        "__spec__",
+        "__loader__",
+    }
+    TYPING_MODULE_NAMES: Final = ("typing", "typing_extensions")
+    # Special-cased names that are implicitly exported from the stub (from m import y as y).
+    EXTRA_EXPORTED: Final = {
+        "pyasn1_modules.rfc2437.univ",
+        "pyasn1_modules.rfc2459.char",
+        "pyasn1_modules.rfc2459.univ",
+    }
+
+    def __init__(
+        self,
+        _all_: list[str] | None = None,
+        include_private: bool = False,
+        export_less: bool = False,
+        include_docstrings: bool = False,
+    ) -> None:
+        # Best known value of __all__.
+        self._all_ = _all_
+        self._include_private = include_private
+        self._include_docstrings = include_docstrings
+        # Disable implicit exports of package-internal imports?
+        self.export_less = export_less
+        self._import_lines: list[str] = []
+        self._output: list[str] = []
+        # Current indent level (indent is hardcoded to 4 spaces).
+        self._indent = ""
+        self._toplevel_names: list[str] = []
+        self.import_tracker = ImportTracker()
+        # Top-level members
+        self.defined_names: set[str] = set()
+        self.sig_generators = self.get_sig_generators()
+        # populated by visit_mypy_file
+        self.module_name: str = ""
+
+    def get_sig_generators(self) -> list[SignatureGenerator]:
+        return []
+
+    def refers_to_fullname(self, name: str, fullname: str | tuple[str, ...]) -> bool:
+        """Return True if the variable name identifies the same object as the given fullname(s)."""
+        if isinstance(fullname, tuple):
+            return any(self.refers_to_fullname(name, fname) for fname in fullname)
+        module, short = fullname.rsplit(".", 1)
+        return self.import_tracker.module_for.get(name) == module and (
+            name == short or self.import_tracker.reverse_alias.get(name) == short
+        )
+
+    def add_name(self, fullname: str, require: bool = True) -> str:
+        """Add a name to be imported and return the name reference.
+
+        The import will be internal to the stub (i.e. don't reexport).
+        """
+        module, name = fullname.rsplit(".", 1)
+        alias = "_" + name if name in self.defined_names else None
+        self.import_tracker.add_import_from(module, [(name, alias)], require=require)
+        return alias or name
+
+    def add_import_line(self, line: str) -> None:
+        """Add a line of text to the import section, unless it's already there."""
+        if line not in self._import_lines:
+            self._import_lines.append(line)
+
+    def get_imports(self) -> str:
+        """Return the import statements for the stub."""
+        imports = ""
+        if self._import_lines:
+            imports += "".join(self._import_lines)
+        imports += "".join(self.import_tracker.import_lines())
+        return imports
+
+    def output(self) -> str:
+        """Return the text for the stub."""
+        imports = self.get_imports()
+        if imports and self._output:
+            imports += "\n"
+        return imports + "".join(self._output)
+
+    def add(self, string: str) -> None:
+        """Add text to generated stub."""
+        self._output.append(string)
+
+    def is_top_level(self) -> bool:
+        """Are we processing the top level of a file?"""
+        return self._indent == ""
+
+    def indent(self) -> None:
+        """Add one level of indentation."""
+        self._indent += "    "
+
+    def dedent(self) -> None:
+        """Remove one level of indentation."""
+        self._indent = self._indent[:-4]
+
+    def record_name(self, name: str) -> None:
+        """Mark a name as defined.
+
+        This only does anything if at the top level of a module.
+        """
+        if self.is_top_level():
+            self._toplevel_names.append(name)
+
+    def is_recorded_name(self, name: str) -> bool:
+        """Has this name been recorded previously?"""
+        return self.is_top_level() and name in self._toplevel_names
+
+    def set_defined_names(self, defined_names: set[str]) -> None:
+        self.defined_names = defined_names
+        # Names in __all__ are required
+        for name in self._all_ or ():
+            if name not in self.IGNORED_DUNDERS:
+                self.import_tracker.reexport(name)
+
+        # These are "soft" imports for objects which might appear in annotations but not have
+        # a corresponding import statement.
+        known_imports = {
+            "_typeshed": ["Incomplete"],
+            "typing": ["Any", "TypeVar", "NamedTuple"],
+            "collections.abc": ["Generator"],
+            "typing_extensions": ["TypedDict", "ParamSpec", "TypeVarTuple"],
+        }
+        for pkg, imports in known_imports.items():
+            for t in imports:
+                # require=False means that the import won't be added unless require_name() is called
+                # for the object during generation.
+                self.add_name(f"{pkg}.{t}", require=False)
+
+    def check_undefined_names(self) -> None:
+        """Append a comment listing names in __all__ that have no top-level definition."""
+        # NOTE: debug prints of _all_/_toplevel_names removed; they polluted stubgen output.
+        undefined_names = [name for name in self._all_ or [] if name not in self._toplevel_names]
+        if undefined_names:
+            if self._output:
+                self.add("\n")
+            self.add("# Names in __all__ with no definition:\n")
+            for name in sorted(undefined_names):
+                self.add(f"#   {name}\n")
+
+    def get_signatures(
+        self,
+        default_signature: FunctionSig,
+        sig_generators: list[SignatureGenerator],
+        func_ctx: FunctionContext,
+    ) -> list[FunctionSig]:
+        for sig_gen in sig_generators:
+            inferred = sig_gen.get_function_sig(default_signature, func_ctx)
+            if inferred:
+                return inferred
+
+        return [default_signature]
+
+    def get_property_type(
+        self,
+        default_type: str | None,
+        sig_generators: list[SignatureGenerator],
+        func_ctx: FunctionContext,
+    ) -> str | None:
+        for sig_gen in sig_generators:
+            inferred = sig_gen.get_property_type(default_type, func_ctx)
+            if inferred:
+                return inferred
+
+        return default_type
+
+    def format_func_def(
+        self,
+        sigs: list[FunctionSig],
+        is_coroutine: bool = False,
+        decorators: list[str] | None = None,
+        docstring: str | None = None,
+    ) -> list[str]:
+        lines: list[str] = []
+        if decorators is None:
+            decorators = []
+
+        for signature in sigs:
+            # dump decorators, just before "def ..."
+            for deco in decorators:
+                lines.append(f"{self._indent}{deco}")
+
+            lines.append(
+                signature.format_sig(
+                    indent=self._indent,
+                    is_async=is_coroutine,
+                    docstring=docstring if self._include_docstrings else None,
+                )
+            )
+        return lines
+
+    def print_annotation(
+        self,
+        t: Type,
+        known_modules: list[str] | None = None,
+        local_modules: list[str] | None = None,
+    ) -> str:
+        printer = AnnotationPrinter(self, known_modules, local_modules)
+        return t.accept(printer)
+
+    def is_not_in_all(self, name: str) -> bool:
+        if self.is_private_name(name):
+            return False
+        if self._all_:
+            return self.is_top_level() and name not in self._all_
+        return False
+
+    def is_private_name(self, name: str, fullname: str | None = None) -> bool:
+        if self._include_private:
+            return False
+        if fullname in self.EXTRA_EXPORTED:
+            return False
+        if name == "_":
+            return False
+        return name.startswith("_") and (not name.endswith("__") or name in self.IGNORED_DUNDERS)
+
+    def should_reexport(self, name: str, full_module: str, name_is_alias: bool) -> bool:
+        if (
+            not name_is_alias
+            and self.module_name
+            and (self.module_name + "." + name) in self.EXTRA_EXPORTED
+        ):
+            # Special case certain names that should be exported, against our general rules.
+            return True
+        is_private = self.is_private_name(name, full_module + "." + name)
+        top_level = full_module.split(".")[0]
+        self_top_level = self.module_name.split(".", 1)[0]
+        if (
+            not name_is_alias
+            and not self.export_less
+            and (not self._all_ or name in self.IGNORED_DUNDERS)
+            and self.module_name
+            and not is_private
+            and top_level in (self_top_level, "_" + self_top_level)
+        ):
+            # Export imports from the same package, since we can't reliably tell whether they
+            # are part of the public API.
+            return True
+        return False
diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py
index 7e30515ac892..ace0b4d95573 100644
--- a/mypy/test/teststubgen.py
+++ b/mypy/test/teststubgen.py
@@ -28,21 +28,19 @@
     Options,
     collect_build_targets,
     generate_stubs,
-    get_sig_generators,
     is_blacklisted_path,
     is_non_library_module,
     mypy_options,
     parse_options,
 )
-from mypy.stubgenc import (
-    generate_c_function_stub,
-    generate_c_property_stub,
-    generate_c_type_stub,
-    infer_method_args,
+from mypy.stubgenc import InspectionStubGenerator, infer_c_method_args
+from mypy.stubutil import (
+    ClassInfo,
+    common_dir_prefix,
     infer_method_ret_type,
-    is_c_property_readonly,
+    remove_misplaced_type_comments,
+    walk_packages,
 )
-from mypy.stubutil import common_dir_prefix, remove_misplaced_type_comments, walk_packages
 from mypy.test.data import DataDrivenTestCase, DataSuite
 from mypy.test.helpers import assert_equal, assert_string_arrays_equal, local_sys_path_set
 
@@ -62,7 +60,8 @@ def test_files_found(self) -> None:
                 os.mkdir(os.path.join("subdir", "pack"))
                 self.make_file("subdir", "pack", "__init__.py")
                 opts = parse_options(["subdir"])
-                py_mods, c_mods = collect_build_targets(opts, mypy_options(opts))
+                py_mods, pyi_mods, c_mods = collect_build_targets(opts, mypy_options(opts))
+                assert_equal(pyi_mods, [])
                 assert_equal(c_mods, [])
                 files = {mod.path for mod in py_mods}
                 assert_equal(
@@ -87,7 +86,8 @@ def test_packages_found(self) -> None:
                 self.make_file("pack", "a.py")
                 self.make_file("pack", "b.py")
                 opts = parse_options(["-p", "pack"])
-                py_mods, c_mods = collect_build_targets(opts, mypy_options(opts))
+                py_mods, pyi_mods, c_mods = collect_build_targets(opts, mypy_options(opts))
+                assert_equal(pyi_mods, [])
                 assert_equal(c_mods, [])
                 files = {os.path.relpath(mod.path or "FAIL") for mod in py_mods}
                 assert_equal(
@@ -111,7 +111,7 @@ def test_module_not_found(self) -> None:
                 os.chdir(tmp)
                 self.make_file(tmp, "mymodule.py", content="import a")
                 opts = parse_options(["-m", "mymodule"])
-                py_mods, c_mods = collect_build_targets(opts, mypy_options(opts))
+                collect_build_targets(opts, mypy_options(opts))
                 assert captured_output.getvalue() == ""
             finally:
                 sys.stdout = sys.__stdout__
@@ -702,10 +702,14 @@ def run_case_inner(self, testcase: DataDrivenTestCase) -> None:
         out_dir = "out"
         try:
             try:
-                if not testcase.name.endswith("_import"):
-                    options.no_import = True
-                if not testcase.name.endswith("_semanal"):
-                    options.parse_only = True
+                if testcase.name.endswith("_inspect"):
+                    options.inspect = True
+                else:
+                    if not testcase.name.endswith("_import"):
+                        options.no_import = True
+                    if not testcase.name.endswith("_semanal"):
+                        options.parse_only = True
+
                 generate_stubs(options)
                 a: list[str] = []
                 for module in modules:
@@ -781,35 +785,28 @@ class StubgencSuite(unittest.TestCase):
     """
 
     def test_infer_hash_sig(self) -> None:
-        assert_equal(infer_method_args("__hash__"), [self_arg])
+        assert_equal(infer_c_method_args("__hash__"), [self_arg])
         assert_equal(infer_method_ret_type("__hash__"), "int")
 
     def test_infer_getitem_sig(self) -> None:
-        assert_equal(infer_method_args("__getitem__"), [self_arg, ArgSig(name="index")])
+        assert_equal(infer_c_method_args("__getitem__"), [self_arg, ArgSig(name="index")])
 
     def test_infer_setitem_sig(self) -> None:
         assert_equal(
-            infer_method_args("__setitem__"),
+            infer_c_method_args("__setitem__"),
             [self_arg, ArgSig(name="index"), ArgSig(name="object")],
         )
         assert_equal(infer_method_ret_type("__setitem__"), "None")
 
+    def test_infer_eq_op_sig(self) -> None:
+        for op in ("eq", "ne", "lt", "le", "gt", "ge"):
+            assert_equal(
+                infer_c_method_args(f"__{op}__"), [self_arg, ArgSig(name="other", type="object")]
+            )
+
     def test_infer_binary_op_sig(self) -> None:
-        for op in (
-            "eq",
-            "ne",
-            "lt",
-            "le",
-            "gt",
-            "ge",
-            "add",
-            "radd",
-            "sub",
-            "rsub",
-            "mul",
-            "rmul",
-        ):
-            assert_equal(infer_method_args(f"__{op}__"), [self_arg, ArgSig(name="other")])
+        for op in ("add", "radd", "sub", "rsub", "mul", "rmul"):
+            assert_equal(infer_c_method_args(f"__{op}__"), [self_arg, ArgSig(name="other")])
 
     def test_infer_equality_op_sig(self) -> None:
         for op in ("eq", "ne", "lt", "le", "gt", "ge", "contains"):
@@ -817,46 +814,31 @@ def test_infer_equality_op_sig(self) -> None:
 
     def test_infer_unary_op_sig(self) -> None:
         for op in ("neg", "pos"):
-            assert_equal(infer_method_args(f"__{op}__"), [self_arg])
+            assert_equal(infer_c_method_args(f"__{op}__"), [self_arg])
 
     def test_infer_cast_sig(self) -> None:
         for op in ("float", "bool", "bytes", "int"):
             assert_equal(infer_method_ret_type(f"__{op}__"), op)
 
-    def test_generate_c_type_stub_no_crash_for_object(self) -> None:
+    def test_generate_class_stub_no_crash_for_object(self) -> None:
         output: list[str] = []
         mod = ModuleType("module", "")  # any module is fine
-        imports: list[str] = []
-        generate_c_type_stub(
-            mod,
-            "alias",
-            object,
-            output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
-        assert_equal(imports, [])
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+
+        gen.generate_class_stub("alias", object, output)
+        assert_equal(gen.get_imports().splitlines(), [])
         assert_equal(output[0], "class alias:")
 
-    def test_generate_c_type_stub_variable_type_annotation(self) -> None:
+    def test_generate_class_stub_variable_type_annotation(self) -> None:
         # This class mimics the stubgen unit test 'testClassVariable'
         class TestClassVariableCls:
             x = 1
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType("module", "")  # any module is fine
-        generate_c_type_stub(
-            mod,
-            "C",
-            TestClassVariableCls,
-            output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
-        assert_equal(imports, [])
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_class_stub("C", TestClassVariableCls, output)
+        assert_equal(gen.get_imports().splitlines(), ["from typing import ClassVar"])
         assert_equal(output, ["class C:", "    x: ClassVar[int] = ..."])
 
     def test_generate_c_type_inheritance(self) -> None:
@@ -864,35 +846,19 @@ class TestClass(KeyError):
             pass
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType("module, ")
-        generate_c_type_stub(
-            mod,
-            "C",
-            TestClass,
-            output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_class_stub("C", TestClass, output)
         assert_equal(output, ["class C(KeyError): ..."])
-        assert_equal(imports, [])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_type_inheritance_same_module(self) -> None:
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestBaseClass.__module__, "")
-        generate_c_type_stub(
-            mod,
-            "C",
-            TestClass,
-            output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_class_stub("C", TestClass, output)
         assert_equal(output, ["class C(TestBaseClass): ..."])
-        assert_equal(imports, [])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_type_inheritance_other_module(self) -> None:
         import argparse
@@ -901,38 +867,22 @@ class TestClass(argparse.Action):
             pass
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType("module", "")
-        generate_c_type_stub(
-            mod,
-            "C",
-            TestClass,
-            output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_class_stub("C", TestClass, output)
         assert_equal(output, ["class C(argparse.Action): ..."])
-        assert_equal(imports, ["import argparse"])
+        assert_equal(gen.get_imports().splitlines(), ["import argparse"])
 
     def test_generate_c_type_inheritance_builtin_type(self) -> None:
         class TestClass(type):
             pass
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType("module", "")
-        generate_c_type_stub(
-            mod,
-            "C",
-            TestClass,
-            output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_class_stub("C", TestClass, output)
         assert_equal(output, ["class C(type): ..."])
-        assert_equal(imports, [])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_type_with_docstring(self) -> None:
         class TestClass:
@@ -942,22 +892,16 @@ def test(self, arg0: str) -> None:
                 """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
         )
         assert_equal(output, ["def test(self, arg0: int) -> Any: ..."])
-        assert_equal(imports, [])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_type_with_docstring_no_self_arg(self) -> None:
         class TestClass:
@@ -967,22 +911,16 @@ def test(self, arg0: str) -> None:
                 """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
         )
         assert_equal(output, ["def test(self, arg0: int) -> Any: ..."])
-        assert_equal(imports, [])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_type_classmethod(self) -> None:
         class TestClass:
@@ -991,22 +929,16 @@ def test(cls, arg0: str) -> None:
                 pass
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="cls",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="cls", cls=TestClass, name="TestClass"),
         )
-        assert_equal(output, ["@classmethod", "def test(cls, *args, **kwargs) -> Any: ..."])
-        assert_equal(imports, [])
+        assert_equal(output, ["@classmethod", "def test(cls, *args, **kwargs): ..."])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_type_classmethod_with_overloads(self) -> None:
         class TestClass:
@@ -1019,19 +951,13 @@ def test(self, arg0: str) -> None:
                 pass
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="cls",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="cls", cls=TestClass, name="TestClass"),
         )
         assert_equal(
             output,
@@ -1044,7 +970,7 @@ def test(self, arg0: str) -> None:
                 "def test(cls, arg0: int) -> Any: ...",
             ],
         )
-        assert_equal(imports, ["from typing import overload"])
+        assert_equal(gen.get_imports().splitlines(), ["from typing import overload"])
 
     def test_generate_c_type_with_docstring_empty_default(self) -> None:
         class TestClass:
@@ -1054,22 +980,16 @@ def test(self, arg0: str = "") -> None:
                 """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
         )
         assert_equal(output, ["def test(self, arg0: str = ...) -> Any: ..."])
-        assert_equal(imports, [])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_function_other_module_arg(self) -> None:
         """Test that if argument references type from other module, module will be imported."""
@@ -1082,19 +1002,11 @@ def test(arg0: str) -> None:
             """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(self.__module__, "")
-        generate_c_function_stub(
-            mod,
-            "test",
-            test,
-            output=output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub("test", test, output=output)
         assert_equal(output, ["def test(arg0: argparse.Action) -> Any: ..."])
-        assert_equal(imports, ["import argparse"])
+        assert_equal(gen.get_imports().splitlines(), ["import argparse"])
 
     def test_generate_c_function_same_module(self) -> None:
         """Test that if annotation references type from same module but using full path, no module
@@ -1109,19 +1021,11 @@ def test(arg0: str) -> None:
             """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType("argparse", "")
-        generate_c_function_stub(
-            mod,
-            "test",
-            test,
-            output=output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub("test", test, output=output)
         assert_equal(output, ["def test(arg0: Action) -> Action: ..."])
-        assert_equal(imports, [])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_function_other_module(self) -> None:
         """Test that if annotation references type from other module, module will be imported."""
@@ -1132,19 +1036,11 @@ def test(arg0: str) -> None:
             """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(self.__module__, "")
-        generate_c_function_stub(
-            mod,
-            "test",
-            test,
-            output=output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub("test", test, output=output)
         assert_equal(output, ["def test(arg0: argparse.Action) -> argparse.Action: ..."])
-        assert_equal(set(imports), {"import argparse"})
+        assert_equal(gen.get_imports().splitlines(), ["import argparse"])
 
     def test_generate_c_function_same_module_nested(self) -> None:
         """Test that if annotation references type from same module but using full path, no module
@@ -1159,19 +1055,11 @@ def test(arg0: str) -> None:
             """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType("argparse", "")
-        generate_c_function_stub(
-            mod,
-            "test",
-            test,
-            output=output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub("test", test, output=output)
         assert_equal(output, ["def test(arg0: list[Action]) -> list[Action]: ..."])
-        assert_equal(imports, [])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_function_same_module_compound(self) -> None:
         """Test that if annotation references type from same module but using full path, no module
@@ -1186,19 +1074,11 @@ def test(arg0: str) -> None:
             """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType("argparse", "")
-        generate_c_function_stub(
-            mod,
-            "test",
-            test,
-            output=output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
-        assert_equal(output, ["def test(arg0: Union[Action,None]) -> Tuple[Action,None]: ..."])
-        assert_equal(imports, [])
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub("test", test, output=output)
+        assert_equal(output, ["def test(arg0: Union[Action, None]) -> Tuple[Action, None]: ..."])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_function_other_module_nested(self) -> None:
         """Test that if annotation references type from other module, module will be imported,
@@ -1210,19 +1090,13 @@ def test(arg0: str) -> None:
             """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(self.__module__, "")
-        generate_c_function_stub(
-            mod,
-            "test",
-            test,
-            output=output,
-            imports=imports,
-            known_modules=["foo", "foo.spangle", "bar"],
-            sig_generators=get_sig_generators(parse_options([])),
+        gen = InspectionStubGenerator(
+            mod.__name__, known_modules=["foo", "foo.spangle", "bar"], module=mod
         )
+        gen.generate_function_stub("test", test, output=output)
         assert_equal(output, ["def test(arg0: foo.bar.Action) -> other.Thing: ..."])
-        assert_equal(set(imports), {"import foo", "import other"})
+        assert_equal(gen.get_imports().splitlines(), ["import foo", "import other"])
 
     def test_generate_c_function_no_crash_for_non_str_docstring(self) -> None:
         def test(arg0: str) -> None:
@@ -1231,19 +1105,11 @@ def test(arg0: str) -> None:
         test.__doc__ = property(lambda self: "test(arg0: str) -> None")  # type: ignore[assignment]
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(self.__module__, "")
-        generate_c_function_stub(
-            mod,
-            "test",
-            test,
-            output=output,
-            imports=imports,
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
-        )
-        assert_equal(output, ["def test(*args, **kwargs) -> Any: ..."])
-        assert_equal(imports, [])
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub("test", test, output=output)
+        assert_equal(output, ["def test(*args, **kwargs): ..."])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_property_with_pybind11(self) -> None:
         """Signatures included by PyBind11 inside property.fget are read."""
@@ -1258,13 +1124,15 @@ def get_attribute(self) -> None:
 
         readwrite_properties: list[str] = []
         readonly_properties: list[str] = []
-        generate_c_property_stub(
+        mod = ModuleType("module", "")  # any module is fine
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_property_stub(
             "attribute",
+            TestClass.__dict__["attribute"],
             TestClass.attribute,
             [],
             readwrite_properties,
             readonly_properties,
-            is_c_property_readonly(TestClass.attribute),
         )
         assert_equal(readwrite_properties, [])
         assert_equal(readonly_properties, ["@property", "def attribute(self) -> str: ..."])
@@ -1284,15 +1152,17 @@ def attribute(self, value: int) -> None:
 
         readwrite_properties: list[str] = []
         readonly_properties: list[str] = []
-        generate_c_property_stub(
+        mod = ModuleType("module", "")  # any module is fine
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_property_stub(
             "attribute",
-            type(TestClass.attribute),
+            TestClass.__dict__["attribute"],
+            TestClass.attribute,
             [],
             readwrite_properties,
             readonly_properties,
-            is_c_property_readonly(TestClass.attribute),
         )
-        assert_equal(readwrite_properties, ["attribute: Any"])
+        assert_equal(readwrite_properties, ["attribute: Incomplete"])
         assert_equal(readonly_properties, [])
 
     def test_generate_c_type_with_single_arg_generic(self) -> None:
@@ -1303,22 +1173,16 @@ def test(self, arg0: str) -> None:
                 """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
         )
         assert_equal(output, ["def test(self, arg0: List[int]) -> Any: ..."])
-        assert_equal(imports, [])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_type_with_double_arg_generic(self) -> None:
         class TestClass:
@@ -1328,22 +1192,16 @@ def test(self, arg0: str) -> None:
                 """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
         )
-        assert_equal(output, ["def test(self, arg0: Dict[str,int]) -> Any: ..."])
-        assert_equal(imports, [])
+        assert_equal(output, ["def test(self, arg0: Dict[str, int]) -> Any: ..."])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_type_with_nested_generic(self) -> None:
         class TestClass:
@@ -1353,22 +1211,16 @@ def test(self, arg0: str) -> None:
                 """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
         )
-        assert_equal(output, ["def test(self, arg0: Dict[str,List[int]]) -> Any: ..."])
-        assert_equal(imports, [])
+        assert_equal(output, ["def test(self, arg0: Dict[str, List[int]]) -> Any: ..."])
+        assert_equal(gen.get_imports().splitlines(), [])
 
     def test_generate_c_type_with_generic_using_other_module_first(self) -> None:
         class TestClass:
@@ -1378,22 +1230,16 @@ def test(self, arg0: str) -> None:
                 """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
         )
-        assert_equal(output, ["def test(self, arg0: Dict[argparse.Action,int]) -> Any: ..."])
-        assert_equal(imports, ["import argparse"])
+        assert_equal(output, ["def test(self, arg0: Dict[argparse.Action, int]) -> Any: ..."])
+        assert_equal(gen.get_imports().splitlines(), ["import argparse"])
 
     def test_generate_c_type_with_generic_using_other_module_last(self) -> None:
         class TestClass:
@@ -1403,22 +1249,16 @@ def test(self, arg0: str) -> None:
                 """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "test",
             TestClass.test,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
         )
-        assert_equal(output, ["def test(self, arg0: Dict[str,argparse.Action]) -> Any: ..."])
-        assert_equal(imports, ["import argparse"])
+        assert_equal(output, ["def test(self, arg0: Dict[str, argparse.Action]) -> Any: ..."])
+        assert_equal(gen.get_imports().splitlines(), ["import argparse"])
 
     def test_generate_c_type_with_overload_pybind11(self) -> None:
         class TestClass:
@@ -1433,19 +1273,13 @@ def __init__(self, arg0: str) -> None:
                 """
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "__init__",
             TestClass.__init__,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
         )
         assert_equal(
             output,
@@ -1458,7 +1292,7 @@ def __init__(self, arg0: str) -> None:
                 "def __init__(self, *args, **kwargs) -> Any: ...",
             ],
         )
-        assert_equal(set(imports), {"from typing import overload"})
+        assert_equal(gen.get_imports().splitlines(), ["from typing import overload"])
 
     def test_generate_c_type_with_overload_shiboken(self) -> None:
         class TestClass:
@@ -1471,19 +1305,18 @@ def __init__(self, arg0: str) -> None:
                 pass
 
         output: list[str] = []
-        imports: list[str] = []
         mod = ModuleType(TestClass.__module__, "")
-        generate_c_function_stub(
-            mod,
+        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
+        gen.generate_function_stub(
             "__init__",
             TestClass.__init__,
             output=output,
-            imports=imports,
-            self_var="self",
-            cls=TestClass,
-            class_name="TestClass",
-            known_modules=[mod.__name__],
-            sig_generators=get_sig_generators(parse_options([])),
+            class_info=ClassInfo(
+                self_var="self",
+                cls=TestClass,
+                name="TestClass",
+                docstring=getattr(TestClass, "__doc__", None),
+            ),
         )
         assert_equal(
             output,
@@ -1494,7 +1327,7 @@ def __init__(self, arg0: str) -> None:
                 "def __init__(self, arg0: str, arg1: str) -> None: ...",
             ],
         )
-        assert_equal(set(imports), {"from typing import overload"})
+        assert_equal(gen.get_imports().splitlines(), ["from typing import overload"])
 
 
 class ArgSigSuite(unittest.TestCase):
diff --git a/mypy/traverser.py b/mypy/traverser.py
index 2fcc376cfb7c..d11dd395f978 100644
--- a/mypy/traverser.py
+++ b/mypy/traverser.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from mypy_extensions import mypyc_attr
+from mypy_extensions import mypyc_attr, trait
 
 from mypy.nodes import (
     REVEAL_TYPE,
@@ -94,6 +94,7 @@
 from mypy.visitor import NodeVisitor
 
 
+@trait
 @mypyc_attr(allow_interpreted_subclasses=True)
 class TraverserVisitor(NodeVisitor[None]):
     """A parse tree visitor that traverses the parse tree during visiting.
diff --git a/setup.py b/setup.py
index dcbdc96b3ccf..e3ebe9dd62ec 100644
--- a/setup.py
+++ b/setup.py
@@ -112,7 +112,6 @@ def run(self):
             "stubtest.py",
             "stubgenc.py",
             "stubdoc.py",
-            "stubutil.py",
         )
     ) + (
         # Don't want to grab this accidentally
diff --git a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi
index e69de29bb2d1..0cb252f00259 100644
--- a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi
+++ b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi
@@ -0,0 +1 @@
+from . import basics as basics
diff --git a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi
index ab5a4f4e78d2..6527f5733eaf 100644
--- a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi
+++ b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi
@@ -1,7 +1,7 @@
-from typing import ClassVar
+from typing import ClassVar, overload
 
-from typing import overload
 PI: float
+__version__: str
 
 class Point:
     class AngleUnit:
@@ -11,12 +11,10 @@ class Point:
         radian: ClassVar[Point.AngleUnit] = ...
         def __init__(self, value: int) -> None: ...
         def __eq__(self, other: object) -> bool: ...
-        def __getstate__(self) -> int: ...
         def __hash__(self) -> int: ...
         def __index__(self) -> int: ...
         def __int__(self) -> int: ...
         def __ne__(self, other: object) -> bool: ...
-        def __setstate__(self, state: int) -> None: ...
         @property
         def name(self) -> str: ...
         @property
@@ -30,12 +28,10 @@ class Point:
         pixel: ClassVar[Point.LengthUnit] = ...
         def __init__(self, value: int) -> None: ...
         def __eq__(self, other: object) -> bool: ...
-        def __getstate__(self) -> int: ...
         def __hash__(self) -> int: ...
         def __index__(self) -> int: ...
         def __int__(self) -> int: ...
         def __ne__(self, other: object) -> bool: ...
-        def __setstate__(self, state: int) -> None: ...
         @property
         def name(self) -> str: ...
         @property
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index 23dbf36a551b..d83d74306230 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -11,6 +11,11 @@ def f() -> None: ...
 
 [case testTwoFunctions]
 def f(a, b):
+    """
+    this is a docstring
+
+    more.
+    """
     x = 1
 def g(arg):
     pass
@@ -37,11 +42,21 @@ def f(x=True, y=False): ...
 [out]
 def f(x: bool = ..., y: bool = ...) -> None: ...
 
+[case testDefaultArgBool_inspect]
+def f(x=True, y=False): ...
+[out]
+def f(x: bool = ..., y: bool = ...): ...
+
 [case testDefaultArgStr]
 def f(x='foo'): ...
 [out]
 def f(x: str = ...) -> None: ...
 
+[case testDefaultArgStr_inspect]
+def f(x='foo'): ...
+[out]
+def f(x: str = ...): ...
+
 [case testDefaultArgBytes]
 def f(x=b'foo'): ...
 [out]
@@ -300,6 +315,7 @@ __all__ = []
 __author__ = ''
 __version__ = ''
 [out]
+__version__: str
 
 [case testBaseClass]
 class A: ...
@@ -361,6 +377,24 @@ class A:
     def f(self, x) -> None: ...
     def h(self) -> None: ...
 
+-- a read/write property is treated the same as an attribute
+[case testProperty_inspect]
+class A:
+    @property
+    def f(self):
+        return 1
+    @f.setter
+    def f(self, x): ...
+
+    def h(self):
+        self.f = 1
+[out]
+from _typeshed import Incomplete
+
+class A:
+    f: Incomplete
+    def h(self): ...
+
 [case testFunctoolsCachedProperty]
 import functools
 
@@ -435,6 +469,15 @@ class A:
     @classmethod
     def f(cls) -> None: ...
 
+[case testClassMethod_inspect]
+class A:
+    @classmethod
+    def f(cls): ...
+[out]
+class A:
+    @classmethod
+    def f(cls): ...
+
 [case testIfMainCheck]
 def a(): ...
 if __name__ == '__main__':
@@ -472,6 +515,23 @@ class B: ...
 class C:
     def f(self) -> None: ...
 
+[case testNoSpacesBetweenEmptyClasses_inspect]
+class X:
+    def g(self): ...
+class A: ...
+class B: ...
+class C:
+    def f(self): ...
+[out]
+class X:
+    def g(self): ...
+
+class A: ...
+class B: ...
+
+class C:
+    def f(self): ...
+
 [case testExceptionBaseClasses]
 class A(Exception): ...
 class B(ValueError): ...
@@ -490,6 +550,17 @@ class A:
 class A:
     def __eq__(self): ...
 
+[case testOmitSomeSpecialMethods_inspect]
+class A:
+    def __str__(self): ...
+    def __repr__(self): ...
+    def __eq__(self): ...
+    def __getstate__(self): ...
+    def __setstate__(self, state): ...
+[out]
+class A:
+    def __eq__(self) -> bool: ...
+
 -- Tests that will perform runtime imports of modules.
 -- Don't use `_import` suffix if there are unquoted forward references.
 
@@ -507,6 +578,13 @@ def g(): ...
 [out]
 def f() -> None: ...
 
+[case testOmitDefsNotInAll_inspect]
+__all__ = [] + ['f']
+def f(): ...
+def g(): ...
+[out]
+def f(): ...
+
 [case testVarDefsNotInAll_import]
 __all__ = [] + ['f', 'g']
 def f(): ...
@@ -517,6 +595,16 @@ def g(): ...
 def f() -> None: ...
 def g() -> None: ...
 
+[case testVarDefsNotInAll_inspect]
+__all__ = [] + ['f', 'g']
+def f(): ...
+x = 1
+y = 1
+def g(): ...
+[out]
+def f(): ...
+def g(): ...
+
 [case testIncludeClassNotInAll_import]
 __all__ = [] + ['f']
 def f(): ...
@@ -526,6 +614,15 @@ def f() -> None: ...
 
 class A: ...
 
+[case testIncludeClassNotInAll_inspect]
+__all__ = [] + ['f']
+def f(): ...
+class A: ...
+[out]
+def f(): ...
+
+class A: ...
+
 [case testAllAndClass_import]
 __all__ = ['A']
 class A:
@@ -636,6 +733,23 @@ class C:
 # Names in __all__ with no definition:
 #   g
 
+[case testCommentForUndefinedName_inspect]
+__all__ = ['f', 'x', 'C', 'g']
+def f(): ...
+x = 1
+class C:
+    def g(self): ...
+[out]
+def f(): ...
+
+x: int
+
+class C:
+    def g(self): ...
+
+# Names in __all__ with no definition:
+#   g
+
 [case testIgnoreSlots]
 class A:
     __slots__ = ()
@@ -649,6 +763,13 @@ class A:
 [out]
 class A: ...
 
+[case testSkipPrivateProperty_inspect]
+class A:
+    @property
+    def _foo(self): ...
+[out]
+class A: ...
+
 [case testIncludePrivateProperty]
 # flags:  --include-private
 class A:
@@ -659,6 +780,16 @@ class A:
     @property
     def _foo(self) -> None: ...
 
+[case testIncludePrivateProperty_inspect]
+# flags:  --include-private
+class A:
+    @property
+    def _foo(self): ...
+[out]
+class A:
+    @property
+    def _foo(self): ...
+
 [case testSkipPrivateStaticAndClassMethod]
 class A:
     @staticmethod
@@ -668,6 +799,15 @@ class A:
 [out]
 class A: ...
 
+[case testSkipPrivateStaticAndClassMethod_inspect]
+class A:
+    @staticmethod
+    def _foo(): ...
+    @classmethod
+    def _bar(cls): ...
+[out]
+class A: ...
+
 [case testIncludePrivateStaticAndClassMethod]
 # flags:  --include-private
 class A:
@@ -682,6 +822,20 @@ class A:
     @classmethod
     def _bar(cls) -> None: ...
 
+[case testIncludePrivateStaticAndClassMethod_inspect]
+# flags:  --include-private
+class A:
+    @staticmethod
+    def _foo(): ...
+    @classmethod
+    def _bar(cls): ...
+[out]
+class A:
+    @staticmethod
+    def _foo(): ...
+    @classmethod
+    def _bar(cls): ...
+
 [case testNamedtuple]
 import collections, typing, x
 X = collections.namedtuple('X', ['a', 'b'])
@@ -1801,6 +1955,19 @@ class Outer:
     class Inner: ...
 A = Outer.Inner
 
+-- needs improvement
+[case testNestedClass_inspect]
+class Outer:
+    class Inner:
+        pass
+
+A = Outer.Inner
+[out]
+class Outer:
+    class Inner: ...
+
+class A: ...
+
 [case testFunctionAlias_semanal]
 from asyncio import coroutine
 
@@ -2034,6 +2201,25 @@ class A:
 def f(x) -> None: ...
 def g(x, y: str): ...
 
+class A:
+    def f(self, x) -> None: ...
+
+-- Same as above
+[case testFunctionPartiallyAnnotated_inspect]
+def f(x) -> None:
+    pass
+
+def g(x, y: str):
+    pass
+
+class A:
+    def f(self, x) -> None:
+        pass
+
+[out]
+def f(x) -> None: ...
+def g(x, y: str): ...
+
 class A:
     def f(self, x) -> None: ...
 
@@ -2054,6 +2240,24 @@ def f(x: Any): ...
 def g(x, y: Any) -> str: ...
 def h(x: Any) -> str: ...
 
+-- Same as above
+[case testExplicitAnyArg_inspect]
+from typing import Any
+
+def f(x: Any):
+    pass
+def g(x, y: Any) -> str:
+    pass
+def h(x: Any) -> str:
+    pass
+
+[out]
+from typing import Any
+
+def f(x: Any): ...
+def g(x, y: Any) -> str: ...
+def h(x: Any) -> str: ...
+
 [case testExplicitReturnedAny]
 from typing import Any
 
@@ -2385,6 +2589,28 @@ def g() -> None: ...
 <out/p/tests.pyi was not generated>
 <out/p/tests/test_foo.pyi was not generated>
 
+[case testTestFiles_inspect]
+# modules: p p.x p.tests p.tests.test_foo
+
+[file p/__init__.py]
+def f(): pass
+
+[file p/x.py]
+def g(): pass
+
+[file p/tests/__init__.py]
+
+[file p/tests/test_foo.py]
+def test_thing(): pass
+
+[out]
+# p/__init__.pyi
+def f(): ...
+# p/x.pyi
+def g(): ...
+<out/p/tests.pyi was not generated>
+<out/p/tests/test_foo.pyi was not generated>
+
 [case testVerboseFlag]
 # Just test that --verbose does not break anything in a basic test case.
 # flags: --verbose
@@ -2686,6 +2912,8 @@ __uri__ = ''
 __version__ = ''
 
 [out]
+from m import __version__ as __version__
+
 class A: ...
 
 [case testHideDunderModuleAttributesWithAll_import]
@@ -2715,6 +2943,7 @@ __uri__ = ''
 __version__ = ''
 
 [out]
+from m import __version__ as __version__
 
 [case testAttrsClass_semanal]
 import attrs
@@ -2949,7 +3178,6 @@ class A:
     @overload
     def f(self, x: Tuple[int, int]) -> int: ...
 
-
 @overload
 def f(x: int, y: int) -> int: ...
 @overload
@@ -2993,7 +3221,6 @@ class A:
     @overload
     def f(self, x: Tuple[int, int]) -> int: ...
 
-
 @overload
 def f(x: int, y: int) -> int: ...
 @overload
@@ -3068,7 +3295,6 @@ class A:
     @classmethod
     def g(cls, x: typing.Tuple[int, int]) -> int: ...
 
-
 @typing.overload
 def f(x: int, y: int) -> int: ...
 @typing.overload
@@ -3147,7 +3373,6 @@ class A:
     @classmethod
     def g(cls, x: t.Tuple[int, int]) -> int: ...
 
-
 @t.overload
 def f(x: int, y: int) -> int: ...
 @t.overload
@@ -3345,6 +3570,67 @@ class Some:
     def __float__(self) -> float: ...
     def __index__(self) -> int: ...
 
+-- Same as above
+[case testKnownMagicMethodsReturnTypes_inspect]
+class Some:
+    def __len__(self): ...
+    def __length_hint__(self): ...
+    def __init__(self): ...
+    def __del__(self): ...
+    def __bool__(self): ...
+    def __bytes__(self): ...
+    def __format__(self, spec): ...
+    def __contains__(self, obj): ...
+    def __complex__(self): ...
+    def __int__(self): ...
+    def __float__(self): ...
+    def __index__(self): ...
+[out]
+class Some:
+    def __len__(self) -> int: ...
+    def __length_hint__(self) -> int: ...
+    def __init__(self) -> None: ...
+    def __del__(self) -> None: ...
+    def __bool__(self) -> bool: ...
+    def __bytes__(self) -> bytes: ...
+    def __format__(self, spec) -> str: ...
+    def __contains__(self, obj) -> bool: ...
+    def __complex__(self) -> complex: ...
+    def __int__(self) -> int: ...
+    def __float__(self) -> float: ...
+    def __index__(self) -> int: ...
+
+
+[case testKnownMagicMethodsArgTypes]
+class MismatchNames:
+    def __exit__(self, tp, val, tb): ...
+
+class MatchNames:
+    def __exit__(self, type, value, traceback): ...
+
+[out]
+class MismatchNames:
+    def __exit__(self, tp: type[BaseException] | None, val: BaseException | None, tb: types.TracebackType | None) -> None: ...
+
+class MatchNames:
+    def __exit__(self, type: type[BaseException] | None, value: BaseException | None, traceback: types.TracebackType | None) -> None: ...
+
+-- Same as above (but can generate import statements)
+[case testKnownMagicMethodsArgTypes_inspect]
+class MismatchNames:
+    def __exit__(self, tp, val, tb): ...
+
+class MatchNames:
+    def __exit__(self, type, value, traceback): ...
+
+[out]
+import types
+
+class MismatchNames:
+    def __exit__(self, tp: type[BaseException] | None, val: BaseException | None, tb: types.TracebackType | None): ...
+
+class MatchNames:
+    def __exit__(self, type: type[BaseException] | None, value: BaseException | None, traceback: types.TracebackType | None): ...
 
 [case testTypeVarPEP604Bound]
 from typing import TypeVar
@@ -3397,7 +3683,7 @@ from typing import TypedDict
 X = TypedDict('X', a=int, b=str)
 Y = TypedDict('X', a=int, b=str, total=False)
 [out]
-from typing import TypedDict
+from typing_extensions import TypedDict
 
 class X(TypedDict):
     a: int

From 2bcec24635670bcff6efab3d21641f39f0f35857 Mon Sep 17 00:00:00 2001
From: Valentin Stanciu <250871+svalentin@users.noreply.github.com>
Date: Mon, 16 Oct 2023 18:37:23 +0100
Subject: [PATCH 178/288] Stream dmypy output instead of dumping everything at
 the end (#16252)

This does 2 things:
1. It changes the IPC code to work with multiple messages.
2. It changes the dmypy client/server communication so that it streams
stdout/stderr instead of dumping everything at the end.

For 1, we have to provide a way to separate out different messages. I
chose to frame messages as bytes separated by whitespace character. That
means we have to encode the message in a scheme that escapes whitespace.
The `codecs.encode(<bytes_data>, 'base64')` seems reasonable. It encodes more
than needed but the application is not IPC IO limited so it should be fine.
With this convention in place, all we have to do is read from the socket
stream until we have a whitespace character.
The framing logic can be easily changed.

For 2, since we communicate with JSONs, it's easy to add a "finished"
key that tells us it's the final response from dmypy. Anything else is
just stdout/stderr output.

Note: dmypy server also returns out/err which is the output of actual
mypy type checking. Right now this change does not stream that output.
We can stream that in a followup change. We just have to decide on how
to differenciate the 4 text streams (stdout/stderr/out/err) that will
now be interleaved.

The WriteToConn class could use more love. I just put a bare minimum.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 mypy/dmypy/client.py | 35 +++++++++++------------
 mypy/dmypy_server.py | 20 ++++++--------
 mypy/dmypy_util.py   | 33 ++++++++++++++++++++--
 mypy/ipc.py          | 66 ++++++++++++++++++++++++++++++++++++--------
 mypy/test/testipc.py | 52 ++++++++++++++++++++++++++++------
 5 files changed, 155 insertions(+), 51 deletions(-)

diff --git a/mypy/dmypy/client.py b/mypy/dmypy/client.py
index c3a2308d1b44..229740e44db0 100644
--- a/mypy/dmypy/client.py
+++ b/mypy/dmypy/client.py
@@ -17,7 +17,7 @@
 from typing import Any, Callable, Mapping, NoReturn
 
 from mypy.dmypy_os import alive, kill
-from mypy.dmypy_util import DEFAULT_STATUS_FILE, receive
+from mypy.dmypy_util import DEFAULT_STATUS_FILE, receive, send
 from mypy.ipc import IPCClient, IPCException
 from mypy.util import check_python_version, get_terminal_width, should_force_color
 from mypy.version import __version__
@@ -659,28 +659,29 @@ def request(
     # so that it can format the type checking output accordingly.
     args["is_tty"] = sys.stdout.isatty() or should_force_color()
     args["terminal_width"] = get_terminal_width()
-    bdata = json.dumps(args).encode("utf8")
     _, name = get_status(status_file)
     try:
         with IPCClient(name, timeout) as client:
-            client.write(bdata)
-            response = receive(client)
+            send(client, args)
+
+            final = False
+            while not final:
+                response = receive(client)
+                final = bool(response.pop("final", False))
+                # Display debugging output written to stdout/stderr in the server process for convenience.
+                # This should not be confused with "out" and "err" fields in the response.
+                # Those fields hold the output of the "check" command, and are handled in check_output().
+                stdout = response.pop("stdout", None)
+                if stdout:
+                    sys.stdout.write(stdout)
+                stderr = response.pop("stderr", None)
+                if stderr:
+                    sys.stderr.write(stderr)
     except (OSError, IPCException) as err:
         return {"error": str(err)}
     # TODO: Other errors, e.g. ValueError, UnicodeError
-    else:
-        # Display debugging output written to stdout/stderr in the server process for convenience.
-        # This should not be confused with "out" and "err" fields in the response.
-        # Those fields hold the output of the "check" command, and are handled in check_output().
-        stdout = response.get("stdout")
-        if stdout:
-            sys.stdout.write(stdout)
-        stderr = response.get("stderr")
-        if stderr:
-            print("-" * 79)
-            print("stderr:")
-            sys.stdout.write(stderr)
-        return response
+
+    return response
 
 
 def get_status(status_file: str) -> tuple[int, str]:
diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py
index faa9a23fadfb..9cc0888fc208 100644
--- a/mypy/dmypy_server.py
+++ b/mypy/dmypy_server.py
@@ -23,7 +23,7 @@
 import mypy.build
 import mypy.errors
 import mypy.main
-from mypy.dmypy_util import receive
+from mypy.dmypy_util import WriteToConn, receive, send
 from mypy.find_sources import InvalidSourceList, create_source_list
 from mypy.fscache import FileSystemCache
 from mypy.fswatcher import FileData, FileSystemWatcher
@@ -208,10 +208,12 @@ def _response_metadata(self) -> dict[str, str]:
 
     def serve(self) -> None:
         """Serve requests, synchronously (no thread or fork)."""
+
         command = None
         server = IPCServer(CONNECTION_NAME, self.timeout)
         orig_stdout = sys.stdout
         orig_stderr = sys.stderr
+
         try:
             with open(self.status_file, "w") as f:
                 json.dump({"pid": os.getpid(), "connection_name": server.connection_name}, f)
@@ -219,10 +221,8 @@ def serve(self) -> None:
             while True:
                 with server:
                     data = receive(server)
-                    debug_stdout = io.StringIO()
-                    debug_stderr = io.StringIO()
-                    sys.stdout = debug_stdout
-                    sys.stderr = debug_stderr
+                    sys.stdout = WriteToConn(server, "stdout")  # type: ignore[assignment]
+                    sys.stderr = WriteToConn(server, "stderr")  # type: ignore[assignment]
                     resp: dict[str, Any] = {}
                     if "command" not in data:
                         resp = {"error": "No command found in request"}
@@ -239,15 +239,13 @@ def serve(self) -> None:
                                 tb = traceback.format_exception(*sys.exc_info())
                                 resp = {"error": "Daemon crashed!\n" + "".join(tb)}
                                 resp.update(self._response_metadata())
-                                resp["stdout"] = debug_stdout.getvalue()
-                                resp["stderr"] = debug_stderr.getvalue()
-                                server.write(json.dumps(resp).encode("utf8"))
+                                resp["final"] = True
+                                send(server, resp)
                                 raise
-                    resp["stdout"] = debug_stdout.getvalue()
-                    resp["stderr"] = debug_stderr.getvalue()
+                    resp["final"] = True
                     try:
                         resp.update(self._response_metadata())
-                        server.write(json.dumps(resp).encode("utf8"))
+                        send(server, resp)
                     except OSError:
                         pass  # Maybe the client hung up
                     if command == "stop":
diff --git a/mypy/dmypy_util.py b/mypy/dmypy_util.py
index 2aae41d998da..d95cba9f40b5 100644
--- a/mypy/dmypy_util.py
+++ b/mypy/dmypy_util.py
@@ -6,7 +6,7 @@
 from __future__ import annotations
 
 import json
-from typing import Any, Final
+from typing import Any, Final, Iterable
 
 from mypy.ipc import IPCBase
 
@@ -14,7 +14,7 @@
 
 
 def receive(connection: IPCBase) -> Any:
-    """Receive JSON data from a connection until EOF.
+    """Receive single JSON data frame from a connection.
 
     Raise OSError if the data received is not valid JSON or if it is
     not a dict.
@@ -23,9 +23,36 @@ def receive(connection: IPCBase) -> Any:
     if not bdata:
         raise OSError("No data received")
     try:
-        data = json.loads(bdata.decode("utf8"))
+        data = json.loads(bdata)
     except Exception as e:
         raise OSError("Data received is not valid JSON") from e
     if not isinstance(data, dict):
         raise OSError(f"Data received is not a dict ({type(data)})")
     return data
+
+
+def send(connection: IPCBase, data: Any) -> None:
+    """Send data to a connection encoded and framed.
+
+    The data must be JSON-serializable. We assume that a single send call is a
+    single frame to be sent on the connect.
+    """
+    connection.write(json.dumps(data))
+
+
+class WriteToConn:
+    """Helper class to write to a connection instead of standard output."""
+
+    def __init__(self, server: IPCBase, output_key: str = "stdout"):
+        self.server = server
+        self.output_key = output_key
+
+    def write(self, output: str) -> int:
+        resp: dict[str, Any] = {}
+        resp[self.output_key] = output
+        send(self.server, resp)
+        return len(output)
+
+    def writelines(self, lines: Iterable[str]) -> None:
+        for s in lines:
+            self.write(s)
diff --git a/mypy/ipc.py b/mypy/ipc.py
index d026f2429a0f..ab01f1b79e7d 100644
--- a/mypy/ipc.py
+++ b/mypy/ipc.py
@@ -7,6 +7,7 @@
 from __future__ import annotations
 
 import base64
+import codecs
 import os
 import shutil
 import sys
@@ -40,6 +41,10 @@ class IPCBase:
 
     This contains logic shared between the client and server, such as reading
     and writing.
+    We want to be able to send multiple "messages" over a single connection and
+    to be able to separate the messages. We do this by encoding the messages
+    in an alphabet that does not contain spaces, then adding a space for
+    separation. The last framed message is also followed by a space.
     """
 
     connection: _IPCHandle
@@ -47,12 +52,30 @@ class IPCBase:
     def __init__(self, name: str, timeout: float | None) -> None:
         self.name = name
         self.timeout = timeout
+        self.buffer = bytearray()
 
-    def read(self, size: int = 100000) -> bytes:
-        """Read bytes from an IPC connection until its empty."""
-        bdata = bytearray()
+    def frame_from_buffer(self) -> bytearray | None:
+        """Return a full frame from the bytes we have in the buffer."""
+        space_pos = self.buffer.find(b" ")
+        if space_pos == -1:
+            return None
+        # We have a full frame
+        bdata = self.buffer[:space_pos]
+        self.buffer = self.buffer[space_pos + 1 :]
+        return bdata
+
+    def read(self, size: int = 100000) -> str:
+        """Read bytes from an IPC connection until we have a full frame."""
+        bdata: bytearray | None = bytearray()
         if sys.platform == "win32":
             while True:
+                # Check if we already have a message in the buffer before
+                # receiving any more data from the socket.
+                bdata = self.frame_from_buffer()
+                if bdata is not None:
+                    break
+
+                # Receive more data into the buffer.
                 ov, err = _winapi.ReadFile(self.connection, size, overlapped=True)
                 try:
                     if err == _winapi.ERROR_IO_PENDING:
@@ -66,7 +89,10 @@ def read(self, size: int = 100000) -> bytes:
                 _, err = ov.GetOverlappedResult(True)
                 more = ov.getbuffer()
                 if more:
-                    bdata.extend(more)
+                    self.buffer.extend(more)
+                    bdata = self.frame_from_buffer()
+                    if bdata is not None:
+                        break
                 if err == 0:
                     # we are done!
                     break
@@ -77,17 +103,34 @@ def read(self, size: int = 100000) -> bytes:
                     raise IPCException("ReadFile operation aborted.")
         else:
             while True:
+                # Check if we already have a message in the buffer before
+                # receiving any more data from the socket.
+                bdata = self.frame_from_buffer()
+                if bdata is not None:
+                    break
+
+                # Receive more data into the buffer.
                 more = self.connection.recv(size)
                 if not more:
+                    # Connection closed
                     break
-                bdata.extend(more)
-        return bytes(bdata)
+                self.buffer.extend(more)
+
+        if not bdata:
+            # Socket was empty and we didn't get any frame.
+            # This should only happen if the socket was closed.
+            return ""
+        return codecs.decode(bdata, "base64").decode("utf8")
+
+    def write(self, data: str) -> None:
+        """Write to an IPC connection."""
+
+        # Frame the data by urlencoding it and separating by space.
+        encoded_data = codecs.encode(data.encode("utf8"), "base64") + b" "
 
-    def write(self, data: bytes) -> None:
-        """Write bytes to an IPC connection."""
         if sys.platform == "win32":
             try:
-                ov, err = _winapi.WriteFile(self.connection, data, overlapped=True)
+                ov, err = _winapi.WriteFile(self.connection, encoded_data, overlapped=True)
                 try:
                     if err == _winapi.ERROR_IO_PENDING:
                         timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE
@@ -101,12 +144,11 @@ def write(self, data: bytes) -> None:
                     raise
                 bytes_written, err = ov.GetOverlappedResult(True)
                 assert err == 0, err
-                assert bytes_written == len(data)
+                assert bytes_written == len(encoded_data)
             except OSError as e:
                 raise IPCException(f"Failed to write with error: {e.winerror}") from e
         else:
-            self.connection.sendall(data)
-            self.connection.shutdown(socket.SHUT_WR)
+            self.connection.sendall(encoded_data)
 
     def close(self) -> None:
         if sys.platform == "win32":
diff --git a/mypy/test/testipc.py b/mypy/test/testipc.py
index 9034f514bb45..8ef656dc4579 100644
--- a/mypy/test/testipc.py
+++ b/mypy/test/testipc.py
@@ -15,14 +15,25 @@
 def server(msg: str, q: Queue[str]) -> None:
     server = IPCServer(CONNECTION_NAME)
     q.put(server.connection_name)
-    data = b""
+    data = ""
     while not data:
         with server:
-            server.write(msg.encode())
+            server.write(msg)
             data = server.read()
     server.cleanup()
 
 
+def server_multi_message_echo(q: Queue[str]) -> None:
+    server = IPCServer(CONNECTION_NAME)
+    q.put(server.connection_name)
+    data = ""
+    with server:
+        while data != "quit":
+            data = server.read()
+            server.write(data)
+    server.cleanup()
+
+
 class IPCTests(TestCase):
     def test_transaction_large(self) -> None:
         queue: Queue[str] = Queue()
@@ -31,8 +42,8 @@ def test_transaction_large(self) -> None:
         p.start()
         connection_name = queue.get()
         with IPCClient(connection_name, timeout=1) as client:
-            assert client.read() == msg.encode()
-            client.write(b"test")
+            assert client.read() == msg
+            client.write("test")
         queue.close()
         queue.join_thread()
         p.join()
@@ -44,12 +55,37 @@ def test_connect_twice(self) -> None:
         p.start()
         connection_name = queue.get()
         with IPCClient(connection_name, timeout=1) as client:
-            assert client.read() == msg.encode()
-            client.write(b"")  # don't let the server hang up yet, we want to connect again.
+            assert client.read() == msg
+            client.write("")  # don't let the server hang up yet, we want to connect again.
 
         with IPCClient(connection_name, timeout=1) as client:
-            assert client.read() == msg.encode()
-            client.write(b"test")
+            assert client.read() == msg
+            client.write("test")
+        queue.close()
+        queue.join_thread()
+        p.join()
+        assert p.exitcode == 0
+
+    def test_multiple_messages(self) -> None:
+        queue: Queue[str] = Queue()
+        p = Process(target=server_multi_message_echo, args=(queue,), daemon=True)
+        p.start()
+        connection_name = queue.get()
+        with IPCClient(connection_name, timeout=1) as client:
+            # "foo bar" with extra accents on letters.
+            # In UTF-8 encoding so we don't confuse editors opening this file.
+            fancy_text = b"f\xcc\xb6o\xcc\xb2\xf0\x9d\x91\x9c \xd0\xb2\xe2\xb7\xa1a\xcc\xb6r\xcc\x93\xcd\x98\xcd\x8c"
+            client.write(fancy_text.decode("utf-8"))
+            assert client.read() == fancy_text.decode("utf-8")
+
+            client.write("Test with spaces")
+            client.write("Test write before reading previous")
+            time.sleep(0)  # yield to the server to force reading of all messages by server.
+            assert client.read() == "Test with spaces"
+            assert client.read() == "Test write before reading previous"
+
+            client.write("quit")
+            assert client.read() == "quit"
         queue.close()
         queue.join_thread()
         p.join()

From 85f40b5c8479cbca1d30f912fb95aa243b09c334 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 16 Oct 2023 21:18:55 +0100
Subject: [PATCH 179/288] Correctly handle variadic instances with empty
 arguments (#16238)

Fixes https://github.com/python/mypy/issues/16199

It was surprisingly hard to fix, because all possible fixes strongly
interfered with the code that makes "no-args" aliases possible:
```python
l = list
x: l[int]  # OK, same as list[int]
```
So after all I re-organized (and actually simplified) that old code.
---
 mypy/checkexpr.py                       |  5 ++-
 mypy/expandtype.py                      |  2 +-
 mypy/messages.py                        |  6 ++-
 mypy/semanal.py                         | 28 +++++++++----
 mypy/subtypes.py                        |  4 +-
 mypy/typeanal.py                        | 54 +++++++------------------
 mypy/types.py                           |  2 +
 test-data/unit/check-flags.test         | 19 +++++++++
 test-data/unit/check-typevar-tuple.test | 49 +++++++++++++++++++++-
 9 files changed, 116 insertions(+), 53 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index a1dd6d830758..a5c8c80e1580 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -4662,7 +4662,10 @@ class C(Generic[T, Unpack[Ts]]): ...
         info = t.type_object()
         # We reuse the logic from semanal phase to reduce code duplication.
         fake = Instance(info, args, line=ctx.line, column=ctx.column)
-        if not validate_instance(fake, self.chk.fail):
+        # This code can be only called either from checking a type application, or from
+        # checking a type alias (after the caller handles no_args aliases), so we know it
+        # was initially an IndexExpr, and we allow empty tuple type arguments.
+        if not validate_instance(fake, self.chk.fail, empty_tuple_index=True):
             fix_instance(
                 fake, self.chk.fail, self.chk.note, disallow_any=False, options=self.chk.options
             )
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 44716e6da013..cb09a1ee99f5 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -89,7 +89,7 @@ def expand_type_by_instance(typ: Type, instance: Instance) -> Type:
 def expand_type_by_instance(typ: Type, instance: Instance) -> Type:
     """Substitute type variables in type using values from an Instance.
     Type variables are considered to be bound by the class declaration."""
-    if not instance.args:
+    if not instance.args and not instance.type.has_type_var_tuple_type:
         return typ
     else:
         variables: dict[TypeVarId, Type] = {}
diff --git a/mypy/messages.py b/mypy/messages.py
index 5d03bf1babb9..dc5056f616ea 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -2505,8 +2505,10 @@ def format_literal_value(typ: LiteralType) -> str:
         else:
             base_str = itype.type.name
         if not itype.args:
-            # No type arguments, just return the type name
-            return base_str
+            if not itype.type.has_type_var_tuple_type:
+                # No type arguments, just return the type name
+                return base_str
+            return base_str + "[()]"
         elif itype.type.fullname == "builtins.tuple":
             item_type_str = format(itype.args[0])
             return f"{'tuple' if options.use_lowercase_names() else 'Tuple'}[{item_type_str}, ...]"
diff --git a/mypy/semanal.py b/mypy/semanal.py
index a476b62b31ec..1111b1df50e9 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -231,10 +231,11 @@
     check_for_explicit_any,
     detect_diverging_alias,
     find_self_type,
-    fix_instance_types,
+    fix_instance,
     has_any_from_unimported_type,
     no_subscript_builtin_alias,
     type_constructors,
+    validate_instance,
 )
 from mypy.typeops import function_type, get_type_vars, try_getting_str_literals_from_type
 from mypy.types import (
@@ -722,7 +723,9 @@ def create_alias(self, tree: MypyFile, target_name: str, alias: str, name: str)
                 target = self.named_type_or_none(target_name, [])
                 assert target is not None
                 # Transform List to List[Any], etc.
-                fix_instance_types(target, self.fail, self.note, self.options)
+                fix_instance(
+                    target, self.fail, self.note, disallow_any=False, options=self.options
+                )
                 alias_node = TypeAlias(
                     target,
                     alias,
@@ -3455,7 +3458,7 @@ def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Typ
 
     def analyze_alias(
         self, name: str, rvalue: Expression, allow_placeholder: bool = False
-    ) -> tuple[Type | None, list[TypeVarLikeType], set[str], list[str]]:
+    ) -> tuple[Type | None, list[TypeVarLikeType], set[str], list[str], bool]:
         """Check if 'rvalue' is a valid type allowed for aliasing (e.g. not a type variable).
 
         If yes, return the corresponding type, a list of
@@ -3474,7 +3477,7 @@ def analyze_alias(
             self.fail(
                 "Invalid type alias: expression is not a valid type", rvalue, code=codes.VALID_TYPE
             )
-            return None, [], set(), []
+            return None, [], set(), [], False
 
         found_type_vars = typ.accept(TypeVarLikeQuery(self, self.tvar_scope))
         tvar_defs: list[TypeVarLikeType] = []
@@ -3508,7 +3511,8 @@ def analyze_alias(
             new_tvar_defs.append(td)
 
         qualified_tvars = [node.fullname for _name, node in found_type_vars]
-        return analyzed, new_tvar_defs, depends_on, qualified_tvars
+        empty_tuple_index = typ.empty_tuple_index if isinstance(typ, UnboundType) else False
+        return analyzed, new_tvar_defs, depends_on, qualified_tvars, empty_tuple_index
 
     def is_pep_613(self, s: AssignmentStmt) -> bool:
         if s.unanalyzed_type is not None and isinstance(s.unanalyzed_type, UnboundType):
@@ -3591,9 +3595,10 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool:
             alias_tvars: list[TypeVarLikeType] = []
             depends_on: set[str] = set()
             qualified_tvars: list[str] = []
+            empty_tuple_index = False
         else:
             tag = self.track_incomplete_refs()
-            res, alias_tvars, depends_on, qualified_tvars = self.analyze_alias(
+            res, alias_tvars, depends_on, qualified_tvars, empty_tuple_index = self.analyze_alias(
                 lvalue.name, rvalue, allow_placeholder=True
             )
             if not res:
@@ -3626,8 +3631,15 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool:
         # Note: with the new (lazy) type alias representation we only need to set no_args to True
         # if the expected number of arguments is non-zero, so that aliases like A = List work.
         # However, eagerly expanding aliases like Text = str is a nice performance optimization.
-        no_args = isinstance(res, Instance) and not res.args  # type: ignore[misc]
-        fix_instance_types(res, self.fail, self.note, self.options)
+        no_args = (
+            isinstance(res, ProperType)
+            and isinstance(res, Instance)
+            and not res.args
+            and not empty_tuple_index
+        )
+        if isinstance(res, ProperType) and isinstance(res, Instance):
+            if not validate_instance(res, self.fail, empty_tuple_index):
+                fix_instance(res, self.fail, self.note, disallow_any=False, options=self.options)
         # Aliases defined within functions can't be accessed outside
         # the function, since the symbol table will no longer
         # exist. Work around by expanding them eagerly when used.
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 822c4b0ebf32..638553883dd8 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -544,7 +544,7 @@ def visit_instance(self, left: Instance) -> bool:
                     right_args = (
                         right_prefix + (TupleType(list(right_middle), fallback),) + right_suffix
                     )
-                    if not self.proper_subtype:
+                    if not self.proper_subtype and t.args:
                         for arg in map(get_proper_type, t.args):
                             if isinstance(arg, UnpackType):
                                 unpacked = get_proper_type(arg.type)
@@ -557,6 +557,8 @@ def visit_instance(self, left: Instance) -> bool:
                                 break
                         else:
                             return True
+                    if len(left_args) != len(right_args):
+                        return False
                     type_params = zip(left_args, right_args, right.type.defn.type_vars)
                 else:
                     type_params = zip(t.args, right.args, right.type.defn.type_vars)
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 385c5d35d67f..4743126c3d56 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -62,6 +62,7 @@
     ParamSpecType,
     PartialType,
     PlaceholderType,
+    ProperType,
     RawExpressionType,
     RequiredType,
     SyntheticTypeVisitor,
@@ -89,7 +90,6 @@
     has_type_vars,
 )
 from mypy.types_utils import is_bad_type_type_item
-from mypy.typetraverser import TypeTraverserVisitor
 from mypy.typevars import fill_typevars
 
 T = TypeVar("T")
@@ -425,9 +425,10 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool)
                 # The only case where instantiate_type_alias() can return an incorrect instance is
                 # when it is top-level instance, so no need to recurse.
                 if (
-                    isinstance(res, Instance)  # type: ignore[misc]
-                    and not self.defining_alias
-                    and not validate_instance(res, self.fail)
+                    isinstance(res, ProperType)
+                    and isinstance(res, Instance)
+                    and not (self.defining_alias and self.nesting_level == 0)
+                    and not validate_instance(res, self.fail, t.empty_tuple_index)
                 ):
                     fix_instance(
                         res,
@@ -442,7 +443,7 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool)
                     res = get_proper_type(res)
                 return res
             elif isinstance(node, TypeInfo):
-                return self.analyze_type_with_type_info(node, t.args, t)
+                return self.analyze_type_with_type_info(node, t.args, t, t.empty_tuple_index)
             elif node.fullname in TYPE_ALIAS_NAMES:
                 return AnyType(TypeOfAny.special_form)
             # Concatenate is an operator, no need for a proper type
@@ -700,7 +701,7 @@ def get_omitted_any(self, typ: Type, fullname: str | None = None) -> AnyType:
         return get_omitted_any(disallow_any, self.fail, self.note, typ, self.options, fullname)
 
     def analyze_type_with_type_info(
-        self, info: TypeInfo, args: Sequence[Type], ctx: Context
+        self, info: TypeInfo, args: Sequence[Type], ctx: Context, empty_tuple_index: bool
     ) -> Type:
         """Bind unbound type when were able to find target TypeInfo.
 
@@ -735,7 +736,9 @@ def analyze_type_with_type_info(
 
         # Check type argument count.
         instance.args = tuple(flatten_nested_tuples(instance.args))
-        if not self.defining_alias and not validate_instance(instance, self.fail):
+        if not (self.defining_alias and self.nesting_level == 0) and not validate_instance(
+            instance, self.fail, empty_tuple_index
+        ):
             fix_instance(
                 instance,
                 self.fail,
@@ -1203,7 +1206,7 @@ def visit_placeholder_type(self, t: PlaceholderType) -> Type:
         else:
             # TODO: Handle non-TypeInfo
             assert isinstance(n.node, TypeInfo)
-            return self.analyze_type_with_type_info(n.node, t.args, t)
+            return self.analyze_type_with_type_info(n.node, t.args, t, False)
 
     def analyze_callable_args_for_paramspec(
         self, callable_args: Type, ret_type: Type, fallback: Instance
@@ -2256,7 +2259,7 @@ def make_optional_type(t: Type) -> Type:
         return UnionType([t, NoneType()], t.line, t.column)
 
 
-def validate_instance(t: Instance, fail: MsgCallback) -> bool:
+def validate_instance(t: Instance, fail: MsgCallback, empty_tuple_index: bool) -> bool:
     """Check if this is a well-formed instance with respect to argument count/positions."""
     # TODO: combine logic with instantiate_type_alias().
     if any(unknown_unpack(a) for a in t.args):
@@ -2279,8 +2282,9 @@ def validate_instance(t: Instance, fail: MsgCallback) -> bool:
             )
             return False
         elif not t.args:
-            # The Any arguments should be set by the caller.
-            return False
+            if not (empty_tuple_index and len(t.type.type_vars) == 1):
+                # The Any arguments should be set by the caller.
+                return False
         else:
             # We also need to check if we are not performing a type variable tuple split.
             unpack = find_unpack_in_list(t.args)
@@ -2313,34 +2317,6 @@ def validate_instance(t: Instance, fail: MsgCallback) -> bool:
     return True
 
 
-def fix_instance_types(t: Type, fail: MsgCallback, note: MsgCallback, options: Options) -> None:
-    """Recursively fix all instance types (type argument count) in a given type.
-
-    For example 'Union[Dict, List[str, int]]' will be transformed into
-    'Union[Dict[Any, Any], List[Any]]' in place.
-    """
-    t.accept(InstanceFixer(fail, note, options))
-
-
-class InstanceFixer(TypeTraverserVisitor):
-    def __init__(self, fail: MsgCallback, note: MsgCallback, options: Options) -> None:
-        self.fail = fail
-        self.note = note
-        self.options = options
-
-    def visit_instance(self, typ: Instance) -> None:
-        super().visit_instance(typ)
-        if not validate_instance(typ, self.fail):
-            fix_instance(
-                typ,
-                self.fail,
-                self.note,
-                disallow_any=False,
-                options=self.options,
-                use_generic_error=True,
-            )
-
-
 def find_self_type(typ: Type, lookup: Callable[[str], SymbolTableNode | None]) -> bool:
     return typ.accept(HasSelfType(lookup))
 
diff --git a/mypy/types.py b/mypy/types.py
index 09ba68aae88a..ea81609fc605 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -3163,6 +3163,8 @@ def visit_instance(self, t: Instance) -> str:
                 s += f"[{self.list_str(t.args)}, ...]"
             else:
                 s += f"[{self.list_str(t.args)}]"
+        elif t.type.has_type_var_tuple_type and len(t.type.type_vars) == 1:
+            s += "[()]"
         if self.id_mapper:
             s += f"<{self.id_mapper.id(t.type)}>"
         return s
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
index 06b7cab8391b..546d02a07ad0 100644
--- a/test-data/unit/check-flags.test
+++ b/test-data/unit/check-flags.test
@@ -2277,3 +2277,22 @@ list(2)  # E: No overload variant of "list" matches argument type "int"  [call-o
          # N:     def [T] __init__(self) -> List[T] \
          # N:     def [T] __init__(self, x: Iterable[T]) -> List[T]
 [builtins fixtures/list.pyi]
+
+[case testNestedGenericInAliasDisallow]
+# flags: --disallow-any-generics
+from typing import TypeVar, Generic, List, Union
+
+class C(Generic[T]): ...
+
+A = Union[C, List]  # E: Missing type parameters for generic type "C" \
+                    # E: Missing type parameters for generic type "List"
+[builtins fixtures/list.pyi]
+
+[case testNestedGenericInAliasAllow]
+# flags: --allow-any-generics
+from typing import TypeVar, Generic, List, Union
+
+class C(Generic[T]): ...
+
+A = Union[C, List]  # OK
+[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 22a30432d098..4a281fbf0b49 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -118,7 +118,10 @@ variadic_single: Variadic[int]
 reveal_type(variadic_single)  # N: Revealed type is "__main__.Variadic[builtins.int]"
 
 empty: Variadic[()]
-reveal_type(empty)  # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[Any, ...]]]"
+reveal_type(empty)  # N: Revealed type is "__main__.Variadic[()]"
+
+omitted: Variadic
+reveal_type(omitted)  # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[Any, ...]]]"
 
 bad: Variadic[Unpack[Tuple[int, ...]], str, Unpack[Tuple[bool, ...]]]  # E: More than one Unpack in a type is not allowed
 reveal_type(bad)  # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[builtins.int, ...]], builtins.str]"
@@ -1846,6 +1849,50 @@ def foo3(func: Callable[[int, Unpack[Args2]], T], *args: Unpack[Args2]) -> T:
    return submit(func, 1, *args)
 [builtins fixtures/tuple.pyi]
 
+[case testTypeVarTupleEmptySpecialCase]
+from typing import Any, Callable, Generic
+from typing_extensions import Unpack, TypeVarTuple
+
+Ts = TypeVarTuple("Ts")
+class MyClass(Generic[Unpack[Ts]]):
+    func: Callable[[Unpack[Ts]], object]
+
+    def __init__(self, func: Callable[[Unpack[Ts]], object]) -> None:
+        self.func = func
+
+explicit: MyClass[()]
+reveal_type(explicit)  # N: Revealed type is "__main__.MyClass[()]"
+reveal_type(explicit.func)  # N: Revealed type is "def () -> builtins.object"
+
+a: Any
+explicit_2 = MyClass[()](a)
+reveal_type(explicit_2)  # N: Revealed type is "__main__.MyClass[()]"
+reveal_type(explicit_2.func)  # N: Revealed type is "def () -> builtins.object"
+
+Alias = MyClass[()]
+explicit_3: Alias
+reveal_type(explicit_3)  # N: Revealed type is "__main__.MyClass[()]"
+reveal_type(explicit_3.func)  # N: Revealed type is "def () -> builtins.object"
+
+explicit_4 = Alias(a)
+reveal_type(explicit_4)  # N: Revealed type is "__main__.MyClass[()]"
+reveal_type(explicit_4.func)  # N: Revealed type is "def () -> builtins.object"
+
+def no_args() -> None: ...
+implicit = MyClass(no_args)
+reveal_type(implicit)  # N: Revealed type is "__main__.MyClass[()]"
+reveal_type(implicit.func)  # N: Revealed type is "def () -> builtins.object"
+
+def one_arg(__a: int) -> None: ...
+x = MyClass(one_arg)
+x = explicit  # E: Incompatible types in assignment (expression has type "MyClass[()]", variable has type "MyClass[int]")
+
+# Consistently handle special case for no argument aliases
+Direct = MyClass
+y = Direct(one_arg)
+reveal_type(y)  # N: Revealed type is "__main__.MyClass[builtins.int]"
+[builtins fixtures/tuple.pyi]
+
 [case testTypeVarTupleRuntimeTypeApplication]
 from typing import Generic, TypeVar, Tuple
 from typing_extensions import Unpack, TypeVarTuple

From f5a3e233c99077317c4cf6fee7745686d67fd21b Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Tue, 17 Oct 2023 13:23:05 +0300
Subject: [PATCH 180/288] Bump test deps: `ruff` and `pre-commit-hooks`
 (#16273)

Release post: https://astral.sh/blog/ruff-v0.1.0
---
 .pre-commit-config.yaml | 4 ++--
 test-requirements.txt   | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e92d498fa3cc..bd2a09b7a8cf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,7 +1,7 @@
 exclude: '^(mypyc/external/)|(mypy/typeshed/)'  # Exclude all vendored code from lints
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0  # must match test-requirements.txt
+    rev: v4.5.0  # must match test-requirements.txt
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
@@ -10,7 +10,7 @@ repos:
     hooks:
       - id: black
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.292  # must match test-requirements.txt
+    rev: v0.1.0  # must match test-requirements.txt
     hooks:
       - id: ruff
         args: [--exit-non-zero-on-fix]
diff --git a/test-requirements.txt b/test-requirements.txt
index bdaad16fa88e..a1fa98917872 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -6,11 +6,11 @@ filelock>=3.3.0
 # lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses manylinux2014
 lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12'
 pre-commit
-pre-commit-hooks==4.4.0
+pre-commit-hooks==4.5.0
 psutil>=4.0
 pytest>=7.4.0
 pytest-xdist>=1.34.0
 pytest-cov>=2.10.0
-ruff==0.0.292  # must match version in .pre-commit-config.yaml
+ruff==0.1.0  # must match version in .pre-commit-config.yaml
 setuptools>=65.5.1
 tomli>=1.1.0  # needed even on py311+ so the self check passes with --python-version 3.7

From 4a9e6e60884c0bab89eb2ec6e947373c871f8aee Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Tue, 17 Oct 2023 15:46:34 +0100
Subject: [PATCH 181/288] Attempt to fix daemon crash related to ABCs (#16275)

I couldn't reproduce the crash with a small example, but this seems to
fix this crash in a large codebase:
```
Traceback (most recent call last):
  File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 234, in serve
    resp = self.run_command(command, data)
  File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 281, in run_command
    ret = method(self, **data)
  File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 359, in cmd_check
    return self.check(sources, export_types, is_tty, terminal_width)
  File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 413, in check
    res = self.initialize_fine_grained(sources, is_tty, terminal_width)
  File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 498, in initialize_fine_grained
    messages = self.fine_grained_manager.update(changed, removed)
  File "/Users/jukka/src/mypy/mypy/server/update.py", line 267, in update
    result = self.update_one(
  File "/Users/jukka/src/mypy/mypy/server/update.py", line 369, in update_one
    result = self.update_module(next_id, next_path, next_id in removed_set, followed)
  File "/Users/jukka/src/mypy/mypy/server/update.py", line 431, in update_module
    result = update_module_isolated(
  File "/Users/jukka/src/mypy/mypy/server/update.py", line 667, in update_module_isolated
    state.type_check_first_pass()
  File "/Users/jukka/src/mypy/mypy/build.py", line 2306, in type_check_first_pass
    self.type_checker().check_first_pass()
  File "/Users/jukka/src/mypy/mypy/checker.py", line 475, in check_first_pass
    self.accept(d)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 587, in accept
    report_internal_error(err, self.errors.file, stmt.line, self.errors, self.options)
  File "/Users/jukka/src/mypy/mypy/errors.py", line 1261, in report_internal_error
    raise err
  File "/Users/jukka/src/mypy/mypy/checker.py", line 585, in accept
    stmt.accept(self)
  File "/Users/jukka/src/mypy/mypy/nodes.py", line 900, in accept
    return visitor.visit_decorator(self)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 4773, in visit_decorator
    self.visit_decorator_inner(e)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 4778, in visit_decorator_inner
    self.check_func_item(e.func, name=e.func.name, allow_empty=allow_empty)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 1071, in check_func_item
    self.check_func_def(defn, typ, name, allow_empty)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 1281, in check_func_def
    self.accept(item.body)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 587, in accept
    report_internal_error(err, self.errors.file, stmt.line, self.errors, self.options)
  File "/Users/jukka/src/mypy/mypy/errors.py", line 1261, in report_internal_error
    raise err
  File "/Users/jukka/src/mypy/mypy/checker.py", line 585, in accept
    stmt.accept(self)
  File "/Users/jukka/src/mypy/mypy/nodes.py", line 1226, in accept
    return visitor.visit_block(self)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 2754, in visit_block
    self.accept(s)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 587, in accept
    report_internal_error(err, self.errors.file, stmt.line, self.errors, self.options)
  File "/Users/jukka/src/mypy/mypy/errors.py", line 1261, in report_internal_error
    raise err
  File "/Users/jukka/src/mypy/mypy/checker.py", line 585, in accept
    stmt.accept(self)
  File "/Users/jukka/src/mypy/mypy/nodes.py", line 1313, in accept
    return visitor.visit_assignment_stmt(self)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 2802, in visit_assignment_stmt
    self.check_assignment(s.lvalues[-1], s.rvalue, s.type is None, s.new_syntax)
  File "/Users/jukka/src/mypy/mypy/checker.py", line 3009, in check_assignment
    rvalue_type = self.expr_checker.accept(rvalue, type_context=type_context)
  File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 5372, in accept
    report_internal_error(
  File "/Users/jukka/src/mypy/mypy/errors.py", line 1261, in report_internal_error
    raise err
  File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 5370, in accept
    typ = node.accept(self)
  File "/Users/jukka/src/mypy/mypy/nodes.py", line 1907, in accept
    return visitor.visit_call_expr(self)
  File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 452, in visit_call_expr
    return self.visit_call_expr_inner(e, allow_none_return=allow_none_return)
  File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 581, in visit_call_expr_inner
    ret_type = self.check_call_expr_with_callee_type(
  File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 1420, in check_call_expr_with_callee_type
    ret_type, callee_type = self.check_call(
  File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 1514, in check_call
    return self.check_callable_call(
  File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 1638, in check_callable_call
    self.msg.cannot_instantiate_abstract_class(
  File "/Users/jukka/src/mypy/mypy/messages.py", line 1479, in cannot_instantiate_abstract_class
    attrs = format_string_list([f'"{a}"' for a in abstract_attributes])
  File "/Users/jukka/src/mypy/mypy/messages.py", line 2948, in format_string_list
    assert lst
AssertionError
```

I suspect that we first set `is_abstract` to true, and later the class
was no longer abstract and `abstract_attributes` got cleared, but
`is_abstract` was stuck at true.
---
 mypy/semanal_classprop.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py
index dfd4e5b6f122..b5f1b2181761 100644
--- a/mypy/semanal_classprop.py
+++ b/mypy/semanal_classprop.py
@@ -46,6 +46,8 @@ def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: E
     abstract attribute.  Also compute a list of abstract attributes.
     Report error is required ABCMeta metaclass is missing.
     """
+    typ.is_abstract = False
+    typ.abstract_attributes = []
     if typ.typeddict_type:
         return  # TypedDict can't be abstract
     concrete: set[str] = set()
@@ -56,7 +58,6 @@ def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: E
         # Special case: NewTypes are considered as always non-abstract, so they can be used as:
         #     Config = NewType('Config', Mapping[str, str])
         #     default = Config({'cannot': 'modify'})  # OK
-        typ.abstract_attributes = []
         return
     for base in typ.mro:
         for name, symnode in base.names.items():

From f3bdf5caaf6ccbba6c5df21b483fb9b716f13851 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 18 Oct 2023 04:40:46 +0100
Subject: [PATCH 182/288] Support fancy new syntax for variadic types (#16242)

This is the last significant thing I am aware of that is needed for PEP
646 support. After this and other currently open PRs are merged, I will
make an additional pass grepping for usual suspects and verifying we
didn't miss anything. Then we can flip the switch and announce this as
supported.
---
 mypy/exprtotype.py                  |  5 ++-
 mypy/fastparse.py                   |  8 +---
 mypy/messages.py                    |  2 +
 mypy/options.py                     |  3 ++
 mypy/semanal.py                     | 56 ++++++++++++++-----------
 mypy/typeanal.py                    |  5 ++-
 test-data/unit/check-python311.test | 65 +++++++++++++++++++++++++++++
 test-data/unit/check-python312.test |  2 -
 8 files changed, 111 insertions(+), 35 deletions(-)

diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py
index 5f0ef79acbd7..7a50429b81d1 100644
--- a/mypy/exprtotype.py
+++ b/mypy/exprtotype.py
@@ -103,7 +103,10 @@ def expr_to_unanalyzed_type(
                 return expr_to_unanalyzed_type(args[0], options, allow_new_syntax, expr)
             else:
                 base.args = tuple(
-                    expr_to_unanalyzed_type(arg, options, allow_new_syntax, expr) for arg in args
+                    expr_to_unanalyzed_type(
+                        arg, options, allow_new_syntax, expr, allow_unpack=True
+                    )
+                    for arg in args
                 )
             if not base.args:
                 base.empty_tuple_index = True
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index fe158d468ce8..95d99db84a15 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -1762,7 +1762,6 @@ def __init__(
         self.override_column = override_column
         self.node_stack: list[AST] = []
         self.is_evaluated = is_evaluated
-        self.allow_unpack = False
 
     def convert_column(self, column: int) -> int:
         """Apply column override if defined; otherwise return column.
@@ -2039,19 +2038,14 @@ def visit_Attribute(self, n: Attribute) -> Type:
         else:
             return self.invalid_type(n)
 
-    # Used for Callable[[X *Ys, Z], R]
+    # Used for Callable[[X *Ys, Z], R] etc.
     def visit_Starred(self, n: ast3.Starred) -> Type:
         return UnpackType(self.visit(n.value), from_star_syntax=True)
 
     # List(expr* elts, expr_context ctx)
     def visit_List(self, n: ast3.List) -> Type:
         assert isinstance(n.ctx, ast3.Load)
-        old_allow_unpack = self.allow_unpack
-        # We specifically only allow starred expressions in a list to avoid
-        # confusing errors for top-level unpacks (e.g. in base classes).
-        self.allow_unpack = True
         result = self.translate_argument_list(n.elts)
-        self.allow_unpack = old_allow_unpack
         return result
 
 
diff --git a/mypy/messages.py b/mypy/messages.py
index dc5056f616ea..19aafedd5586 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -2516,6 +2516,8 @@ def format_literal_value(typ: LiteralType) -> str:
             # There are type arguments. Convert the arguments to strings.
             return f"{base_str}[{format_list(itype.args)}]"
     elif isinstance(typ, UnpackType):
+        if options.use_star_unpack():
+            return f"*{format(typ.type)}"
         return f"Unpack[{format(typ.type)}]"
     elif isinstance(typ, TypeVarType):
         # This is similar to non-generic instance types.
diff --git a/mypy/options.py b/mypy/options.py
index 007ae0a78aa1..603ba79935ee 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -385,6 +385,9 @@ def use_or_syntax(self) -> bool:
             return not self.force_union_syntax
         return False
 
+    def use_star_unpack(self) -> bool:
+        return self.python_version >= (3, 11)
+
     # To avoid breaking plugin compatibility, keep providing new_semantic_analyzer
     @property
     def new_semantic_analyzer(self) -> bool:
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 1111b1df50e9..9c2452252208 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -1992,38 +1992,42 @@ def analyze_class_typevar_declaration(self, base: Type) -> tuple[TypeVarLikeList
         return None
 
     def analyze_unbound_tvar(self, t: Type) -> tuple[str, TypeVarLikeExpr] | None:
-        if not isinstance(t, UnboundType):
-            return None
-        unbound = t
-        sym = self.lookup_qualified(unbound.name, unbound)
+        if isinstance(t, UnpackType) and isinstance(t.type, UnboundType):
+            return self.analyze_unbound_tvar_impl(t.type, allow_tvt=True)
+        if isinstance(t, UnboundType):
+            sym = self.lookup_qualified(t.name, t)
+            if sym and sym.fullname in ("typing.Unpack", "typing_extensions.Unpack"):
+                inner_t = t.args[0]
+                if isinstance(inner_t, UnboundType):
+                    return self.analyze_unbound_tvar_impl(inner_t, allow_tvt=True)
+                return None
+            return self.analyze_unbound_tvar_impl(t)
+        return None
+
+    def analyze_unbound_tvar_impl(
+        self, t: UnboundType, allow_tvt: bool = False
+    ) -> tuple[str, TypeVarLikeExpr] | None:
+        sym = self.lookup_qualified(t.name, t)
         if sym and isinstance(sym.node, PlaceholderNode):
             self.record_incomplete_ref()
-        if sym and isinstance(sym.node, ParamSpecExpr):
+        if not allow_tvt and sym and isinstance(sym.node, ParamSpecExpr):
             if sym.fullname and not self.tvar_scope.allow_binding(sym.fullname):
                 # It's bound by our type variable scope
                 return None
-            return unbound.name, sym.node
-        if sym and sym.fullname in ("typing.Unpack", "typing_extensions.Unpack"):
-            inner_t = unbound.args[0]
-            if not isinstance(inner_t, UnboundType):
+            return t.name, sym.node
+        if allow_tvt and sym and isinstance(sym.node, TypeVarTupleExpr):
+            if sym.fullname and not self.tvar_scope.allow_binding(sym.fullname):
+                # It's bound by our type variable scope
                 return None
-            inner_unbound = inner_t
-            inner_sym = self.lookup_qualified(inner_unbound.name, inner_unbound)
-            if inner_sym and isinstance(inner_sym.node, PlaceholderNode):
-                self.record_incomplete_ref()
-            if inner_sym and isinstance(inner_sym.node, TypeVarTupleExpr):
-                if inner_sym.fullname and not self.tvar_scope.allow_binding(inner_sym.fullname):
-                    # It's bound by our type variable scope
-                    return None
-                return inner_unbound.name, inner_sym.node
-        if sym is None or not isinstance(sym.node, TypeVarExpr):
+            return t.name, sym.node
+        if sym is None or not isinstance(sym.node, TypeVarExpr) or allow_tvt:
             return None
         elif sym.fullname and not self.tvar_scope.allow_binding(sym.fullname):
             # It's bound by our type variable scope
             return None
         else:
             assert isinstance(sym.node, TypeVarExpr)
-            return unbound.name, sym.node
+            return t.name, sym.node
 
     def get_all_bases_tvars(
         self, base_type_exprs: list[Expression], removed: list[int]
@@ -5333,7 +5337,9 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None:
                 has_param_spec = False
                 num_args = -1
         elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo):
-            allow_unpack = base.node.has_type_var_tuple_type
+            allow_unpack = (
+                base.node.has_type_var_tuple_type or base.node.fullname == "builtins.tuple"
+            )
             has_param_spec = base.node.has_param_spec_type
             num_args = len(base.node.type_vars)
         else:
@@ -5343,7 +5349,7 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None:
 
         for item in items:
             try:
-                typearg = self.expr_to_unanalyzed_type(item)
+                typearg = self.expr_to_unanalyzed_type(item, allow_unpack=True)
             except TypeTranslationError:
                 self.fail("Type expected within [...]", expr)
                 return None
@@ -6608,8 +6614,10 @@ def type_analyzer(
         tpan.global_scope = not self.type and not self.function_stack
         return tpan
 
-    def expr_to_unanalyzed_type(self, node: Expression) -> ProperType:
-        return expr_to_unanalyzed_type(node, self.options, self.is_stub_file)
+    def expr_to_unanalyzed_type(self, node: Expression, allow_unpack: bool = False) -> ProperType:
+        return expr_to_unanalyzed_type(
+            node, self.options, self.is_stub_file, allow_unpack=allow_unpack
+        )
 
     def anal_type(
         self,
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 4743126c3d56..b16d0ac066b4 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -964,7 +964,10 @@ def visit_unpack_type(self, t: UnpackType) -> Type:
         if not self.allow_unpack:
             self.fail(message_registry.INVALID_UNPACK_POSITION, t.type, code=codes.VALID_TYPE)
             return AnyType(TypeOfAny.from_error)
-        return UnpackType(self.anal_type(t.type), from_star_syntax=t.from_star_syntax)
+        self.allow_type_var_tuple = True
+        result = UnpackType(self.anal_type(t.type), from_star_syntax=t.from_star_syntax)
+        self.allow_type_var_tuple = False
+        return result
 
     def visit_parameters(self, t: Parameters) -> Type:
         raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars")
diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test
index 5870c7e17bcc..37dc3ca0f5b4 100644
--- a/test-data/unit/check-python311.test
+++ b/test-data/unit/check-python311.test
@@ -77,3 +77,68 @@ async def coro() -> Generator[List[Any], None, None]:
 reveal_type(coro)  # N: Revealed type is "def () -> typing.Coroutine[Any, Any, typing.Generator[builtins.list[Any], None, None]]"
 [builtins fixtures/async_await.pyi]
 [typing fixtures/typing-async.pyi]
+
+[case testTypeVarTupleNewSyntaxAnnotations]
+Ints = tuple[int, int, int]
+x: tuple[str, *Ints]
+reveal_type(x)  # N: Revealed type is "Tuple[builtins.str, builtins.int, builtins.int, builtins.int]"
+y: tuple[int, *tuple[int, ...]]
+reveal_type(y)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]"
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleNewSyntaxGenerics]
+from typing import Generic, TypeVar, TypeVarTuple
+
+T = TypeVar("T")
+Ts = TypeVarTuple("Ts")
+class C(Generic[T, *Ts]):
+    attr: tuple[int, *Ts, str]
+
+    def test(self) -> None:
+        reveal_type(self.attr)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`2], builtins.str]"
+        self.attr = ci  # E: Incompatible types in assignment (expression has type "C[*Tuple[int, ...]]", variable has type "Tuple[int, *Ts, str]")
+    def meth(self, *args: *Ts) -> T: ...
+
+ci: C[*tuple[int, ...]]
+reveal_type(ci)  # N: Revealed type is "__main__.C[Unpack[builtins.tuple[builtins.int, ...]]]"
+reveal_type(ci.meth)  # N: Revealed type is "def (*args: builtins.int) -> builtins.int"
+c3: C[str, str, str]
+reveal_type(c3)  # N: Revealed type is "__main__.C[builtins.str, builtins.str, builtins.str]"
+
+A = C[int, *Ts]
+B = tuple[str, *tuple[str, str], str]
+z: A[*B]
+reveal_type(z)  # N: Revealed type is "__main__.C[builtins.int, builtins.str, builtins.str, builtins.str, builtins.str]"
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleNewSyntaxCallables]
+from typing import Generic, overload, TypeVar
+
+T1 = TypeVar("T1")
+T2 = TypeVar("T2")
+class MyClass(Generic[T1, T2]):
+    @overload
+    def __init__(self: MyClass[None, None]) -> None: ...
+
+    @overload
+    def __init__(self: MyClass[T1, None], *types: *tuple[type[T1]]) -> None: ...
+
+    @overload
+    def __init__(self: MyClass[T1, T2], *types: *tuple[type[T1], type[T2]]) -> None: ...
+
+    def __init__(self: MyClass[T1, T2], *types: *tuple[type, ...]) -> None:
+        pass
+
+myclass = MyClass()
+reveal_type(myclass)  # N: Revealed type is "__main__.MyClass[None, None]"
+myclass1 = MyClass(float)
+reveal_type(myclass1)  # N: Revealed type is "__main__.MyClass[builtins.float, None]"
+myclass2 = MyClass(float, float)
+reveal_type(myclass2)  # N: Revealed type is "__main__.MyClass[builtins.float, builtins.float]"
+myclass3 = MyClass(float, float, float)  # E: No overload variant of "MyClass" matches argument types "Type[float]", "Type[float]", "Type[float]" \
+                                         # N: Possible overload variants: \
+                                         # N:     def [T1, T2] __init__(self) -> MyClass[None, None] \
+                                         # N:     def [T1, T2] __init__(self, Type[T1], /) -> MyClass[T1, None] \
+                                         # N:     def [T1, T2] __init__(Type[T1], Type[T2], /) -> MyClass[T1, T2]
+reveal_type(myclass3)  # N: Revealed type is "Any"
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-python312.test b/test-data/unit/check-python312.test
index 91aca7794071..cb89eb34880c 100644
--- a/test-data/unit/check-python312.test
+++ b/test-data/unit/check-python312.test
@@ -41,8 +41,6 @@ type Alias2[**P] = Callable[P, int]  # E: PEP 695 type aliases are not yet suppo
                                      # E: Value of type "int" is not indexable \
                                      # E: Name "P" is not defined
 type Alias3[*Ts] = tuple[*Ts]  # E: PEP 695 type aliases are not yet supported \
-                               # E: Type expected within [...] \
-                               # E: The type "Type[Tuple[Any, ...]]" is not generic and not indexable \
                                # E: Name "Ts" is not defined
 
 class Cls1[T: int]: ...  # E: PEP 695 generics are not yet supported

From ffe89a21058eaa6eb1c1796d9ab87aece965e2d9 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Tue, 17 Oct 2023 20:51:22 -0700
Subject: [PATCH 183/288] Add a changelog (#16280)

I pre-populated it with blog post entries since mypy 1.0. There might be
some markdown or backslashes that are borked, feel free to push to this
PR if you notice anything.
---
 CHANGELOG.md | 1254 ++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 1254 insertions(+)
 create mode 100644 CHANGELOG.md

diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 000000000000..d8237795112b
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,1254 @@
+# Mypy Release Notes
+
+## Unreleased
+
+...
+
+#### Other Notable Changes and Fixes
+...
+
+#### Acknowledgements
+...
+
+## Mypy 1.6
+
+[Tuesday, 10 October 2023](https://mypy-lang.blogspot.com/2023/10/mypy-16-released.html)
+
+We’ve just uploaded mypy 1.6 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:
+
+    python3 -m pip install -U mypy
+
+You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
+
+#### Introduce Error Subcodes for Import Errors
+
+Mypy now uses the error code import-untyped if an import targets an installed library that doesn’t support static type checking, and no stub files are available. Other invalid imports produce the import-not-found error code. They both are subcodes of the import error code, which was previously used for both kinds of import-related errors.
+
+Use \--disable-error-code=import-untyped to only ignore import errors about installed libraries without stubs. This way mypy will still report errors about typos in import statements, for example.
+
+If you use \--warn-unused-ignore or \--strict, mypy will complain if you use \# type: ignore\[import\] to ignore an import error. You are expected to use one of the more specific error codes instead. Otherwise, ignoring the import error code continues to silence both errors.
+
+This feature was contributed by Shantanu (PR [15840](https://github.com/python/mypy/pull/15840), PR [14740](https://github.com/python/mypy/pull/14740)).
+
+#### Remove Support for Targeting Python 3.6 and Earlier
+
+Running mypy with \--python-version 3.6, for example, is no longer supported. Python 3.6 hasn’t been properly supported by mypy for some time now, and this makes it explicit. This was contributed by Nikita Sobolev (PR [15668](https://github.com/python/mypy/pull/15668)).
+
+#### Selective Filtering of \--disallow-untyped-calls Targets
+
+Using \--disallow-untyped-calls could be annoying when using libraries with missing type information, as mypy would generate many errors about code that uses the library. Now you can use \--untyped-calls-exclude=acme, for example, to disable these errors about calls targeting functions defined in the acme package. Refer to the [documentation](https://mypy.readthedocs.io/en/latest/command_line.html#cmdoption-mypy-untyped-calls-exclude) for more information.
+
+This feature was contributed by Ivan Levkivskyi (PR [15845](https://github.com/python/mypy/pull/15845)).
+
+#### Improved Type Inference between Callable Types
+
+Mypy now does a better job inferring type variables inside arguments of callable types. For example, this code fragment now type checks correctly:
+
+```python
+def f(c: Callable[[T, S], None]) -> Callable[[str, T, S], None]: ...
+def g(*x: int) -> None: ...
+
+reveal_type(f(g))  # Callable[[str, int, int], None]
+```
+
+This was contributed by Ivan Levkivskyi (PR [15910](https://github.com/python/mypy/pull/15910)).
+
+#### Don’t Consider None and TypeVar to Overlap in Overloads
+
+Mypy now doesn’t consider an overload item with an argument type None to overlap with a type variable:
+
+```python
+@overload
+def f(x: None) -> None: ...
+@overload
+def f(x: T) -> Foo[T]: ...
+...
+```
+
+Previously mypy would generate an error about the definition of f above. This is slightly unsafe if the upper bound of T is object, since the value of the type variable could be None. We relaxed the rules a little, since this solves a common issue.
+
+This feature was contributed by Ivan Levkivskyi (PR [15846](https://github.com/python/mypy/pull/15846)).
+
+#### Improvements to \--new-type-inference
+
+The experimental new type inference algorithm (polymorphic inference) introduced as an opt-in feature in mypy 1.5 has several improvements:
+
+*   Improve transitive closure computation during constraint solving (Ivan Levkivskyi, PR [15754](https://github.com/python/mypy/pull/15754))
+*   Add support for upper bounds and values with \--new-type-inference (Ivan Levkivskyi, PR [15813](https://github.com/python/mypy/pull/15813))
+*   Basic support for variadic types with \--new-type-inference (Ivan Levkivskyi, PR [15879](https://github.com/python/mypy/pull/15879))
+*   Polymorphic inference: support for parameter specifications and lambdas (Ivan Levkivskyi, PR [15837](https://github.com/python/mypy/pull/15837))
+*   Invalidate cache when adding \--new-type-inference (Marc Mueller, PR [16059](https://github.com/python/mypy/pull/16059))
+
+**Note:** We are planning to enable \--new-type-inference by default in mypy 1.7. Please try this out and let us know if you encounter any issues.
+
+#### ParamSpec Improvements
+
+*   Support self-types containing ParamSpec (Ivan Levkivskyi, PR [15903](https://github.com/python/mypy/pull/15903))
+*   Allow “…” in Concatenate, and clean up ParamSpec literals (Ivan Levkivskyi, PR [15905](https://github.com/python/mypy/pull/15905))
+*   Fix ParamSpec inference for callback protocols (Ivan Levkivskyi, PR [15986](https://github.com/python/mypy/pull/15986))
+*   Infer ParamSpec constraint from arguments (Ivan Levkivskyi, PR [15896](https://github.com/python/mypy/pull/15896))
+*   Fix crash on invalid type variable with ParamSpec (Ivan Levkivskyi, PR [15953](https://github.com/python/mypy/pull/15953))
+*   Fix subtyping between ParamSpecs (Ivan Levkivskyi, PR [15892](https://github.com/python/mypy/pull/15892))
+
+#### Stubgen Improvements
+
+*   Add option to include docstrings with stubgen (chylek, PR [13284](https://github.com/python/mypy/pull/13284))
+*   Add required ... initializer to NamedTuple fields with default values (Nikita Sobolev, PR [15680](https://github.com/python/mypy/pull/15680))
+
+#### Stubtest Improvements
+
+*   Fix \_\_mypy-replace false positives (Alex Waygood, PR [15689](https://github.com/python/mypy/pull/15689))
+*   Fix edge case for bytes enum subclasses (Alex Waygood, PR [15943](https://github.com/python/mypy/pull/15943))
+*   Generate error if typeshed is missing modules from the stdlib (Alex Waygood, PR [15729](https://github.com/python/mypy/pull/15729))
+*   Fixes to new check for missing stdlib modules (Alex Waygood, PR [15960](https://github.com/python/mypy/pull/15960))
+*   Fix stubtest enum.Flag edge case (Alex Waygood, PR [15933](https://github.com/python/mypy/pull/15933))
+
+#### Documentation Improvements
+
+*   Do not advertise to create your own assert\_never helper (Nikita Sobolev, PR [15947](https://github.com/python/mypy/pull/15947))
+*   Fix all the missing references found within the docs (Albert Tugushev, PR [15875](https://github.com/python/mypy/pull/15875))
+*   Document await-not-async error code (Shantanu, PR [15858](https://github.com/python/mypy/pull/15858))
+*   Improve documentation of disabling error codes (Shantanu, PR [15841](https://github.com/python/mypy/pull/15841))
+
+#### Other Notable Changes and Fixes
+
+*   Make unsupported PEP 695 features (introduced in Python 3.12) give a reasonable error message (Shantanu, PR [16013](https://github.com/python/mypy/pull/16013))
+*   Remove the \--py2 command-line argument (Marc Mueller, PR [15670](https://github.com/python/mypy/pull/15670))
+*   Change empty tuple from tuple\[\] to tuple\[()\] in error messages (Nikita Sobolev, PR [15783](https://github.com/python/mypy/pull/15783))
+*   Fix assert\_type failures when some nodes are deferred (Nikita Sobolev, PR [15920](https://github.com/python/mypy/pull/15920))
+*   Generate error on unbound TypeVar with values (Nikita Sobolev, PR [15732](https://github.com/python/mypy/pull/15732))
+*   Fix over-eager types-google-cloud-ndb suggestion (Shantanu, PR [15347](https://github.com/python/mypy/pull/15347))
+*   Fix type narrowing of \== None and in (None,) conditions (Marti Raudsepp, PR [15760](https://github.com/python/mypy/pull/15760))
+*   Fix inference for attrs.fields (Shantanu, PR [15688](https://github.com/python/mypy/pull/15688))
+*   Make “await in non-async function” a non-blocking error and give it an error code (Gregory Santosa, PR [15384](https://github.com/python/mypy/pull/15384))
+*   Add basic support for decorated overloads (Ivan Levkivskyi, PR [15898](https://github.com/python/mypy/pull/15898))
+*   Fix TypeVar regression with self types (Ivan Levkivskyi, PR [15945](https://github.com/python/mypy/pull/15945))
+*   Add \_\_match\_args\_\_ to dataclasses with no fields (Ali Hamdan, PR [15749](https://github.com/python/mypy/pull/15749))
+*   Include stdout and stderr in dmypy verbose output (Valentin Stanciu, PR [15881](https://github.com/python/mypy/pull/15881))
+*   Improve match narrowing and reachability analysis (Shantanu, PR [15882](https://github.com/python/mypy/pull/15882))
+*   Support \_\_bool\_\_ with Literal in \--warn-unreachable (Jannic Warken, PR [15645](https://github.com/python/mypy/pull/15645))
+*   Fix inheriting from generic @frozen attrs class (Ilya Priven, PR [15700](https://github.com/python/mypy/pull/15700))
+*   Correctly narrow types for tuple\[type\[X\], ...\] (Nikita Sobolev, PR [15691](https://github.com/python/mypy/pull/15691))
+*   Don't flag intentionally empty generators unreachable (Ilya Priven, PR [15722](https://github.com/python/mypy/pull/15722))
+*   Add tox.ini to mypy sdist (Marcel Telka, PR [15853](https://github.com/python/mypy/pull/15853))
+*   Fix mypyc regression with pretty (Shantanu, PR [16124](https://github.com/python/mypy/pull/16124))
+
+#### Typeshed Updates
+
+Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=6a8d653a671925b0a3af61729ff8cf3f90c9c662+0&branch=main&path=stdlib) for full list of typeshed changes.
+
+#### Acknowledgements
+
+Thanks to Max Murin, who did most of the release manager work for this release (I just did the final steps).
+
+Thanks to all mypy contributors who contributed to this release:
+
+*   Albert Tugushev
+*   Alex Waygood
+*   Ali Hamdan
+*   chylek
+*   EXPLOSION
+*   Gregory Santosa
+*   Ilya Priven
+*   Ivan Levkivskyi
+*   Jannic Warken
+*   KotlinIsland
+*   Marc Mueller
+*   Marcel Johannesmann
+*   Marcel Telka
+*   Mark Byrne
+*   Marti Raudsepp
+*   Max Murin
+*   Nikita Sobolev
+*   Shantanu
+*   Valentin Stanciu
+
+Posted by Jukka Lehtosalo
+
+
+## Mypy 1.5
+
+[Thursday, 10 August 2023](https://mypy-lang.blogspot.com/2023/08/mypy-15-released.html)
+
+We’ve just uploaded mypy 1.5 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, deprecations and bug fixes. You can install it as follows:
+
+    python3 -m pip install -U mypy
+
+You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
+
+#### Drop Support for Python 3.7
+
+Mypy no longer supports running with Python 3.7, which has reached end-of-life. This was contributed by Shantanu (PR [15566](https://github.com/python/mypy/pull/15566)).
+
+#### Optional Check to Require Explicit @override
+
+If you enable the explicit-override error code, mypy will generate an error if a method override doesn’t use the @typing.override decorator (as discussed in [PEP 698](https://peps.python.org/pep-0698/#strict-enforcement-per-project)). This way mypy will detect accidentally introduced overrides. Example:
+
+```python
+# mypy: enable-error-code="explicit-override"
+
+from typing_extensions import override
+
+class C:
+    def foo(self) -> None: pass
+    def bar(self) -> None: pass
+
+class D(C):
+    # Error: Method "foo" is not using @override but is
+    # overriding a method
+    def foo(self) -> None:
+        ...
+
+    @override
+    def bar(self) -> None:  # OK
+        ...
+```
+
+You can enable the error code via \--enable-error-code=explicit-override on the mypy command line or enable\_error\_code = explicit-override in the mypy config file.
+
+The override decorator will be available in typing in Python 3.12, but you can also use the backport from a recent version of `typing_extensions` on all supported Python versions.
+
+This feature was contributed by Marc Mueller (PR [15512](https://github.com/python/mypy/pull/15512)).
+
+#### More Flexible TypedDict Creation and Update
+
+Mypy was previously overly strict when type checking TypedDict creation and update operations. Though these checks were often technically correct, they sometimes triggered for apparently valid code. These checks have now been relaxed by default. You can enable stricter checking by using the new \--extra-checks flag.
+
+Construction using the `**` syntax is now more flexible:
+
+```python
+from typing import TypedDict
+
+class A(TypedDict):
+    foo: int
+    bar: int
+
+class B(TypedDict):
+    foo: int
+
+a: A = {"foo": 1, "bar": 2}
+b: B = {"foo": 3}
+a2: A = { **a, **b}  # OK (previously an error)
+```
+
+You can also call update() with a TypedDict argument that contains a subset of the keys in the updated TypedDict:
+```python
+a.update(b)  # OK (previously an error)
+```
+
+This feature was contributed by Ivan Levkivskyi (PR [15425](https://github.com/python/mypy/pull/15425)).
+
+#### Deprecated Flag: \--strict-concatenate
+
+The behavior of \--strict-concatenate is now included in the new \--extra-checks flag, and the old flag is deprecated.
+
+#### Optionally Show Links to Error Code Documentation
+
+If you use \--show-error-code-links, mypy will add documentation links to (many) reported errors. The links are not shown for error messages that are sufficiently obvious, and they are shown once per error code only.
+
+Example output:
+```
+a.py:1: error: Need type annotation for "foo" (hint: "x: List[<type>] = ...")  [var-annotated]
+a.py:1: note: See https://mypy.rtfd.io/en/stable/_refs.html#code-var-annotated for more info
+```
+This was contributed by Ivan Levkivskyi (PR [15449](https://github.com/python/mypy/pull/15449)).
+
+#### Consistently Avoid Type Checking Unreachable Code
+
+If a module top level has unreachable code, mypy won’t type check the unreachable statements. This is consistent with how functions behave. The behavior of \--warn-unreachable is also more consistent now.
+
+This was contributed by Ilya Priven (PR [15386](https://github.com/python/mypy/pull/15386)).
+
+#### Experimental Improved Type Inference for Generic Functions
+
+You can use \--new-type-inference to opt into an experimental new type inference algorithm. It fixes issues when calling a generic functions with an argument that is also a generic function, in particular. This current implementation is still incomplete, but we encourage trying it out and reporting bugs if you encounter regressions. We are planning to enable the new algorithm by default in a future mypy release.
+
+This feature was contributed by Ivan Levkivskyi (PR [15287](https://github.com/python/mypy/pull/15287)).
+
+#### Partial Support for Python 3.12
+
+Mypy and mypyc now support running on recent Python 3.12 development versions. Not all new Python 3.12 features are supported, and we don’t ship compiled wheels for Python 3.12 yet.
+
+*   Fix ast warnings for Python 3.12 (Nikita Sobolev, PR [15558](https://github.com/python/mypy/pull/15558))
+*   mypyc: Fix multiple inheritance with a protocol on Python 3.12 (Jukka Lehtosalo, PR [15572](https://github.com/python/mypy/pull/15572))
+*   mypyc: Fix self-compilation on Python 3.12 (Jukka Lehtosalo, PR [15582](https://github.com/python/mypy/pull/15582))
+*   mypyc: Fix 3.12 issue with pickling of instances with \_\_dict\_\_ (Jukka Lehtosalo, PR [15574](https://github.com/python/mypy/pull/15574))
+*   mypyc: Fix i16 on Python 3.12 (Jukka Lehtosalo, PR [15510](https://github.com/python/mypy/pull/15510))
+*   mypyc: Fix int operations on Python 3.12 (Jukka Lehtosalo, PR [15470](https://github.com/python/mypy/pull/15470))
+*   mypyc: Fix generators on Python 3.12 (Jukka Lehtosalo, PR [15472](https://github.com/python/mypy/pull/15472))
+*   mypyc: Fix classes with \_\_dict\_\_ on 3.12 (Jukka Lehtosalo, PR [15471](https://github.com/python/mypy/pull/15471))
+*   mypyc: Fix coroutines on Python 3.12 (Jukka Lehtosalo, PR [15469](https://github.com/python/mypy/pull/15469))
+*   mypyc: Don't use \_PyErr\_ChainExceptions on 3.12, since it's deprecated (Jukka Lehtosalo, PR [15468](https://github.com/python/mypy/pull/15468))
+*   mypyc: Add Python 3.12 feature macro (Jukka Lehtosalo, PR [15465](https://github.com/python/mypy/pull/15465))
+
+#### Improvements to Dataclasses
+
+*   Improve signature of dataclasses.replace (Ilya Priven, PR [14849](https://github.com/python/mypy/pull/14849))
+*   Fix dataclass/protocol crash on joining types (Ilya Priven, PR [15629](https://github.com/python/mypy/pull/15629))
+*   Fix strict optional handling in dataclasses (Ivan Levkivskyi, PR [15571](https://github.com/python/mypy/pull/15571))
+*   Support optional types for custom dataclass descriptors (Marc Mueller, PR [15628](https://github.com/python/mypy/pull/15628))
+*   Add `__slots__` attribute to dataclasses (Nikita Sobolev, PR [15649](https://github.com/python/mypy/pull/15649))
+*   Support better \_\_post\_init\_\_ method signature for dataclasses (Nikita Sobolev, PR [15503](https://github.com/python/mypy/pull/15503))
+
+#### Mypyc Improvements
+
+*   Support unsigned 8-bit native integer type: mypy\_extensions.u8 (Jukka Lehtosalo, PR [15564](https://github.com/python/mypy/pull/15564))
+*   Support signed 16-bit native integer type: mypy\_extensions.i16 (Jukka Lehtosalo, PR [15464](https://github.com/python/mypy/pull/15464))
+*   Define mypy\_extensions.i16 in stubs (Jukka Lehtosalo, PR [15562](https://github.com/python/mypy/pull/15562))
+*   Document more unsupported features and update supported features (Richard Si, PR [15524](https://github.com/python/mypy/pull/15524))
+*   Fix final NamedTuple classes (Richard Si, PR [15513](https://github.com/python/mypy/pull/15513))
+*   Use C99 compound literals for undefined tuple values (Jukka Lehtosalo, PR [15453](https://github.com/python/mypy/pull/15453))
+*   Don't explicitly assign NULL values in setup functions (Logan Hunt, PR [15379](https://github.com/python/mypy/pull/15379))
+
+#### Stubgen Improvements
+
+*   Teach stubgen to work with complex and unary expressions (Nikita Sobolev, PR [15661](https://github.com/python/mypy/pull/15661))
+*   Support ParamSpec and TypeVarTuple (Ali Hamdan, PR [15626](https://github.com/python/mypy/pull/15626))
+*   Fix crash on non-str docstring (Ali Hamdan, PR [15623](https://github.com/python/mypy/pull/15623))
+
+#### Documentation Updates
+
+*   Add documentation for additional error codes (Ivan Levkivskyi, PR [15539](https://github.com/python/mypy/pull/15539))
+*   Improve documentation of type narrowing (Ilya Priven, PR [15652](https://github.com/python/mypy/pull/15652))
+*   Small improvements to protocol documentation (Shantanu, PR [15460](https://github.com/python/mypy/pull/15460))
+*   Remove confusing instance variable example in cheat sheet (Adel Atallah, PR [15441](https://github.com/python/mypy/pull/15441))
+
+#### Other Notable Fixes and Improvements
+
+*   Constant fold additional unary and binary expressions (Richard Si, PR [15202](https://github.com/python/mypy/pull/15202))
+*   Exclude the same special attributes from Protocol as CPython (Kyle Benesch, PR [15490](https://github.com/python/mypy/pull/15490))
+*   Change the default value of the slots argument of attrs.define to True, to match runtime behavior (Ilya Priven, PR [15642](https://github.com/python/mypy/pull/15642))
+*   Fix type of class attribute if attribute is defined in both class and metaclass (Alex Waygood, PR [14988](https://github.com/python/mypy/pull/14988))
+*   Handle type the same as typing.Type in the first argument of classmethods (Erik Kemperman, PR [15297](https://github.com/python/mypy/pull/15297))
+*   Fix \--find-occurrences flag (Shantanu, PR [15528](https://github.com/python/mypy/pull/15528))
+*   Fix error location for class patterns (Nikita Sobolev, PR [15506](https://github.com/python/mypy/pull/15506))
+*   Fix re-added file with errors in mypy daemon (Ivan Levkivskyi, PR [15440](https://github.com/python/mypy/pull/15440))
+*   Fix dmypy run on Windows (Ivan Levkivskyi, PR [15429](https://github.com/python/mypy/pull/15429))
+*   Fix abstract and non-abstract variant error for property deleter (Shantanu, PR [15395](https://github.com/python/mypy/pull/15395))
+*   Remove special casing for "cannot" in error messages (Ilya Priven, PR [15428](https://github.com/python/mypy/pull/15428))
+*   Add runtime `__slots__` attribute to attrs classes (Nikita Sobolev, PR [15651](https://github.com/python/mypy/pull/15651))
+*   Add get\_expression\_type to CheckerPluginInterface (Ilya Priven, PR [15369](https://github.com/python/mypy/pull/15369))
+*   Remove parameters that no longer exist from NamedTuple.\_make() (Alex Waygood, PR [15578](https://github.com/python/mypy/pull/15578))
+*   Allow using typing.Self in `__all__` with an explicit @staticmethod decorator (Erik Kemperman, PR [15353](https://github.com/python/mypy/pull/15353))
+*   Fix self types in subclass methods without Self annotation (Ivan Levkivskyi, PR [15541](https://github.com/python/mypy/pull/15541))
+*   Check for abstract class objects in tuples (Nikita Sobolev, PR [15366](https://github.com/python/mypy/pull/15366))
+
+#### Typeshed Updates
+
+Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=fc7d4722eaa54803926cee5730e1f784979c0531+0&branch=main&path=stdlib) for full list of typeshed changes.
+
+#### Acknowledgements
+
+Thanks to all mypy contributors who contributed to this release:
+
+*   Adel Atallah
+*   Alex Waygood
+*   Ali Hamdan
+*   Erik Kemperman
+*   Federico Padua
+*   Ilya Priven
+*   Ivan Levkivskyi
+*   Jelle Zijlstra
+*   Jared Hance
+*   Jukka Lehtosalo
+*   Kyle Benesch
+*   Logan Hunt
+*   Marc Mueller
+*   Nikita Sobolev
+*   Richard Si
+*   Shantanu
+*   Stavros Ntentos
+*   Valentin Stanciu
+
+Posted by Valentin Stanciu
+
+
+## Mypy 1.4
+
+[Tuesday, 20 June 2023](https://mypy-lang.blogspot.com/2023/06/mypy-140-released.html)
+
+We’ve just uploaded mypy 1.4 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:
+
+    python3 -m pip install -U mypy
+
+You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
+
+#### The Override Decorator
+
+Mypy can now ensure that when renaming a method, overrides are also renamed. You can explicitly mark a method as overriding a base class method by using the @typing.override decorator ([PEP 698](https://peps.python.org/pep-0698/)). If the method is then renamed in the base class while the method override is not, mypy will generate an error. The decorator will be available in typing in Python 3.12, but you can also use the backport from a recent version of `typing_extensions` on all supported Python versions.
+
+This feature was contributed by Thomas M Kehrenberg (PR [14609](https://github.com/python/mypy/pull/14609)).
+
+#### Propagating Type Narrowing to Nested Functions
+
+Previously, type narrowing was not propagated to nested functions because it would not be sound if the narrowed variable changed between the definition of the nested function and the call site. Mypy will now propagate the narrowed type if the variable is not assigned to after the definition of the nested function:
+
+```python
+def outer(x: str | None = None) -> None:
+    if x is None:
+        x = calculate_default()
+    reveal_type(x)  # "str" (narrowed)
+
+    def nested() -> None:
+        reveal_type(x)  # Now "str" (used to be "str | None")
+
+    nested()
+```
+
+This may generate some new errors because asserts that were previously necessary may become tautological or no-ops.
+
+This was contributed by Jukka Lehtosalo (PR [15133](https://github.com/python/mypy/pull/15133)).
+
+#### Narrowing Enum Values Using “==”
+
+Mypy now allows narrowing enum types using the \== operator. Previously this was only supported when using the is operator. This makes exhaustiveness checking with enum types more usable, as the requirement to use the is operator was not very intuitive. In this example mypy can detect that the developer forgot to handle the value MyEnum.C in example
+
+```python
+from enum import Enum
+
+class MyEnum(Enum):
+    A = 0
+    B = 1
+    C = 2
+
+def example(e: MyEnum) -> str:  # Error: Missing return statement
+    if e == MyEnum.A:
+        return 'x'
+    elif e == MyEnum.B:
+        return 'y'
+```
+
+Adding an extra elif case resolves the error:
+
+```python
+...
+def example(e: MyEnum) -> str:  # No error -- all values covered
+    if e == MyEnum.A:
+        return 'x'
+    elif e == MyEnum.B:
+        return 'y'
+    elif e == MyEnum.C:
+        return 'z'
+```
+
+This change can cause false positives in test cases that have assert statements like assert o.x == SomeEnum.X when using \--strict-equality. Example:
+
+```python
+# mypy: strict-equality
+
+from enum import Enum
+
+class MyEnum(Enum):
+    A = 0
+    B = 1
+
+class C:
+    x: MyEnum
+    ...
+
+def test_something() -> None:
+    c = C(...)
+    assert c.x == MyEnum.A
+    c.do_something_that_changes_x()
+    assert c.x == MyEnum.B  # Error: Non-overlapping equality check
+```
+
+These errors can be ignored using \# type: ignore\[comparison-overlap\], or you can perform the assertion using a temporary variable as a workaround:
+
+```python
+...
+def test_something() -> None:
+    ...
+    x = c.x
+    assert x == MyEnum.A  # Does not narrow c.x
+    c.do_something_that_changes_x()
+    x = c.x
+    assert x == MyEnum.B  # OK
+```
+
+This feature was contributed by Shantanu (PR [11521](https://github.com/python/mypy/pull/11521)).
+
+#### Performance Improvements
+
+*   Speed up simplification of large union types and also fix a recursive tuple crash (Shantanu, PR [15128](https://github.com/python/mypy/pull/15128))
+*   Speed up union subtyping (Shantanu, PR [15104](https://github.com/python/mypy/pull/15104))
+*   Don't type check most function bodies when type checking third-party library code, or generally when ignoring errors (Jukka Lehtosalo, PR [14150](https://github.com/python/mypy/pull/14150))
+
+#### Improvements to Plugins
+
+*   attrs.evolve: Support generics and unions (Ilya Konstantinov, PR [15050](https://github.com/python/mypy/pull/15050))
+*   Fix ctypes plugin (Alex Waygood)
+
+#### Fixes to Crashes
+
+*   Fix a crash when function-scope recursive alias appears as upper bound (Ivan Levkivskyi, PR [15159](https://github.com/python/mypy/pull/15159))
+*   Fix crash on follow\_imports\_for\_stubs (Ivan Levkivskyi, PR [15407](https://github.com/python/mypy/pull/15407))
+*   Fix stubtest crash in explicit init subclass (Shantanu, PR [15399](https://github.com/python/mypy/pull/15399))
+*   Fix crash when indexing TypedDict with empty key (Shantanu, PR [15392](https://github.com/python/mypy/pull/15392))
+*   Fix crash on NamedTuple as attribute (Ivan Levkivskyi, PR [15404](https://github.com/python/mypy/pull/15404))
+*   Correctly track loop depth for nested functions/classes (Ivan Levkivskyi, PR [15403](https://github.com/python/mypy/pull/15403))
+*   Fix crash on joins with recursive tuples (Ivan Levkivskyi, PR [15402](https://github.com/python/mypy/pull/15402))
+*   Fix crash with custom ErrorCode subclasses (Marc Mueller, PR [15327](https://github.com/python/mypy/pull/15327))
+*   Fix crash in dataclass protocol with self attribute assignment (Ivan Levkivskyi, PR [15157](https://github.com/python/mypy/pull/15157))
+*   Fix crash on lambda in generic context with generic method in body (Ivan Levkivskyi, PR [15155](https://github.com/python/mypy/pull/15155))
+*   Fix recursive type alias crash in make\_simplified\_union (Ivan Levkivskyi, PR [15216](https://github.com/python/mypy/pull/15216))
+
+#### Improvements to Error Messages
+
+*   Use lower-case built-in collection types such as list\[…\] instead of List\[…\] in errors when targeting Python 3.9+ (Max Murin, PR [15070](https://github.com/python/mypy/pull/15070))
+*   Use X | Y union syntax in error messages when targeting Python 3.10+ (Omar Silva, PR [15102](https://github.com/python/mypy/pull/15102))
+*   Use type instead of Type in errors when targeting Python 3.9+ (Rohit Sanjay, PR [15139](https://github.com/python/mypy/pull/15139))
+*   Do not show unused-ignore errors in unreachable code, and make it a real error code (Ivan Levkivskyi, PR [15164](https://github.com/python/mypy/pull/15164))
+*   Don’t limit the number of errors shown by default (Rohit Sanjay, PR [15138](https://github.com/python/mypy/pull/15138))
+*   Improve message for truthy functions (madt2709, PR [15193](https://github.com/python/mypy/pull/15193))
+*   Output distinct types when type names are ambiguous (teresa0605, PR [15184](https://github.com/python/mypy/pull/15184))
+*   Update message about invalid exception type in try (AJ Rasmussen, PR [15131](https://github.com/python/mypy/pull/15131))
+*   Add explanation if argument type is incompatible because of an unsupported numbers type (Jukka Lehtosalo, PR [15137](https://github.com/python/mypy/pull/15137))
+*   Add more detail to 'signature incompatible with supertype' messages for non-callables (Ilya Priven, PR [15263](https://github.com/python/mypy/pull/15263))
+
+#### Documentation Updates
+
+*   Add \--local-partial-types note to dmypy docs (Alan Du, PR [15259](https://github.com/python/mypy/pull/15259))
+*   Update getting started docs for mypyc for Windows (Valentin Stanciu, PR [15233](https://github.com/python/mypy/pull/15233))
+*   Clarify usage of callables regarding type object in docs (Viicos, PR [15079](https://github.com/python/mypy/pull/15079))
+*   Clarify difference between disallow\_untyped\_defs and disallow\_incomplete\_defs (Ilya Priven, PR [15247](https://github.com/python/mypy/pull/15247))
+*   Use attrs and @attrs.define in documentation and tests (Ilya Priven, PR [15152](https://github.com/python/mypy/pull/15152))
+
+#### Mypyc Improvements
+
+*   Fix unexpected TypeError for certain variables with an inferred optional type (Richard Si, PR [15206](https://github.com/python/mypy/pull/15206))
+*   Inline math literals (Logan Hunt, PR [15324](https://github.com/python/mypy/pull/15324))
+*   Support unpacking mappings in dict display (Richard Si, PR [15203](https://github.com/python/mypy/pull/15203))
+
+#### Changes to Stubgen
+
+*   Do not remove Generic from base classes (Ali Hamdan, PR [15316](https://github.com/python/mypy/pull/15316))
+*   Support yield from statements (Ali Hamdan, PR [15271](https://github.com/python/mypy/pull/15271))
+*   Fix missing total from TypedDict class (Ali Hamdan, PR [15208](https://github.com/python/mypy/pull/15208))
+*   Fix call-based namedtuple omitted from class bases (Ali Hamdan, PR [14680](https://github.com/python/mypy/pull/14680))
+*   Support TypedDict alternative syntax (Ali Hamdan, PR [14682](https://github.com/python/mypy/pull/14682))
+*   Make stubgen respect MYPY\_CACHE\_DIR (Henrik Bäärnhielm, PR [14722](https://github.com/python/mypy/pull/14722))
+*   Fixes and simplifications (Ali Hamdan, PR [15232](https://github.com/python/mypy/pull/15232))
+
+#### Other Notable Fixes and Improvements
+
+*   Fix nested async functions when using TypeVar value restriction (Jukka Lehtosalo, PR [14705](https://github.com/python/mypy/pull/14705))
+*   Always allow returning Any from lambda (Ivan Levkivskyi, PR [15413](https://github.com/python/mypy/pull/15413))
+*   Add foundation for TypeVar defaults (PEP 696) (Marc Mueller, PR [14872](https://github.com/python/mypy/pull/14872))
+*   Update semantic analyzer for TypeVar defaults (PEP 696) (Marc Mueller, PR [14873](https://github.com/python/mypy/pull/14873))
+*   Make dict expression inference more consistent (Ivan Levkivskyi, PR [15174](https://github.com/python/mypy/pull/15174))
+*   Do not block on duplicate base classes (Nikita Sobolev, PR [15367](https://github.com/python/mypy/pull/15367))
+*   Generate an error when both staticmethod and classmethod decorators are used (Juhi Chandalia, PR [15118](https://github.com/python/mypy/pull/15118))
+*   Fix assert\_type behaviour with literals (Carl Karsten, PR [15123](https://github.com/python/mypy/pull/15123))
+*   Fix match subject ignoring redefinitions (Vincent Vanlaer, PR [15306](https://github.com/python/mypy/pull/15306))
+*   Support `__all__`.remove (Shantanu, PR [15279](https://github.com/python/mypy/pull/15279))
+
+#### Typeshed Updates
+
+Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=877e06ad1cfd9fd9967c0b0340a86d0c23ea89ce+0&branch=main&path=stdlib) for full list of typeshed changes.
+
+#### Acknowledgements
+
+Thanks to all mypy contributors who contributed to this release:
+
+*   Adrian Garcia Badaracco
+*   AJ Rasmussen
+*   Alan Du
+*   Alex Waygood
+*   Ali Hamdan
+*   Carl Karsten
+*   dosisod
+*   Ethan Smith
+*   Gregory Santosa
+*   Heather White
+*   Henrik Bäärnhielm
+*   Ilya Konstantinov
+*   Ilya Priven
+*   Ivan Levkivskyi
+*   Juhi Chandalia
+*   Jukka Lehtosalo
+*   Logan Hunt
+*   madt2709
+*   Marc Mueller
+*   Max Murin
+*   Nikita Sobolev
+*   Omar Silva
+*   Özgür
+*   Richard Si
+*   Rohit Sanjay
+*   Shantanu
+*   teresa0605
+*   Thomas M Kehrenberg
+*   Tin Tvrtković
+*   Tushar Sadhwani
+*   Valentin Stanciu
+*   Viicos
+*   Vincent Vanlaer
+*   Wesley Collin Wright
+*   William Santosa
+*   yaegassy
+
+I’d also like to thank my employer, Dropbox, for supporting mypy development.
+
+Posted by Jared Hance
+
+
+## Mypy 1.3
+
+[Wednesday, 10 May 2023](https://mypy-lang.blogspot.com/2023/05/mypy-13-released.html)
+
+ We’ve just uploaded mypy 1.3 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:
+
+    python3 -m pip install -U mypy
+
+You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
+
+#### Performance Improvements
+
+*   Improve performance of union subtyping (Shantanu, PR [15104](https://github.com/python/mypy/pull/15104))
+*   Add negative subtype caches (Ivan Levkivskyi, PR [14884](https://github.com/python/mypy/pull/14884))
+
+#### Stub Tooling Improvements
+
+*   Stubtest: Check that the stub is abstract if the runtime is, even when the stub is an overloaded method (Alex Waygood, PR [14955](https://github.com/python/mypy/pull/14955))
+*   Stubtest: Verify stub methods or properties are decorated with @final if they are decorated with @final at runtime (Alex Waygood, PR [14951](https://github.com/python/mypy/pull/14951))
+*   Stubtest: Fix stubtest false positives with TypedDicts at runtime (Alex Waygood, PR [14984](https://github.com/python/mypy/pull/14984))
+*   Stubgen: Support @functools.cached\_property (Nikita Sobolev, PR [14981](https://github.com/python/mypy/pull/14981))
+*   Improvements to stubgenc (Chad Dombrova, PR [14564](https://github.com/python/mypy/pull/14564))
+
+#### Improvements to attrs
+
+*   Add support for converters with TypeVars on generic attrs classes (Chad Dombrova, PR [14908](https://github.com/python/mypy/pull/14908))
+*   Fix attrs.evolve on bound TypeVar (Ilya Konstantinov, PR [15022](https://github.com/python/mypy/pull/15022))
+
+#### Documentation Updates
+
+*   Improve async documentation (Shantanu, PR [14973](https://github.com/python/mypy/pull/14973))
+*   Improvements to cheat sheet (Shantanu, PR [14972](https://github.com/python/mypy/pull/14972))
+*   Add documentation for bytes formatting error code (Shantanu, PR [14971](https://github.com/python/mypy/pull/14971))
+*   Convert insecure links to use HTTPS (Marti Raudsepp, PR [14974](https://github.com/python/mypy/pull/14974))
+*   Also mention overloads in async iterator documentation (Shantanu, PR [14998](https://github.com/python/mypy/pull/14998))
+*   stubtest: Improve allowlist documentation (Shantanu, PR [15008](https://github.com/python/mypy/pull/15008))
+*   Clarify "Using types... but not at runtime" (Jon Shea, PR [15029](https://github.com/python/mypy/pull/15029))
+*   Fix alignment of cheat sheet example (Ondřej Cvacho, PR [15039](https://github.com/python/mypy/pull/15039))
+*   Fix error for callback protocol matching against callable type object (Shantanu, PR [15042](https://github.com/python/mypy/pull/15042))
+
+#### Error Reporting Improvements
+
+*   Improve bytes formatting error (Shantanu, PR [14959](https://github.com/python/mypy/pull/14959))
+
+#### Mypyc Improvements
+
+*   Fix unions of bools and ints (Tomer Chachamu, PR [15066](https://github.com/python/mypy/pull/15066))
+
+#### Other Fixes and Improvements
+
+*   Fix narrowing union types that include Self with isinstance (Christoph Tyralla, PR [14923](https://github.com/python/mypy/pull/14923))
+*   Allow objects matching SupportsKeysAndGetItem to be unpacked (Bryan Forbes, PR [14990](https://github.com/python/mypy/pull/14990))
+*   Check type guard validity for staticmethods (EXPLOSION, PR [14953](https://github.com/python/mypy/pull/14953))
+*   Fix sys.platform when cross-compiling with emscripten (Ethan Smith, PR [14888](https://github.com/python/mypy/pull/14888))
+
+#### Typeshed Updates
+
+Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=b0ed50e9392a23e52445b630a808153e0e256976+0&branch=main&path=stdlib) for full list of typeshed changes.
+
+#### Acknowledgements
+
+Thanks to all mypy contributors who contributed to this release:
+
+*   Alex Waygood
+*   Amin Alaee
+*   Bryan Forbes
+*   Chad Dombrova
+*   Charlie Denton
+*   Christoph Tyralla
+*   dosisod
+*   Ethan Smith
+*   EXPLOSION
+*   Ilya Konstantinov
+*   Ivan Levkivskyi
+*   Jon Shea
+*   Jukka Lehtosalo
+*   KotlinIsland
+*   Marti Raudsepp
+*   Nikita Sobolev
+*   Ondřej Cvacho
+*   Shantanu
+*   sobolevn
+*   Tomer Chachamu
+*   Yaroslav Halchenko
+
+Posted by Wesley Collin Wright.
+
+
+## Mypy 1.2
+
+[Thursday, 6 April 2023](https://mypy-lang.blogspot.com/2023/04/mypy-12-released.html)
+
+We’ve just uploaded mypy 1.2 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:
+
+    python3 -m pip install -U mypy
+
+You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
+
+#### Improvements to Dataclass Transforms
+
+*   Support implicit default for "init" parameter in field specifiers (Wesley Collin Wright and Jukka Lehtosalo, PR [15010](https://github.com/python/mypy/pull/15010))
+*   Support descriptors in dataclass transform (Jukka Lehtosalo, PR [15006](https://github.com/python/mypy/pull/15006))
+*   Fix frozen\_default in incremental mode (Wesley Collin Wright)
+*   Fix frozen behavior for base classes with direct metaclasses (Wesley Collin Wright, PR [14878](https://github.com/python/mypy/pull/14878))
+
+#### Mypyc: Native Floats
+
+Mypyc now uses a native, unboxed representation for values of type float. Previously these were heap-allocated Python objects. Native floats are faster and use less memory. Code that uses floating-point operations heavily can be several times faster when using native floats.
+
+Various float operations and math functions also now have optimized implementations. Refer to the [documentation](https://mypyc.readthedocs.io/en/latest/float_operations.html) for a full list.
+
+This can change the behavior of existing code that uses subclasses of float. When assigning an instance of a subclass of float to a variable with the float type, it gets implicitly converted to a float instance when compiled:
+
+```python
+from lib import MyFloat  # MyFloat is a subclass of "float"
+
+def example() -> None:
+    x = MyFloat(1.5)
+    y: float = x  # Implicit conversion from MyFloat to float
+    print(type(y))  # float, not MyFloat
+```
+
+Previously, implicit conversions were applied to int subclasses but not float subclasses.
+
+Also, int values can no longer be assigned to a variable with type float in compiled code, since these types now have incompatible representations. An explicit conversion is required:
+
+```python
+def example(n: int) -> None:
+    a: float = 1  # Error: cannot assign "int" to "float"
+    b: float = 1.0  # OK
+    c: float = n  # Error
+    d: float = float(n)  # OK
+```
+
+This restriction only applies to assignments, since they could otherwise narrow down the type of a variable from float to int. int values can still be implicitly converted to float when passed as arguments to functions that expect float values.
+
+Note that mypyc still doesn’t support arrays of unboxed float values. Using list\[float\] involves heap-allocated float objects, since list can only store boxed values. Support for efficient floating point arrays is one of the next major planned mypyc features.
+
+Related changes:
+
+*   Use a native unboxed representation for floats (Jukka Lehtosalo, PR [14880](https://github.com/python/mypy/pull/14880))
+*   Document native floats and integers (Jukka Lehtosalo, PR [14927](https://github.com/python/mypy/pull/14927))
+*   Fixes to float to int conversion (Jukka Lehtosalo, PR [14936](https://github.com/python/mypy/pull/14936))
+
+#### Mypyc: Native Integers
+
+Mypyc now supports signed 32-bit and 64-bit integer types in addition to the arbitrary-precision int type. You can use the types mypy\_extensions.i32 and mypy\_extensions.i64 to speed up code that uses integer operations heavily.
+
+Simple example:
+```python
+from mypy_extensions import i64
+
+def inc(x: i64) -> i64:
+    return x + 1
+```
+
+Refer to the [documentation](https://mypyc.readthedocs.io/en/latest/using_type_annotations.html#native-integer-types) for more information. This feature was contributed by Jukka Lehtosalo.
+
+#### Other Mypyc Fixes and Improvements
+
+*   Support iterating over a TypedDict (Richard Si, PR [14747](https://github.com/python/mypy/pull/14747))
+*   Faster coercions between different tuple types (Jukka Lehtosalo, PR [14899](https://github.com/python/mypy/pull/14899))
+*   Faster calls via type aliases (Jukka Lehtosalo, PR [14784](https://github.com/python/mypy/pull/14784))
+*   Faster classmethod calls via cls (Jukka Lehtosalo, PR [14789](https://github.com/python/mypy/pull/14789))
+
+#### Fixes to Crashes
+
+*   Fix crash on class-level import in protocol definition (Ivan Levkivskyi, PR [14926](https://github.com/python/mypy/pull/14926))
+*   Fix crash on single item union of alias (Ivan Levkivskyi, PR [14876](https://github.com/python/mypy/pull/14876))
+*   Fix crash on ParamSpec in incremental mode (Ivan Levkivskyi, PR [14885](https://github.com/python/mypy/pull/14885))
+
+#### Documentation Updates
+
+*   Update adopting \--strict documentation for 1.0 (Shantanu, PR [14865](https://github.com/python/mypy/pull/14865))
+*   Some minor documentation tweaks (Jukka Lehtosalo, PR [14847](https://github.com/python/mypy/pull/14847))
+*   Improve documentation of top level mypy: disable-error-code comment (Nikita Sobolev, PR [14810](https://github.com/python/mypy/pull/14810))
+
+#### Error Reporting Improvements
+
+*   Add error code to `typing_extensions` suggestion (Shantanu, PR [14881](https://github.com/python/mypy/pull/14881))
+*   Add a separate error code for top-level await (Nikita Sobolev, PR [14801](https://github.com/python/mypy/pull/14801))
+*   Don’t suggest two obsolete stub packages (Jelle Zijlstra, PR [14842](https://github.com/python/mypy/pull/14842))
+*   Add suggestions for pandas-stubs and lxml-stubs (Shantanu, PR [14737](https://github.com/python/mypy/pull/14737))
+
+#### Other Fixes and Improvements
+
+*   Multiple inheritance considers callable objects as subtypes of functions (Christoph Tyralla, PR [14855](https://github.com/python/mypy/pull/14855))
+*   stubtest: Respect @final runtime decorator and enforce it in stubs (Nikita Sobolev, PR [14922](https://github.com/python/mypy/pull/14922))
+*   Fix false positives related to type\[<type-var>\] (sterliakov, PR [14756](https://github.com/python/mypy/pull/14756))
+*   Fix duplication of ParamSpec prefixes and properly substitute ParamSpecs (EXPLOSION, PR [14677](https://github.com/python/mypy/pull/14677))
+*   Fix line number if `__iter__` is incorrectly reported as missing (Jukka Lehtosalo, PR [14893](https://github.com/python/mypy/pull/14893))
+*   Fix incompatible overrides of overloaded generics with self types (Shantanu, PR [14882](https://github.com/python/mypy/pull/14882))
+*   Allow SupportsIndex in slice expressions (Shantanu, PR [14738](https://github.com/python/mypy/pull/14738))
+*   Support if statements in bodies of dataclasses and classes that use dataclass\_transform (Jacek Chałupka, PR [14854](https://github.com/python/mypy/pull/14854))
+*   Allow iterable class objects to be unpacked (including enums) (Alex Waygood, PR [14827](https://github.com/python/mypy/pull/14827))
+*   Fix narrowing for walrus expressions used in match statements (Shantanu, PR [14844](https://github.com/python/mypy/pull/14844))
+*   Add signature for attr.evolve (Ilya Konstantinov, PR [14526](https://github.com/python/mypy/pull/14526))
+*   Fix Any inference when unpacking iterators that don't directly inherit from typing.Iterator (Alex Waygood, PR [14821](https://github.com/python/mypy/pull/14821))
+*   Fix unpack with overloaded `__iter__` method (Nikita Sobolev, PR [14817](https://github.com/python/mypy/pull/14817))
+*   Reduce size of JSON data in mypy cache (dosisod, PR [14808](https://github.com/python/mypy/pull/14808))
+*   Improve “used before definition” checks when a local definition has the same name as a global definition (Stas Ilinskiy, PR [14517](https://github.com/python/mypy/pull/14517))
+*   Honor NoReturn as \_\_setitem\_\_ return type to mark unreachable code (sterliakov, PR [12572](https://github.com/python/mypy/pull/12572))
+
+#### Typeshed Updates
+
+Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=a544b75320e97424d2d927605316383c755cdac0+0&branch=main&path=stdlib) for full list of typeshed changes.
+
+#### Acknowledgements
+
+Thanks to all mypy contributors who contributed to this release:
+
+*   Alex Waygood
+*   Avasam
+*   Christoph Tyralla
+*   dosisod
+*   EXPLOSION
+*   Ilya Konstantinov
+*   Ivan Levkivskyi
+*   Jacek Chałupka
+*   Jelle Zijlstra
+*   Jukka Lehtosalo
+*   Marc Mueller
+*   Max Murin
+*   Nikita Sobolev
+*   Richard Si
+*   Shantanu
+*   Stas Ilinskiy
+*   sterliakov
+*   Wesley Collin Wright
+
+Posted by Jukka Lehtosalo
+
+
+## Mypy 1.1.1
+
+[Monday, 6 March 2023](https://mypy-lang.blogspot.com/2023/03/mypy-111-released.html)
+
+ We’ve just uploaded mypy 1.1.1 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:
+
+    python3 -m pip install -U mypy
+
+You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
+
+#### Support for `dataclass_transform`
+
+This release adds full support for the dataclass\_transform decorator defined in [PEP 681](https://peps.python.org/pep-0681/#decorator-function-example). This allows decorators, base classes, and metaclasses that generate a \_\_init\_\_ method or other methods based on the properties of that class (similar to dataclasses) to have those methods recognized by mypy.
+
+This was contributed by Wesley Collin Wright.
+
+#### Dedicated Error Code for Method Assignments
+
+Mypy can’t safely check all assignments to methods (a form of monkey patching), so mypy generates an error by default. To make it easier to ignore this error, mypy now uses the new error code method-assign for this. By disabling this error code in a file or globally, mypy will no longer complain about assignments to methods if the signatures are compatible.
+
+Mypy also supports the old error code assignment for these assignments to prevent a backward compatibility break. More generally, we can use this mechanism in the future if we wish to split or rename another existing error code without causing backward compatibility issues.
+
+This was contributed by Ivan Levkivskyi (PR [14570](https://github.com/python/mypy/pull/14570)).
+
+#### Fixes to Crashes
+
+*   Fix a crash on walrus in comprehension at class scope (Ivan Levkivskyi, PR [14556](https://github.com/python/mypy/pull/14556))
+*   Fix crash related to value-constrained TypeVar (Shantanu, PR [14642](https://github.com/python/mypy/pull/14642))
+
+#### Fixes to Cache Corruption
+
+*   Fix generic TypedDict/NamedTuple caching (Ivan Levkivskyi, PR [14675](https://github.com/python/mypy/pull/14675))
+
+#### Mypyc Fixes and Improvements
+
+*   Raise "non-trait base must be first..." error less frequently (Richard Si, PR [14468](https://github.com/python/mypy/pull/14468))
+*   Generate faster code for bool comparisons and arithmetic (Jukka Lehtosalo, PR [14489](https://github.com/python/mypy/pull/14489))
+*   Optimize \_\_(a)enter\_\_/\_\_(a)exit\_\_ for native classes (Jared Hance, PR [14530](https://github.com/python/mypy/pull/14530))
+*   Detect if attribute definition conflicts with base class/trait (Jukka Lehtosalo, PR [14535](https://github.com/python/mypy/pull/14535))
+*   Support \_\_(r)divmod\_\_ dunders (Richard Si, PR [14613](https://github.com/python/mypy/pull/14613))
+*   Support \_\_pow\_\_, \_\_rpow\_\_, and \_\_ipow\_\_ dunders (Richard Si, PR [14616](https://github.com/python/mypy/pull/14616))
+*   Fix crash on star unpacking to underscore (Ivan Levkivskyi, PR [14624](https://github.com/python/mypy/pull/14624))
+*   Fix iterating over a union of dicts (Richard Si, PR [14713](https://github.com/python/mypy/pull/14713))
+
+#### Fixes to Detecting Undefined Names (used-before-def)
+
+*   Correctly handle walrus operator (Stas Ilinskiy, PR [14646](https://github.com/python/mypy/pull/14646))
+*   Handle walrus declaration in match subject correctly (Stas Ilinskiy, PR [14665](https://github.com/python/mypy/pull/14665))
+
+#### Stubgen Improvements
+
+Stubgen is a tool for automatically generating draft stubs for libraries.
+
+*   Allow aliases below the top level (Chad Dombrova, PR [14388](https://github.com/python/mypy/pull/14388))
+*   Fix crash with PEP 604 union in type variable bound (Shantanu, PR [14557](https://github.com/python/mypy/pull/14557))
+*   Preserve PEP 604 unions in generated .pyi files (hamdanal, PR [14601](https://github.com/python/mypy/pull/14601))
+
+#### Stubtest Improvements
+
+Stubtest is a tool for testing that stubs conform to the implementations.
+
+*   Update message format so that it’s easier to go to error location (Avasam, PR [14437](https://github.com/python/mypy/pull/14437))
+*   Handle name-mangling edge cases better (Alex Waygood, PR [14596](https://github.com/python/mypy/pull/14596))
+
+#### Changes to Error Reporting and Messages
+
+*   Add new TypedDict error code typeddict-unknown-key (JoaquimEsteves, PR [14225](https://github.com/python/mypy/pull/14225))
+*   Give arguments a more reasonable location in error messages (Max Murin, PR [14562](https://github.com/python/mypy/pull/14562))
+*   In error messages, quote just the module's name (Ilya Konstantinov, PR [14567](https://github.com/python/mypy/pull/14567))
+*   Improve misleading message about Enum() (Rodrigo Silva, PR [14590](https://github.com/python/mypy/pull/14590))
+*   Suggest importing from `typing_extensions` if definition is not in typing (Shantanu, PR [14591](https://github.com/python/mypy/pull/14591))
+*   Consistently use type-abstract error code (Ivan Levkivskyi, PR [14619](https://github.com/python/mypy/pull/14619))
+*   Consistently use literal-required error code for TypedDicts (Ivan Levkivskyi, PR [14621](https://github.com/python/mypy/pull/14621))
+*   Adjust inconsistent dataclasses plugin error messages (Wesley Collin Wright, PR [14637](https://github.com/python/mypy/pull/14637))
+*   Consolidate literal bool argument error messages (Wesley Collin Wright, PR [14693](https://github.com/python/mypy/pull/14693))
+
+#### Other Fixes and Improvements
+
+*   Check that type guards accept a positional argument (EXPLOSION, PR [14238](https://github.com/python/mypy/pull/14238))
+*   Fix bug with in operator used with a union of Container and Iterable (Max Murin, PR [14384](https://github.com/python/mypy/pull/14384))
+*   Support protocol inference for type\[T\] via metaclass (Ivan Levkivskyi, PR [14554](https://github.com/python/mypy/pull/14554))
+*   Allow overlapping comparisons between bytes-like types (Shantanu, PR [14658](https://github.com/python/mypy/pull/14658))
+*   Fix mypy daemon documentation link in README (Ivan Levkivskyi, PR [14644](https://github.com/python/mypy/pull/14644))
+
+#### Typeshed Updates
+
+Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=5ebf892d0710a6e87925b8d138dfa597e7bb11cc+0&branch=main&path=stdlib) for full list of typeshed changes.
+
+#### Acknowledgements
+
+Thanks to all mypy contributors who contributed to this release:
+
+*   Alex Waygood
+*   Avasam
+*   Chad Dombrova
+*   dosisod
+*   EXPLOSION
+*   hamdanal
+*   Ilya Konstantinov
+*   Ivan Levkivskyi
+*   Jared Hance
+*   JoaquimEsteves
+*   Jukka Lehtosalo
+*   Marc Mueller
+*   Max Murin
+*   Michael Lee
+*   Michael R. Crusoe
+*   Richard Si
+*   Rodrigo Silva
+*   Shantanu
+*   Stas Ilinskiy
+*   Wesley Collin Wright
+*   Yilei "Dolee" Yang
+*   Yurii Karabas
+
+We’d also like to thank our employer, Dropbox, for funding the mypy core team.
+
+Posted by Max Murin
+
+
+## Mypy 1.0
+
+[Monday, 6 February 2023](https://mypy-lang.blogspot.com/2023/02/mypy-10-released.html)
+
+We’ve just uploaded mypy 1.0 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:
+
+    python3 -m pip install -U mypy
+
+You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
+
+#### New Release Versioning Scheme
+
+Now that mypy reached 1.0, we’ll switch to a new versioning scheme. Mypy version numbers will be of form x.y.z.
+
+Rules:
+
+*   The major release number (x) is incremented if a feature release includes a significant backward incompatible change that affects a significant fraction of users.
+*   The minor release number (y) is incremented on each feature release. Minor releases include updated stdlib stubs from typeshed.
+*   The point release number (z) is incremented when there are fixes only.
+
+Mypy doesn't use SemVer, since most minor releases have at least minor backward incompatible changes in typeshed. Also, many type checking features find new legitimate issues in code. These are not considered backward incompatible changes, unless the number of new errors is very high.
+
+Any significant backward incompatible change must be announced in the blog post for the previous feature release, before making the change. The previous release must also provide a flag to explicitly enable or disable the new behavior (whenever practical), so that users will be able to prepare for the changes and report issues. We should keep the feature flag for at least a few releases after we've switched the default.
+
+See [“Release Process” in the mypy wiki](https://github.com/python/mypy/wiki/Release-Process) for more details and for the most up-to-date version of the versioning scheme.
+
+#### Performance Improvements
+
+Mypy 1.0 is up to 40% faster than mypy 0.991 when type checking the Dropbox internal codebase. We also set up a daily job to measure the performance of the most recent development version of mypy to make it easier to track changes in performance.
+
+Many optimizations contributed to this improvement:
+
+*   Improve performance for errors on class with many attributes (Shantanu, PR [14379](https://github.com/python/mypy/pull/14379))
+*   Speed up make\_simplified\_union (Jukka Lehtosalo, PR [14370](https://github.com/python/mypy/pull/14370))
+*   Micro-optimize get\_proper\_type(s) (Jukka Lehtosalo, PR [14369](https://github.com/python/mypy/pull/14369))
+*   Micro-optimize flatten\_nested\_unions (Jukka Lehtosalo, PR [14368](https://github.com/python/mypy/pull/14368))
+*   Some semantic analyzer micro-optimizations (Jukka Lehtosalo, PR [14367](https://github.com/python/mypy/pull/14367))
+*   A few miscellaneous micro-optimizations (Jukka Lehtosalo, PR [14366](https://github.com/python/mypy/pull/14366))
+*   Optimization: Avoid a few uses of contextmanagers in semantic analyzer (Jukka Lehtosalo, PR [14360](https://github.com/python/mypy/pull/14360))
+*   Optimization: Enable always defined attributes in Type subclasses (Jukka Lehtosalo, PR [14356](https://github.com/python/mypy/pull/14356))
+*   Optimization: Remove expensive context manager in type analyzer (Jukka Lehtosalo, PR [14357](https://github.com/python/mypy/pull/14357))
+*   subtypes: fast path for Union/Union subtype check (Hugues, PR [14277](https://github.com/python/mypy/pull/14277))
+*   Micro-optimization: avoid Bogus\[int\] types that cause needless boxing (Jukka Lehtosalo, PR [14354](https://github.com/python/mypy/pull/14354))
+*   Avoid slow error message logic if errors not shown to user (Jukka Lehtosalo, PR [14336](https://github.com/python/mypy/pull/14336))
+*   Speed up the implementation of hasattr() checks (Jukka Lehtosalo, PR [14333](https://github.com/python/mypy/pull/14333))
+*   Avoid the use of a context manager in hot code path (Jukka Lehtosalo, PR [14331](https://github.com/python/mypy/pull/14331))
+*   Change various type queries into faster bool type queries (Jukka Lehtosalo, PR [14330](https://github.com/python/mypy/pull/14330))
+*   Speed up recursive type check (Jukka Lehtosalo, PR [14326](https://github.com/python/mypy/pull/14326))
+*   Optimize subtype checking by avoiding a nested function (Jukka Lehtosalo, PR [14325](https://github.com/python/mypy/pull/14325))
+*   Optimize type parameter checks in subtype checking (Jukka Lehtosalo, PR [14324](https://github.com/python/mypy/pull/14324))
+*   Speed up freshening type variables (Jukka Lehtosalo, PR [14323](https://github.com/python/mypy/pull/14323))
+*   Optimize implementation of TypedDict types for \*\*kwds (Jukka Lehtosalo, PR [14316](https://github.com/python/mypy/pull/14316))
+
+#### Warn About Variables Used Before Definition
+
+Mypy will now generate an error if you use a variable before it’s defined. This feature is enabled by default. By default mypy reports an error when it infers that a variable is always undefined.
+```python
+y = x  # E: Name "x" is used before definition [used-before-def]
+x = 0
+```
+This feature was contributed by Stas Ilinskiy.
+
+#### Detect Possibly Undefined Variables (Experimental)
+
+A new experimental possibly-undefined error code is now available that will detect variables that may be undefined:
+```python
+    if b:
+        x = 0
+    print(x)  # Error: Name "x" may be undefined [possibly-undefined]
+```
+The error code is disabled by default, since it can generate false positives.
+
+This feature was contributed by Stas Ilinskiy.
+
+#### Support the “Self” Type
+
+There is now a simpler syntax for declaring [generic self types](https://mypy.readthedocs.io/en/stable/generics.html#generic-methods-and-generic-self) introduced in [PEP 673](https://peps.python.org/pep-0673/): the Self type. You no longer have to define a type variable to use “self types”, and you can use them with attributes. Example from mypy documentation:
+```python
+from typing import Self
+
+class Friend:
+    other: Self | None = None
+
+    @classmethod
+    def make_pair(cls) -> tuple[Self, Self]:
+        a, b = cls(), cls()
+        a.other = b
+        b.other = a
+        return a, b
+
+class SuperFriend(Friend):
+    pass
+
+# a and b have the inferred type "SuperFriend", not "Friend"
+a, b = SuperFriend.make_pair()
+```
+The feature was introduced in Python 3.11. In earlier Python versions a backport of Self is available in `typing_extensions`.
+
+This was contributed by Ivan Levkivskyi (PR [14041](https://github.com/python/mypy/pull/14041)).
+
+#### Support ParamSpec in Type Aliases
+
+ParamSpec and Concatenate can now be used in type aliases. Example:
+```python
+from typing import ParamSpec, Callable
+
+P = ParamSpec("P")
+A = Callable[P, None]
+
+def f(c: A[int, str]) -> None:
+    c(1, "x")
+```
+This feature was contributed by Ivan Levkivskyi (PR [14159](https://github.com/python/mypy/pull/14159)).
+
+#### ParamSpec and Generic Self Types No Longer Experimental
+
+Support for ParamSpec ([PEP 612](https://www.python.org/dev/peps/pep-0612/)) and generic self types are no longer considered experimental.
+
+#### Miscellaneous New Features
+
+*   Minimal, partial implementation of dataclass\_transform ([PEP 681](https://peps.python.org/pep-0681/)) (Wesley Collin Wright, PR [14523](https://github.com/python/mypy/pull/14523))
+*   Add basic support for `typing_extensions`.TypeVar (Marc Mueller, PR [14313](https://github.com/python/mypy/pull/14313))
+*   Add \--debug-serialize option (Marc Mueller, PR [14155](https://github.com/python/mypy/pull/14155))
+*   Constant fold initializers of final variables (Jukka Lehtosalo, PR [14283](https://github.com/python/mypy/pull/14283))
+*   Enable Final instance attributes for attrs (Tin Tvrtković, PR [14232](https://github.com/python/mypy/pull/14232))
+*   Allow function arguments as base classes (Ivan Levkivskyi, PR [14135](https://github.com/python/mypy/pull/14135))
+*   Allow super() with mixin protocols (Ivan Levkivskyi, PR [14082](https://github.com/python/mypy/pull/14082))
+*   Add type inference for dict.keys membership (Matthew Hughes, PR [13372](https://github.com/python/mypy/pull/13372))
+*   Generate error for class attribute access if attribute is defined with `__slots__` (Harrison McCarty, PR [14125](https://github.com/python/mypy/pull/14125))
+*   Support additional attributes in callback protocols (Ivan Levkivskyi, PR [14084](https://github.com/python/mypy/pull/14084))
+
+#### Fixes to Crashes
+
+*   Fix crash on prefixed ParamSpec with forward reference (Ivan Levkivskyi, PR [14569](https://github.com/python/mypy/pull/14569))
+*   Fix internal crash when resolving the same partial type twice (Shantanu, PR [14552](https://github.com/python/mypy/pull/14552))
+*   Fix crash in daemon mode on new import cycle (Ivan Levkivskyi, PR [14508](https://github.com/python/mypy/pull/14508))
+*   Fix crash in mypy daemon (Ivan Levkivskyi, PR [14497](https://github.com/python/mypy/pull/14497))
+*   Fix crash on Any metaclass in incremental mode (Ivan Levkivskyi, PR [14495](https://github.com/python/mypy/pull/14495))
+*   Fix crash in await inside comprehension outside function (Ivan Levkivskyi, PR [14486](https://github.com/python/mypy/pull/14486))
+*   Fix crash in Self type on forward reference in upper bound (Ivan Levkivskyi, PR [14206](https://github.com/python/mypy/pull/14206))
+*   Fix a crash when incorrect super() is used outside a method (Ivan Levkivskyi, PR [14208](https://github.com/python/mypy/pull/14208))
+*   Fix crash on overriding with frozen attrs (Ivan Levkivskyi, PR [14186](https://github.com/python/mypy/pull/14186))
+*   Fix incremental mode crash on generic function appearing in nested position (Ivan Levkivskyi, PR [14148](https://github.com/python/mypy/pull/14148))
+*   Fix daemon crash on malformed NamedTuple (Ivan Levkivskyi, PR [14119](https://github.com/python/mypy/pull/14119))
+*   Fix crash during ParamSpec inference (Ivan Levkivskyi, PR [14118](https://github.com/python/mypy/pull/14118))
+*   Fix crash on nested generic callable (Ivan Levkivskyi, PR [14093](https://github.com/python/mypy/pull/14093))
+*   Fix crashes with unpacking SyntaxError (Shantanu, PR [11499](https://github.com/python/mypy/pull/11499))
+*   Fix crash on partial type inference within a lambda (Ivan Levkivskyi, PR [14087](https://github.com/python/mypy/pull/14087))
+*   Fix crash with enums (Michael Lee, PR [14021](https://github.com/python/mypy/pull/14021))
+*   Fix crash with malformed TypedDicts and disallow-any-expr (Michael Lee, PR [13963](https://github.com/python/mypy/pull/13963))
+
+#### Error Reporting Improvements
+
+*   More helpful error for missing self (Shantanu, PR [14386](https://github.com/python/mypy/pull/14386))
+*   Add error-code truthy-iterable (Marc Mueller, PR [13762](https://github.com/python/mypy/pull/13762))
+*   Fix pluralization in error messages (KotlinIsland, PR [14411](https://github.com/python/mypy/pull/14411))
+
+#### Mypyc: Support Match Statement
+
+Mypyc can now compile Python 3.10 match statements.
+
+This was contributed by dosisod (PR [13953](https://github.com/python/mypy/pull/13953)).
+
+#### Other Mypyc Fixes and Improvements
+
+*   Optimize int(x)/float(x)/complex(x) on instances of native classes (Richard Si, PR [14450](https://github.com/python/mypy/pull/14450))
+*   Always emit warnings (Richard Si, PR [14451](https://github.com/python/mypy/pull/14451))
+*   Faster bool and integer conversions (Jukka Lehtosalo, PR [14422](https://github.com/python/mypy/pull/14422))
+*   Support attributes that override properties (Jukka Lehtosalo, PR [14377](https://github.com/python/mypy/pull/14377))
+*   Precompute set literals for "in" operations and iteration (Richard Si, PR [14409](https://github.com/python/mypy/pull/14409))
+*   Don't load targets with forward references while setting up non-extension class `__all__` (Richard Si, PR [14401](https://github.com/python/mypy/pull/14401))
+*   Compile away NewType type calls (Richard Si, PR [14398](https://github.com/python/mypy/pull/14398))
+*   Improve error message for multiple inheritance (Joshua Bronson, PR [14344](https://github.com/python/mypy/pull/14344))
+*   Simplify union types (Jukka Lehtosalo, PR [14363](https://github.com/python/mypy/pull/14363))
+*   Fixes to union simplification (Jukka Lehtosalo, PR [14364](https://github.com/python/mypy/pull/14364))
+*   Fix for typeshed changes to Collection (Shantanu, PR [13994](https://github.com/python/mypy/pull/13994))
+*   Allow use of enum.Enum (Shantanu, PR [13995](https://github.com/python/mypy/pull/13995))
+*   Fix compiling on Arch Linux (dosisod, PR [13978](https://github.com/python/mypy/pull/13978))
+
+#### Documentation Improvements
+
+*   Various documentation and error message tweaks (Jukka Lehtosalo, PR [14574](https://github.com/python/mypy/pull/14574))
+*   Improve Generics documentation (Shantanu, PR [14587](https://github.com/python/mypy/pull/14587))
+*   Improve protocols documentation (Shantanu, PR [14577](https://github.com/python/mypy/pull/14577))
+*   Improve dynamic typing documentation (Shantanu, PR [14576](https://github.com/python/mypy/pull/14576))
+*   Improve the Common Issues page (Shantanu, PR [14581](https://github.com/python/mypy/pull/14581))
+*   Add a top-level TypedDict page (Shantanu, PR [14584](https://github.com/python/mypy/pull/14584))
+*   More improvements to getting started documentation (Shantanu, PR [14572](https://github.com/python/mypy/pull/14572))
+*   Move truthy-function documentation from “optional checks” to “enabled by default” (Anders Kaseorg, PR [14380](https://github.com/python/mypy/pull/14380))
+*   Avoid use of implicit optional in decorator factory documentation (Tom Schraitle, PR [14156](https://github.com/python/mypy/pull/14156))
+*   Clarify documentation surrounding install-types (Shantanu, PR [14003](https://github.com/python/mypy/pull/14003))
+*   Improve searchability for module level type ignore errors (Shantanu, PR [14342](https://github.com/python/mypy/pull/14342))
+*   Advertise mypy daemon in README (Ivan Levkivskyi, PR [14248](https://github.com/python/mypy/pull/14248))
+*   Add link to error codes in README (Ivan Levkivskyi, PR [14249](https://github.com/python/mypy/pull/14249))
+*   Document that report generation disables cache (Ilya Konstantinov, PR [14402](https://github.com/python/mypy/pull/14402))
+*   Stop saying mypy is beta software (Ivan Levkivskyi, PR [14251](https://github.com/python/mypy/pull/14251))
+*   Flycheck-mypy is deprecated, since its functionality was merged to Flycheck (Ivan Levkivskyi, PR [14247](https://github.com/python/mypy/pull/14247))
+*   Update code example in "Declaring decorators" (ChristianWitzler, PR [14131](https://github.com/python/mypy/pull/14131))
+
+#### Stubtest Improvements
+
+Stubtest is a tool for testing that stubs conform to the implementations.
+
+*   Improve error message for `__all__`\-related errors (Alex Waygood, PR [14362](https://github.com/python/mypy/pull/14362))
+*   Improve heuristics for determining whether global-namespace names are imported (Alex Waygood, PR [14270](https://github.com/python/mypy/pull/14270))
+*   Catch BaseException on module imports (Shantanu, PR [14284](https://github.com/python/mypy/pull/14284))
+*   Associate exported symbol error with `__all__` object\_path (Nikita Sobolev, PR [14217](https://github.com/python/mypy/pull/14217))
+*   Add \_\_warningregistry\_\_ to the list of ignored module dunders (Nikita Sobolev, PR [14218](https://github.com/python/mypy/pull/14218))
+*   If a default is present in the stub, check that it is correct (Jelle Zijlstra, PR [14085](https://github.com/python/mypy/pull/14085))
+
+#### Stubgen Improvements
+
+Stubgen is a tool for automatically generating draft stubs for libraries.
+
+*   Treat dlls as C modules (Shantanu, PR [14503](https://github.com/python/mypy/pull/14503))
+
+#### Other Notable Fixes and Improvements
+
+*   Update stub suggestions based on recent typeshed changes (Alex Waygood, PR [14265](https://github.com/python/mypy/pull/14265))
+*   Fix attrs protocol check with cache (Marc Mueller, PR [14558](https://github.com/python/mypy/pull/14558))
+*   Fix strict equality check if operand item type has custom \_\_eq\_\_ (Jukka Lehtosalo, PR [14513](https://github.com/python/mypy/pull/14513))
+*   Don't consider object always truthy (Jukka Lehtosalo, PR [14510](https://github.com/python/mypy/pull/14510))
+*   Properly support union of TypedDicts as dict literal context (Ivan Levkivskyi, PR [14505](https://github.com/python/mypy/pull/14505))
+*   Properly expand type in generic class with Self and TypeVar with values (Ivan Levkivskyi, PR [14491](https://github.com/python/mypy/pull/14491))
+*   Fix recursive TypedDicts/NamedTuples defined with call syntax (Ivan Levkivskyi, PR [14488](https://github.com/python/mypy/pull/14488))
+*   Fix type inference issue when a class inherits from Any (Shantanu, PR [14404](https://github.com/python/mypy/pull/14404))
+*   Fix false positive on generic base class with six (Ivan Levkivskyi, PR [14478](https://github.com/python/mypy/pull/14478))
+*   Don't read scripts without extensions as modules in namespace mode (Tim Geypens, PR [14335](https://github.com/python/mypy/pull/14335))
+*   Fix inference for constrained type variables within unions (Christoph Tyralla, PR [14396](https://github.com/python/mypy/pull/14396))
+*   Fix Unpack imported from typing (Marc Mueller, PR [14378](https://github.com/python/mypy/pull/14378))
+*   Allow trailing commas in ini configuration of multiline values (Nikita Sobolev, PR [14240](https://github.com/python/mypy/pull/14240))
+*   Fix false negatives involving Unions and generators or coroutines (Shantanu, PR [14224](https://github.com/python/mypy/pull/14224))
+*   Fix ParamSpec constraint for types as callable (Vincent Vanlaer, PR [14153](https://github.com/python/mypy/pull/14153))
+*   Fix type aliases with fixed-length tuples (Jukka Lehtosalo, PR [14184](https://github.com/python/mypy/pull/14184))
+*   Fix issues with type aliases and new style unions (Jukka Lehtosalo, PR [14181](https://github.com/python/mypy/pull/14181))
+*   Simplify unions less aggressively (Ivan Levkivskyi, PR [14178](https://github.com/python/mypy/pull/14178))
+*   Simplify callable overlap logic (Ivan Levkivskyi, PR [14174](https://github.com/python/mypy/pull/14174))
+*   Try empty context when assigning to union typed variables (Ivan Levkivskyi, PR [14151](https://github.com/python/mypy/pull/14151))
+*   Improvements to recursive types (Ivan Levkivskyi, PR [14147](https://github.com/python/mypy/pull/14147))
+*   Make non-numeric non-empty FORCE\_COLOR truthy (Shantanu, PR [14140](https://github.com/python/mypy/pull/14140))
+*   Fix to recursive type aliases (Ivan Levkivskyi, PR [14136](https://github.com/python/mypy/pull/14136))
+*   Correctly handle Enum name on Python 3.11 (Ivan Levkivskyi, PR [14133](https://github.com/python/mypy/pull/14133))
+*   Fix class objects falling back to metaclass for callback protocol (Ivan Levkivskyi, PR [14121](https://github.com/python/mypy/pull/14121))
+*   Correctly support self types in callable ClassVar (Ivan Levkivskyi, PR [14115](https://github.com/python/mypy/pull/14115))
+*   Fix type variable clash in nested positions and in attributes (Ivan Levkivskyi, PR [14095](https://github.com/python/mypy/pull/14095))
+*   Allow class variable as implementation for read only attribute (Ivan Levkivskyi, PR [14081](https://github.com/python/mypy/pull/14081))
+*   Prevent warnings from causing dmypy to fail (Andrzej Bartosiński, PR [14102](https://github.com/python/mypy/pull/14102))
+*   Correctly process nested definitions in mypy daemon (Ivan Levkivskyi, PR [14104](https://github.com/python/mypy/pull/14104))
+*   Don't consider a branch unreachable if there is a possible promotion (Ivan Levkivskyi, PR [14077](https://github.com/python/mypy/pull/14077))
+*   Fix incompatible overrides of overloaded methods in concrete subclasses (Shantanu, PR [14017](https://github.com/python/mypy/pull/14017))
+*   Fix new style union syntax in type aliases (Jukka Lehtosalo, PR [14008](https://github.com/python/mypy/pull/14008))
+*   Fix and optimise overload compatibility checking (Shantanu, PR [14018](https://github.com/python/mypy/pull/14018))
+*   Improve handling of redefinitions through imports (Shantanu, PR [13969](https://github.com/python/mypy/pull/13969))
+*   Preserve (some) implicitly exported types (Shantanu, PR [13967](https://github.com/python/mypy/pull/13967))
+
+#### Typeshed Updates
+
+Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=ea0ae2155e8a04c9837903c3aff8dd5ad5f36ebc+0&branch=main&path=stdlib) for full list of typeshed changes.
+
+#### Acknowledgements
+
+Thanks to all mypy contributors who contributed to this release:
+
+*   Alessio Izzo
+*   Alex Waygood
+*   Anders Kaseorg
+*   Andrzej Bartosiński
+*   Avasam
+*   ChristianWitzler
+*   Christoph Tyralla
+*   dosisod
+*   Harrison McCarty
+*   Hugo van Kemenade
+*   Hugues
+*   Ilya Konstantinov
+*   Ivan Levkivskyi
+*   Jelle Zijlstra
+*   jhance
+*   johnthagen
+*   Jonathan Daniel
+*   Joshua Bronson
+*   Jukka Lehtosalo
+*   KotlinIsland
+*   Lakshay Bisht
+*   Lefteris Karapetsas
+*   Marc Mueller
+*   Matthew Hughes
+*   Michael Lee
+*   Nick Drozd
+*   Nikita Sobolev
+*   Richard Si
+*   Shantanu
+*   Stas Ilinskiy
+*   Tim Geypens
+*   Tin Tvrtković
+*   Tom Schraitle
+*   Valentin Stanciu
+*   Vincent Vanlaer
+
+We’d also like to thank our employer, Dropbox, for funding the mypy core team.
+
+Posted by Stas Ilinskiy
+
+## Previous releases
+
+For information about previous releases, refer to the posts at https://mypy-lang.blogspot.com/

From 838a1d4be1f3cad230d028b0e9cb8e1fb7a4fa5b Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Wed, 18 Oct 2023 11:33:10 +0300
Subject: [PATCH 184/288] Add `unimported-reveal` error code (#16271)

Note: `reveal_type(1) # type: ignore` is problematic, because it
silences the output. So, I've added some docs to advertise not doing so.

Closes https://github.com/python/mypy/issues/16270

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 docs/source/error_code_list2.rst        | 44 ++++++++++++++++++
 mypy/checkexpr.py                       | 26 +++++++++++
 mypy/errorcodes.py                      |  6 +++
 mypy/nodes.py                           | 11 +++--
 mypy/semanal.py                         | 13 +++++-
 mypy/types.py                           |  7 +--
 test-data/unit/check-errorcodes.test    | 62 +++++++++++++++++++++++++
 test-data/unit/fixtures/typing-full.pyi |  3 ++
 8 files changed, 163 insertions(+), 9 deletions(-)

diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst
index 30fad0793771..cc5c9b0a1bc6 100644
--- a/docs/source/error_code_list2.rst
+++ b/docs/source/error_code_list2.rst
@@ -481,3 +481,47 @@ Example:
         @override
         def g(self, y: int) -> None:
             pass
+
+
+.. _code-unimported-reveal:
+
+Check that ``reveal_type`` is imported from typing or typing_extensions [unimported-reveal]
+-------------------------------------------------------------------------------------------
+
+Mypy used to have ``reveal_type`` as a special builtin
+that only existed during type-checking.
+At runtime it fails with the expected ``NameError``,
+which can cause real problems in production, hidden from mypy.
+
+But in Python 3.11, ``reveal_type``
+`was added to typing.py <https://docs.python.org/3/library/typing.html#typing.reveal_type>`_.
+``typing_extensions`` ported this helper to all supported Python versions.
+
+Now users can actually import ``reveal_type`` to make the runtime code safe.
+
+.. note::
+
+    Starting with Python 3.11, the ``reveal_type`` function can be imported from ``typing``.
+    To use it with older Python versions, import it from ``typing_extensions`` instead.
+
+.. code-block:: python
+
+    # Use "mypy --enable-error-code unimported-reveal"
+
+    x = 1
+    reveal_type(x)  # Note: Revealed type is "builtins.int" \
+                    # Error: Name "reveal_type" is not defined
+
+Correct usage:
+
+.. code-block:: python
+
+    # Use "mypy --enable-error-code unimported-reveal"
+    from typing import reveal_type   # or `typing_extensions`
+
+    x = 1
+    # This won't raise an error:
+    reveal_type(x)  # Note: Revealed type is "builtins.int"
+
+When this error code is enabled, using ``reveal_locals`` is always an error,
+because there's no way one can import it.
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index a5c8c80e1580..1d5233170a10 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -36,6 +36,7 @@
     ARG_STAR2,
     IMPLICITLY_ABSTRACT,
     LITERAL_TYPE,
+    REVEAL_LOCALS,
     REVEAL_TYPE,
     ArgKind,
     AssertTypeExpr,
@@ -4498,6 +4499,7 @@ def visit_reveal_expr(self, expr: RevealExpr) -> Type:
                     self.msg.note(
                         "'reveal_type' always outputs 'Any' in unchecked functions", expr.expr
                     )
+                self.check_reveal_imported(expr)
             return revealed_type
         else:
             # REVEAL_LOCALS
@@ -4512,8 +4514,32 @@ def visit_reveal_expr(self, expr: RevealExpr) -> Type:
                 )
 
                 self.msg.reveal_locals(names_to_types, expr)
+                self.check_reveal_imported(expr)
             return NoneType()
 
+    def check_reveal_imported(self, expr: RevealExpr) -> None:
+        if codes.UNIMPORTED_REVEAL not in self.chk.options.enabled_error_codes:
+            return
+
+        name = ""
+        if expr.kind == REVEAL_LOCALS:
+            name = "reveal_locals"
+        elif expr.kind == REVEAL_TYPE and not expr.is_imported:
+            name = "reveal_type"
+        else:
+            return
+
+        self.chk.fail(f'Name "{name}" is not defined', expr, code=codes.UNIMPORTED_REVEAL)
+        if name == "reveal_type":
+            module = (
+                "typing" if self.chk.options.python_version >= (3, 11) else "typing_extensions"
+            )
+            hint = (
+                'Did you forget to import it from "{module}"?'
+                ' (Suggestion: "from {module} import {name}")'
+            ).format(module=module, name=name)
+            self.chk.note(hint, expr, code=codes.UNIMPORTED_REVEAL)
+
     def visit_type_application(self, tapp: TypeApplication) -> Type:
         """Type check a type application (expr[type, ...]).
 
diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py
index cd9978c2f31c..98600679da53 100644
--- a/mypy/errorcodes.py
+++ b/mypy/errorcodes.py
@@ -249,6 +249,12 @@ def __hash__(self) -> int:
     "General",
     default_enabled=False,
 )
+UNIMPORTED_REVEAL: Final = ErrorCode(
+    "unimported-reveal",
+    "Require explicit import from typing or typing_extensions for reveal_type",
+    "General",
+    default_enabled=False,
+)
 
 
 # Syntax errors are often blocking.
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 6556cd910b46..0e5c078d0227 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -2135,21 +2135,26 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T:
 class RevealExpr(Expression):
     """Reveal type expression reveal_type(expr) or reveal_locals() expression."""
 
-    __slots__ = ("expr", "kind", "local_nodes")
+    __slots__ = ("expr", "kind", "local_nodes", "is_imported")
 
-    __match_args__ = ("expr", "kind", "local_nodes")
+    __match_args__ = ("expr", "kind", "local_nodes", "is_imported")
 
     expr: Expression | None
     kind: int
     local_nodes: list[Var] | None
 
     def __init__(
-        self, kind: int, expr: Expression | None = None, local_nodes: list[Var] | None = None
+        self,
+        kind: int,
+        expr: Expression | None = None,
+        local_nodes: list[Var] | None = None,
+        is_imported: bool = False,
     ) -> None:
         super().__init__()
         self.expr = expr
         self.kind = kind
         self.local_nodes = local_nodes
+        self.is_imported = is_imported
 
     def accept(self, visitor: ExpressionVisitor[T]) -> T:
         return visitor.visit_reveal_expr(self)
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 9c2452252208..179ee7c70bfb 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -243,6 +243,7 @@
     DATACLASS_TRANSFORM_NAMES,
     FINAL_DECORATOR_NAMES,
     FINAL_TYPE_NAMES,
+    IMPORTED_REVEAL_TYPE_NAMES,
     NEVER_NAMES,
     OVERLOAD_NAMES,
     OVERRIDE_DECORATOR_NAMES,
@@ -5056,7 +5057,17 @@ def visit_call_expr(self, expr: CallExpr) -> None:
         elif refers_to_fullname(expr.callee, REVEAL_TYPE_NAMES):
             if not self.check_fixed_args(expr, 1, "reveal_type"):
                 return
-            expr.analyzed = RevealExpr(kind=REVEAL_TYPE, expr=expr.args[0])
+            reveal_imported = False
+            reveal_type_node = self.lookup("reveal_type", expr, suppress_errors=True)
+            if (
+                reveal_type_node
+                and isinstance(reveal_type_node.node, FuncBase)
+                and reveal_type_node.fullname in IMPORTED_REVEAL_TYPE_NAMES
+            ):
+                reveal_imported = True
+            expr.analyzed = RevealExpr(
+                kind=REVEAL_TYPE, expr=expr.args[0], is_imported=reveal_imported
+            )
             expr.analyzed.line = expr.line
             expr.analyzed.column = expr.column
             expr.analyzed.accept(self)
diff --git a/mypy/types.py b/mypy/types.py
index ea81609fc605..d0c19a08e60a 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -128,11 +128,8 @@
     "typing.Reversible",
 )
 
-REVEAL_TYPE_NAMES: Final = (
-    "builtins.reveal_type",
-    "typing.reveal_type",
-    "typing_extensions.reveal_type",
-)
+IMPORTED_REVEAL_TYPE_NAMES: Final = ("typing.reveal_type", "typing_extensions.reveal_type")
+REVEAL_TYPE_NAMES: Final = ("builtins.reveal_type", *IMPORTED_REVEAL_TYPE_NAMES)
 
 ASSERT_TYPE_NAMES: Final = ("typing.assert_type", "typing_extensions.assert_type")
 
diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test
index ac7c8b4c9f9d..2282f21bcfa6 100644
--- a/test-data/unit/check-errorcodes.test
+++ b/test-data/unit/check-errorcodes.test
@@ -1086,3 +1086,65 @@ def unsafe_func(x: object) -> Union[int, str]:
     else:
         return "some string"
 [builtins fixtures/isinstancelist.pyi]
+
+
+###
+# unimported-reveal
+###
+
+[case testUnimportedRevealType]
+# flags: --enable-error-code=unimported-reveal
+x = 1
+reveal_type(x)
+[out]
+main:3: error: Name "reveal_type" is not defined  [unimported-reveal]
+main:3: note: Did you forget to import it from "typing_extensions"? (Suggestion: "from typing_extensions import reveal_type")
+main:3: note: Revealed type is "builtins.int"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnimportedRevealTypePy311]
+# flags: --enable-error-code=unimported-reveal --python-version=3.11
+x = 1
+reveal_type(x)
+[out]
+main:3: error: Name "reveal_type" is not defined  [unimported-reveal]
+main:3: note: Did you forget to import it from "typing"? (Suggestion: "from typing import reveal_type")
+main:3: note: Revealed type is "builtins.int"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnimportedRevealTypeInUncheckedFunc]
+# flags: --enable-error-code=unimported-reveal
+def unchecked():
+    x = 1
+    reveal_type(x)
+[out]
+main:4: error: Name "reveal_type" is not defined  [unimported-reveal]
+main:4: note: Did you forget to import it from "typing_extensions"? (Suggestion: "from typing_extensions import reveal_type")
+main:4: note: Revealed type is "Any"
+main:4: note: 'reveal_type' always outputs 'Any' in unchecked functions
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnimportedRevealTypeImportedTypingExtensions]
+# flags: --enable-error-code=unimported-reveal
+from typing_extensions import reveal_type
+x = 1
+reveal_type(x)  # N: Revealed type is "builtins.int"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testUnimportedRevealTypeImportedTyping311]
+# flags: --enable-error-code=unimported-reveal --python-version=3.11
+from typing import reveal_type
+x = 1
+reveal_type(x)  # N: Revealed type is "builtins.int"
+[builtins fixtures/isinstancelist.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testUnimportedRevealLocals]
+# flags: --enable-error-code=unimported-reveal
+x = 1
+reveal_locals()
+[out]
+main:3: note: Revealed local types are:
+main:3: note:     x: builtins.int
+main:3: error: Name "reveal_locals" is not defined  [unimported-reveal]
+[builtins fixtures/isinstancelist.pyi]
diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi
index 417ae6baf491..e9f0aa199bb4 100644
--- a/test-data/unit/fixtures/typing-full.pyi
+++ b/test-data/unit/fixtures/typing-full.pyi
@@ -192,3 +192,6 @@ def dataclass_transform(
     **kwargs: Any,
 ) -> Callable[[T], T]: ...
 def override(__arg: T) -> T: ...
+
+# Was added in 3.11
+def reveal_type(__obj: T) -> T: ...

From e1f6d6b4547f118787a68bf503f5c86a2801a2bf Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Thu, 19 Oct 2023 21:22:40 +0100
Subject: [PATCH 185/288] [mypyc] Avoid cyclic reference in nested functions
 (#16268)

Mypyc used to always put nested functions into the environment object,
which results in cyclic references, since the function object contains a
reference to the environment.

Now we only do this if the body of a nested function refers to a nested
function (e.g. due to a recursive call). This means that in the majority
of cases we can avoid the cyclic reference.

This speeds up self check by an impressive 7%. I'm not sure exactly why
the impact is so big, but spending less time in the cyclic garbage
collector is probably a big part.
---
 mypyc/irbuild/builder.py            |   5 +
 mypyc/irbuild/context.py            |   2 +
 mypyc/irbuild/env_class.py          |   2 +-
 mypyc/irbuild/function.py           |  43 +++-
 mypyc/test-data/irbuild-basic.test  | 219 ++++++++--------
 mypyc/test-data/irbuild-nested.test | 380 ++++++++++++----------------
 6 files changed, 305 insertions(+), 346 deletions(-)

diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py
index 8c68f91bf633..0757415f6753 100644
--- a/mypyc/irbuild/builder.py
+++ b/mypyc/irbuild/builder.py
@@ -502,6 +502,11 @@ def non_function_scope(self) -> bool:
         # Currently the stack always has at least two items: dummy and top-level.
         return len(self.fn_infos) <= 2
 
+    def top_level_fn_info(self) -> FuncInfo | None:
+        if self.non_function_scope():
+            return None
+        return self.fn_infos[2]
+
     def init_final_static(
         self,
         lvalue: Lvalue,
diff --git a/mypyc/irbuild/context.py b/mypyc/irbuild/context.py
index 676afb507504..a740f0b821d9 100644
--- a/mypyc/irbuild/context.py
+++ b/mypyc/irbuild/context.py
@@ -22,6 +22,7 @@ def __init__(
         contains_nested: bool = False,
         is_decorated: bool = False,
         in_non_ext: bool = False,
+        add_nested_funcs_to_env: bool = False,
     ) -> None:
         self.fitem = fitem
         self.name = name
@@ -47,6 +48,7 @@ def __init__(
         self.contains_nested = contains_nested
         self.is_decorated = is_decorated
         self.in_non_ext = in_non_ext
+        self.add_nested_funcs_to_env = add_nested_funcs_to_env
 
         # TODO: add field for ret_type: RType = none_rprimitive
 
diff --git a/mypyc/irbuild/env_class.py b/mypyc/irbuild/env_class.py
index ded8072deb63..aa223fe20176 100644
--- a/mypyc/irbuild/env_class.py
+++ b/mypyc/irbuild/env_class.py
@@ -107,7 +107,7 @@ def load_env_registers(builder: IRBuilder) -> None:
         load_outer_envs(builder, fn_info.callable_class)
         # If this is a FuncDef, then make sure to load the FuncDef into its own environment
         # class so that the function can be called recursively.
-        if isinstance(fitem, FuncDef):
+        if isinstance(fitem, FuncDef) and fn_info.add_nested_funcs_to_env:
             setup_func_for_recursive_call(builder, fitem, fn_info.callable_class)
 
 
diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py
index 822350ea829b..ebf7fa9a54de 100644
--- a/mypyc/irbuild/function.py
+++ b/mypyc/irbuild/function.py
@@ -19,6 +19,7 @@
     ArgKind,
     ClassDef,
     Decorator,
+    FuncBase,
     FuncDef,
     FuncItem,
     LambdaExpr,
@@ -222,6 +223,7 @@ def c() -> None:
     is_decorated = fitem in builder.fdefs_to_decorators
     is_singledispatch = fitem in builder.singledispatch_impls
     in_non_ext = False
+    add_nested_funcs_to_env = has_nested_func_self_reference(builder, fitem)
     class_name = None
     if cdef:
         ir = builder.mapper.type_to_ir[cdef.info]
@@ -234,14 +236,15 @@ def c() -> None:
         func_name = name
     builder.enter(
         FuncInfo(
-            fitem,
-            func_name,
-            class_name,
-            gen_func_ns(builder),
-            is_nested,
-            contains_nested,
-            is_decorated,
-            in_non_ext,
+            fitem=fitem,
+            name=func_name,
+            class_name=class_name,
+            namespace=gen_func_ns(builder),
+            is_nested=is_nested,
+            contains_nested=contains_nested,
+            is_decorated=is_decorated,
+            in_non_ext=in_non_ext,
+            add_nested_funcs_to_env=add_nested_funcs_to_env,
         )
     )
 
@@ -267,7 +270,13 @@ def c() -> None:
         builder.enter(fn_info)
         setup_env_for_generator_class(builder)
         load_outer_envs(builder, builder.fn_info.generator_class)
-        if builder.fn_info.is_nested and isinstance(fitem, FuncDef):
+        top_level = builder.top_level_fn_info()
+        if (
+            builder.fn_info.is_nested
+            and isinstance(fitem, FuncDef)
+            and top_level
+            and top_level.add_nested_funcs_to_env
+        ):
             setup_func_for_recursive_call(builder, fitem, builder.fn_info.generator_class)
         create_switch_for_generator_class(builder)
         add_raise_exception_blocks_to_generator_class(builder, fitem.line)
@@ -344,6 +353,20 @@ def c() -> None:
     return func_ir, func_reg
 
 
+def has_nested_func_self_reference(builder: IRBuilder, fitem: FuncItem) -> bool:
+    """Does a nested function contain a self-reference in its body?
+
+    If a nested function only has references in the surrounding function,
+    we don't need to add it to the environment.
+    """
+    if any(isinstance(sym, FuncBase) for sym in builder.free_variables.get(fitem, set())):
+        return True
+    return any(
+        has_nested_func_self_reference(builder, nested)
+        for nested in builder.encapsulating_funcs.get(fitem, [])
+    )
+
+
 def gen_func_ir(
     builder: IRBuilder,
     args: list[Register],
@@ -768,7 +791,7 @@ def get_func_target(builder: IRBuilder, fdef: FuncDef) -> AssignmentTarget:
         # Get the target associated with the previously defined FuncDef.
         return builder.lookup(fdef.original_def)
 
-    if builder.fn_info.is_generator or builder.fn_info.contains_nested:
+    if builder.fn_info.is_generator or builder.fn_info.add_nested_funcs_to_env:
         return builder.lookup(fdef)
 
     return builder.add_local_reg(fdef, object_rprimitive)
diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test
index 33fc8cfaa83b..bf608abb87ad 100644
--- a/mypyc/test-data/irbuild-basic.test
+++ b/mypyc/test-data/irbuild-basic.test
@@ -2694,47 +2694,43 @@ L2:
 def g_a_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.g_a_obj
     r0 :: __main__.a_env
-    r1, g :: object
-    r2 :: str
-    r3 :: object
-    r4 :: str
-    r5, r6, r7, r8 :: object
-    r9 :: str
-    r10 :: object
-    r11 :: str
-    r12, r13 :: object
+    r1 :: str
+    r2 :: object
+    r3 :: str
+    r4, r5, r6, r7 :: object
+    r8 :: str
+    r9 :: object
+    r10 :: str
+    r11, r12 :: object
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.g
-    g = r1
-    r2 = 'Entering'
-    r3 = builtins :: module
-    r4 = 'print'
-    r5 = CPyObject_GetAttr(r3, r4)
-    r6 = PyObject_CallFunctionObjArgs(r5, r2, 0)
-    r7 = r0.f
-    r8 = PyObject_CallFunctionObjArgs(r7, 0)
-    r9 = 'Exited'
-    r10 = builtins :: module
-    r11 = 'print'
-    r12 = CPyObject_GetAttr(r10, r11)
-    r13 = PyObject_CallFunctionObjArgs(r12, r9, 0)
+    r1 = 'Entering'
+    r2 = builtins :: module
+    r3 = 'print'
+    r4 = CPyObject_GetAttr(r2, r3)
+    r5 = PyObject_CallFunctionObjArgs(r4, r1, 0)
+    r6 = r0.f
+    r7 = PyObject_CallFunctionObjArgs(r6, 0)
+    r8 = 'Exited'
+    r9 = builtins :: module
+    r10 = 'print'
+    r11 = CPyObject_GetAttr(r9, r10)
+    r12 = PyObject_CallFunctionObjArgs(r11, r8, 0)
     return 1
 def a(f):
     f :: object
     r0 :: __main__.a_env
     r1 :: bool
     r2 :: __main__.g_a_obj
-    r3, r4 :: bool
-    r5 :: object
+    r3 :: bool
+    g :: object
 L0:
     r0 = a_env()
     r0.f = f; r1 = is_error
     r2 = g_a_obj()
     r2.__mypyc_env__ = r0; r3 = is_error
-    r0.g = r2; r4 = is_error
-    r5 = r0.g
-    return r5
+    g = r2
+    return g
 def g_b_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -2751,47 +2747,43 @@ L2:
 def g_b_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.g_b_obj
     r0 :: __main__.b_env
-    r1, g :: object
-    r2 :: str
-    r3 :: object
-    r4 :: str
-    r5, r6, r7, r8 :: object
-    r9 :: str
-    r10 :: object
-    r11 :: str
-    r12, r13 :: object
+    r1 :: str
+    r2 :: object
+    r3 :: str
+    r4, r5, r6, r7 :: object
+    r8 :: str
+    r9 :: object
+    r10 :: str
+    r11, r12 :: object
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.g
-    g = r1
-    r2 = '---'
-    r3 = builtins :: module
-    r4 = 'print'
-    r5 = CPyObject_GetAttr(r3, r4)
-    r6 = PyObject_CallFunctionObjArgs(r5, r2, 0)
-    r7 = r0.f
-    r8 = PyObject_CallFunctionObjArgs(r7, 0)
-    r9 = '---'
-    r10 = builtins :: module
-    r11 = 'print'
-    r12 = CPyObject_GetAttr(r10, r11)
-    r13 = PyObject_CallFunctionObjArgs(r12, r9, 0)
+    r1 = '---'
+    r2 = builtins :: module
+    r3 = 'print'
+    r4 = CPyObject_GetAttr(r2, r3)
+    r5 = PyObject_CallFunctionObjArgs(r4, r1, 0)
+    r6 = r0.f
+    r7 = PyObject_CallFunctionObjArgs(r6, 0)
+    r8 = '---'
+    r9 = builtins :: module
+    r10 = 'print'
+    r11 = CPyObject_GetAttr(r9, r10)
+    r12 = PyObject_CallFunctionObjArgs(r11, r8, 0)
     return 1
 def b(f):
     f :: object
     r0 :: __main__.b_env
     r1 :: bool
     r2 :: __main__.g_b_obj
-    r3, r4 :: bool
-    r5 :: object
+    r3 :: bool
+    g :: object
 L0:
     r0 = b_env()
     r0.f = f; r1 = is_error
     r2 = g_b_obj()
     r2.__mypyc_env__ = r0; r3 = is_error
-    r0.g = r2; r4 = is_error
-    r5 = r0.g
-    return r5
+    g = r2
+    return g
 def d_c_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -2808,20 +2800,17 @@ L2:
 def d_c_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.d_c_obj
     r0 :: __main__.c_env
-    r1, d :: object
-    r2 :: str
-    r3 :: object
-    r4 :: str
-    r5, r6 :: object
+    r1 :: str
+    r2 :: object
+    r3 :: str
+    r4, r5 :: object
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.d
-    d = r1
-    r2 = 'd'
-    r3 = builtins :: module
-    r4 = 'print'
-    r5 = CPyObject_GetAttr(r3, r4)
-    r6 = PyObject_CallFunctionObjArgs(r5, r2, 0)
+    r1 = 'd'
+    r2 = builtins :: module
+    r3 = 'print'
+    r4 = CPyObject_GetAttr(r2, r3)
+    r5 = PyObject_CallFunctionObjArgs(r4, r1, 0)
     return 1
 def c():
     r0 :: __main__.c_env
@@ -2832,16 +2821,15 @@ def c():
     r5, r6 :: object
     r7 :: dict
     r8 :: str
-    r9, r10 :: object
-    r11 :: bool
-    r12 :: dict
-    r13 :: str
-    r14 :: i32
-    r15 :: bit
-    r16 :: str
-    r17 :: object
-    r18 :: str
-    r19, r20, r21, r22 :: object
+    r9, r10, d :: object
+    r11 :: dict
+    r12 :: str
+    r13 :: i32
+    r14 :: bit
+    r15 :: str
+    r16 :: object
+    r17 :: str
+    r18, r19, r20 :: object
 L0:
     r0 = c_env()
     r1 = d_c_obj()
@@ -2854,18 +2842,17 @@ L0:
     r8 = 'a'
     r9 = CPyDict_GetItem(r7, r8)
     r10 = PyObject_CallFunctionObjArgs(r9, r6, 0)
-    r0.d = r10; r11 = is_error
-    r12 = __main__.globals :: static
-    r13 = 'd'
-    r14 = CPyDict_SetItem(r12, r13, r10)
-    r15 = r14 >= 0 :: signed
-    r16 = 'c'
-    r17 = builtins :: module
-    r18 = 'print'
-    r19 = CPyObject_GetAttr(r17, r18)
-    r20 = PyObject_CallFunctionObjArgs(r19, r16, 0)
-    r21 = r0.d
-    r22 = PyObject_CallFunctionObjArgs(r21, 0)
+    d = r10
+    r11 = __main__.globals :: static
+    r12 = 'd'
+    r13 = CPyDict_SetItem(r11, r12, r10)
+    r14 = r13 >= 0 :: signed
+    r15 = 'c'
+    r16 = builtins :: module
+    r17 = 'print'
+    r18 = CPyObject_GetAttr(r16, r17)
+    r19 = PyObject_CallFunctionObjArgs(r18, r15, 0)
+    r20 = PyObject_CallFunctionObjArgs(d, 0)
     return 1
 def __top_level__():
     r0, r1 :: object
@@ -2947,47 +2934,43 @@ L2:
 def g_a_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.g_a_obj
     r0 :: __main__.a_env
-    r1, g :: object
-    r2 :: str
-    r3 :: object
-    r4 :: str
-    r5, r6, r7, r8 :: object
-    r9 :: str
-    r10 :: object
-    r11 :: str
-    r12, r13 :: object
+    r1 :: str
+    r2 :: object
+    r3 :: str
+    r4, r5, r6, r7 :: object
+    r8 :: str
+    r9 :: object
+    r10 :: str
+    r11, r12 :: object
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.g
-    g = r1
-    r2 = 'Entering'
-    r3 = builtins :: module
-    r4 = 'print'
-    r5 = CPyObject_GetAttr(r3, r4)
-    r6 = PyObject_CallFunctionObjArgs(r5, r2, 0)
-    r7 = r0.f
-    r8 = PyObject_CallFunctionObjArgs(r7, 0)
-    r9 = 'Exited'
-    r10 = builtins :: module
-    r11 = 'print'
-    r12 = CPyObject_GetAttr(r10, r11)
-    r13 = PyObject_CallFunctionObjArgs(r12, r9, 0)
+    r1 = 'Entering'
+    r2 = builtins :: module
+    r3 = 'print'
+    r4 = CPyObject_GetAttr(r2, r3)
+    r5 = PyObject_CallFunctionObjArgs(r4, r1, 0)
+    r6 = r0.f
+    r7 = PyObject_CallFunctionObjArgs(r6, 0)
+    r8 = 'Exited'
+    r9 = builtins :: module
+    r10 = 'print'
+    r11 = CPyObject_GetAttr(r9, r10)
+    r12 = PyObject_CallFunctionObjArgs(r11, r8, 0)
     return 1
 def a(f):
     f :: object
     r0 :: __main__.a_env
     r1 :: bool
     r2 :: __main__.g_a_obj
-    r3, r4 :: bool
-    r5 :: object
+    r3 :: bool
+    g :: object
 L0:
     r0 = a_env()
     r0.f = f; r1 = is_error
     r2 = g_a_obj()
     r2.__mypyc_env__ = r0; r3 = is_error
-    r0.g = r2; r4 = is_error
-    r5 = r0.g
-    return r5
+    g = r2
+    return g
 def __top_level__():
     r0, r1 :: object
     r2 :: bit
diff --git a/mypyc/test-data/irbuild-nested.test b/mypyc/test-data/irbuild-nested.test
index adef80263533..b2b884705366 100644
--- a/mypyc/test-data/irbuild-nested.test
+++ b/mypyc/test-data/irbuild-nested.test
@@ -50,25 +50,22 @@ L2:
 def inner_a_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.inner_a_obj
     r0 :: __main__.a_env
-    r1, inner, r2 :: object
+    r1 :: object
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.inner
-    inner = r1
-    r2 = box(None, 1)
-    return r2
+    r1 = box(None, 1)
+    return r1
 def a():
     r0 :: __main__.a_env
     r1 :: __main__.inner_a_obj
-    r2, r3 :: bool
-    r4 :: object
+    r2 :: bool
+    inner :: object
 L0:
     r0 = a_env()
     r1 = inner_a_obj()
     r1.__mypyc_env__ = r0; r2 = is_error
-    r0.inner = r1; r3 = is_error
-    r4 = r0.inner
-    return r4
+    inner = r1
+    return inner
 def second_b_first_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -86,15 +83,12 @@ def second_b_first_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.second_b_first_obj
     r0 :: __main__.first_b_env
     r1 :: __main__.b_env
-    r2, second :: object
-    r3 :: str
+    r2 :: str
 L0:
     r0 = __mypyc_self__.__mypyc_env__
     r1 = r0.__mypyc_env__
-    r2 = r0.second
-    second = r2
-    r3 = 'b.first.second: nested function'
-    return r3
+    r2 = 'b.first.second: nested function'
+    return r2
 def first_b_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -111,35 +105,30 @@ L2:
 def first_b_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.first_b_obj
     r0 :: __main__.b_env
-    r1, first :: object
-    r2 :: __main__.first_b_env
-    r3 :: bool
-    r4 :: __main__.second_b_first_obj
-    r5, r6 :: bool
-    r7 :: object
+    r1 :: __main__.first_b_env
+    r2 :: bool
+    r3 :: __main__.second_b_first_obj
+    r4 :: bool
+    second :: object
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.first
-    first = r1
-    r2 = first_b_env()
-    r2.__mypyc_env__ = r0; r3 = is_error
-    r4 = second_b_first_obj()
-    r4.__mypyc_env__ = r2; r5 = is_error
-    r2.second = r4; r6 = is_error
-    r7 = r2.second
-    return r7
+    r1 = first_b_env()
+    r1.__mypyc_env__ = r0; r2 = is_error
+    r3 = second_b_first_obj()
+    r3.__mypyc_env__ = r1; r4 = is_error
+    second = r3
+    return second
 def b():
     r0 :: __main__.b_env
     r1 :: __main__.first_b_obj
-    r2, r3 :: bool
-    r4 :: object
+    r2 :: bool
+    first :: object
 L0:
     r0 = b_env()
     r1 = first_b_obj()
     r1.__mypyc_env__ = r0; r2 = is_error
-    r0.first = r1; r3 = is_error
-    r4 = r0.first
-    return r4
+    first = r1
+    return first
 def inner_c_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -157,28 +146,24 @@ def inner_c_obj.__call__(__mypyc_self__, s):
     __mypyc_self__ :: __main__.inner_c_obj
     s :: str
     r0 :: __main__.c_env
-    r1, inner :: object
-    r2, r3 :: str
+    r1, r2 :: str
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.inner
-    inner = r1
-    r2 = '!'
-    r3 = PyUnicode_Concat(s, r2)
-    return r3
+    r1 = '!'
+    r2 = PyUnicode_Concat(s, r1)
+    return r2
 def c(num):
     num :: float
     r0 :: __main__.c_env
     r1 :: __main__.inner_c_obj
-    r2, r3 :: bool
-    r4 :: object
+    r2 :: bool
+    inner :: object
 L0:
     r0 = c_env()
     r1 = inner_c_obj()
     r1.__mypyc_env__ = r0; r2 = is_error
-    r0.inner = r1; r3 = is_error
-    r4 = r0.inner
-    return r4
+    inner = r1
+    return inner
 def inner_d_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -196,40 +181,36 @@ def inner_d_obj.__call__(__mypyc_self__, s):
     __mypyc_self__ :: __main__.inner_d_obj
     s :: str
     r0 :: __main__.d_env
-    r1, inner :: object
-    r2, r3 :: str
+    r1, r2 :: str
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.inner
-    inner = r1
-    r2 = '?'
-    r3 = PyUnicode_Concat(s, r2)
-    return r3
+    r1 = '?'
+    r2 = PyUnicode_Concat(s, r1)
+    return r2
 def d(num):
     num :: float
     r0 :: __main__.d_env
     r1 :: __main__.inner_d_obj
-    r2, r3 :: bool
-    r4 :: str
-    r5, r6 :: object
-    r7, a, r8 :: str
-    r9, r10 :: object
-    r11, b :: str
+    r2 :: bool
+    inner :: object
+    r3 :: str
+    r4 :: object
+    r5, a, r6 :: str
+    r7 :: object
+    r8, b :: str
 L0:
     r0 = d_env()
     r1 = inner_d_obj()
     r1.__mypyc_env__ = r0; r2 = is_error
-    r0.inner = r1; r3 = is_error
-    r4 = 'one'
-    r5 = r0.inner
-    r6 = PyObject_CallFunctionObjArgs(r5, r4, 0)
-    r7 = cast(str, r6)
-    a = r7
-    r8 = 'two'
-    r9 = r0.inner
-    r10 = PyObject_CallFunctionObjArgs(r9, r8, 0)
-    r11 = cast(str, r10)
-    b = r11
+    inner = r1
+    r3 = 'one'
+    r4 = PyObject_CallFunctionObjArgs(inner, r3, 0)
+    r5 = cast(str, r4)
+    a = r5
+    r6 = 'two'
+    r7 = PyObject_CallFunctionObjArgs(inner, r6, 0)
+    r8 = cast(str, r7)
+    b = r8
     return a
 def inner():
     r0 :: str
@@ -290,32 +271,28 @@ L2:
 def inner_a_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.inner_a_obj
     r0 :: __main__.a_env
-    r1, inner :: object
-    r2 :: int
+    r1 :: int
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.inner
-    inner = r1
-    r2 = r0.num
-    return r2
+    r1 = r0.num
+    return r1
 def a(num):
     num :: int
     r0 :: __main__.a_env
     r1 :: bool
     r2 :: __main__.inner_a_obj
-    r3, r4 :: bool
-    r5, r6 :: object
-    r7 :: int
+    r3 :: bool
+    inner, r4 :: object
+    r5 :: int
 L0:
     r0 = a_env()
     r0.num = num; r1 = is_error
     r2 = inner_a_obj()
     r2.__mypyc_env__ = r0; r3 = is_error
-    r0.inner = r2; r4 = is_error
-    r5 = r0.inner
-    r6 = PyObject_CallFunctionObjArgs(r5, 0)
-    r7 = unbox(int, r6)
-    return r7
+    inner = r2
+    r4 = PyObject_CallFunctionObjArgs(inner, 0)
+    r5 = unbox(int, r4)
+    return r5
 def inner_b_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -332,36 +309,32 @@ L2:
 def inner_b_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.inner_b_obj
     r0 :: __main__.b_env
-    r1, inner :: object
-    r2 :: bool
-    foo, r3 :: int
+    r1 :: bool
+    foo, r2 :: int
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.inner
-    inner = r1
-    r0.num = 8; r2 = is_error
+    r0.num = 8; r1 = is_error
     foo = 12
-    r3 = r0.num
-    return r3
+    r2 = r0.num
+    return r2
 def b():
     r0 :: __main__.b_env
     r1 :: bool
     r2 :: __main__.inner_b_obj
-    r3, r4 :: bool
-    r5, r6 :: object
-    r7, r8, r9 :: int
+    r3 :: bool
+    inner, r4 :: object
+    r5, r6, r7 :: int
 L0:
     r0 = b_env()
     r0.num = 6; r1 = is_error
     r2 = inner_b_obj()
     r2.__mypyc_env__ = r0; r3 = is_error
-    r0.inner = r2; r4 = is_error
-    r5 = r0.inner
-    r6 = PyObject_CallFunctionObjArgs(r5, 0)
-    r7 = unbox(int, r6)
-    r8 = r0.num
-    r9 = CPyTagged_Add(r7, r8)
-    return r9
+    inner = r2
+    r4 = PyObject_CallFunctionObjArgs(inner, 0)
+    r5 = unbox(int, r4)
+    r6 = r0.num
+    r7 = CPyTagged_Add(r5, r6)
+    return r7
 def inner_c_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -378,14 +351,11 @@ L2:
 def inner_c_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.inner_c_obj
     r0 :: __main__.c_env
-    r1, inner :: object
-    r2 :: str
+    r1 :: str
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.inner
-    inner = r1
-    r2 = 'f.inner: first definition'
-    return r2
+    r1 = 'f.inner: first definition'
+    return r1
 def inner_c_obj_0.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -402,40 +372,37 @@ L2:
 def inner_c_obj_0.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.inner_c_obj_0
     r0 :: __main__.c_env
-    r1, inner :: object
-    r2 :: str
+    r1 :: str
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.inner
-    inner = r1
-    r2 = 'f.inner: second definition'
-    return r2
+    r1 = 'f.inner: second definition'
+    return r1
 def c(flag):
     flag :: bool
     r0 :: __main__.c_env
     r1 :: __main__.inner_c_obj
-    r2, r3 :: bool
-    r4 :: __main__.inner_c_obj_0
-    r5, r6 :: bool
-    r7, r8 :: object
-    r9 :: str
+    r2 :: bool
+    inner :: object
+    r3 :: __main__.inner_c_obj_0
+    r4 :: bool
+    r5 :: object
+    r6 :: str
 L0:
     r0 = c_env()
     if flag goto L1 else goto L2 :: bool
 L1:
     r1 = inner_c_obj()
     r1.__mypyc_env__ = r0; r2 = is_error
-    r0.inner = r1; r3 = is_error
+    inner = r1
     goto L3
 L2:
-    r4 = inner_c_obj_0()
-    r4.__mypyc_env__ = r0; r5 = is_error
-    r0.inner = r4; r6 = is_error
+    r3 = inner_c_obj_0()
+    r3.__mypyc_env__ = r0; r4 = is_error
+    inner = r3
 L3:
-    r7 = r0.inner
-    r8 = PyObject_CallFunctionObjArgs(r7, 0)
-    r9 = cast(str, r8)
-    return r9
+    r5 = PyObject_CallFunctionObjArgs(inner, 0)
+    r6 = cast(str, r5)
+    return r6
 
 [case testSpecialNested]
 def a() -> int:
@@ -465,15 +432,12 @@ def c_a_b_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.c_a_b_obj
     r0 :: __main__.b_a_env
     r1 :: __main__.a_env
-    r2, c :: object
-    r3 :: int
+    r2 :: int
 L0:
     r0 = __mypyc_self__.__mypyc_env__
     r1 = r0.__mypyc_env__
-    r2 = r0.c
-    c = r2
-    r3 = r1.x
-    return r3
+    r2 = r1.x
+    return r2
 def b_a_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -490,48 +454,43 @@ L2:
 def b_a_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.b_a_obj
     r0 :: __main__.a_env
-    r1, b :: object
-    r2 :: __main__.b_a_env
-    r3 :: bool
-    r4, r5 :: int
-    r6 :: bool
-    r7 :: __main__.c_a_b_obj
-    r8, r9 :: bool
-    r10, r11 :: object
-    r12 :: int
+    r1 :: __main__.b_a_env
+    r2 :: bool
+    r3, r4 :: int
+    r5 :: bool
+    r6 :: __main__.c_a_b_obj
+    r7 :: bool
+    c, r8 :: object
+    r9 :: int
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.b
-    b = r1
-    r2 = b_a_env()
-    r2.__mypyc_env__ = r0; r3 = is_error
-    r4 = r0.x
-    r5 = CPyTagged_Add(r4, 2)
-    r0.x = r5; r6 = is_error
-    r7 = c_a_b_obj()
-    r7.__mypyc_env__ = r2; r8 = is_error
-    r2.c = r7; r9 = is_error
-    r10 = r2.c
-    r11 = PyObject_CallFunctionObjArgs(r10, 0)
-    r12 = unbox(int, r11)
-    return r12
+    r1 = b_a_env()
+    r1.__mypyc_env__ = r0; r2 = is_error
+    r3 = r0.x
+    r4 = CPyTagged_Add(r3, 2)
+    r0.x = r4; r5 = is_error
+    r6 = c_a_b_obj()
+    r6.__mypyc_env__ = r1; r7 = is_error
+    c = r6
+    r8 = PyObject_CallFunctionObjArgs(c, 0)
+    r9 = unbox(int, r8)
+    return r9
 def a():
     r0 :: __main__.a_env
     r1 :: bool
     r2 :: __main__.b_a_obj
-    r3, r4 :: bool
-    r5, r6 :: object
-    r7 :: int
+    r3 :: bool
+    b, r4 :: object
+    r5 :: int
 L0:
     r0 = a_env()
     r0.x = 2; r1 = is_error
     r2 = b_a_obj()
     r2.__mypyc_env__ = r0; r3 = is_error
-    r0.b = r2; r4 = is_error
-    r5 = r0.b
-    r6 = PyObject_CallFunctionObjArgs(r5, 0)
-    r7 = unbox(int, r6)
-    return r7
+    b = r2
+    r4 = PyObject_CallFunctionObjArgs(b, 0)
+    r5 = unbox(int, r4)
+    return r5
 
 [case testNestedFunctionInsideStatements]
 def f(flag: bool) -> str:
@@ -559,14 +518,11 @@ L2:
 def inner_f_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.inner_f_obj
     r0 :: __main__.f_env
-    r1, inner :: object
-    r2 :: str
+    r1 :: str
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.inner
-    inner = r1
-    r2 = 'f.inner: first definition'
-    return r2
+    r1 = 'f.inner: first definition'
+    return r1
 def inner_f_obj_0.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -583,40 +539,37 @@ L2:
 def inner_f_obj_0.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.inner_f_obj_0
     r0 :: __main__.f_env
-    r1, inner :: object
-    r2 :: str
+    r1 :: str
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.inner
-    inner = r1
-    r2 = 'f.inner: second definition'
-    return r2
+    r1 = 'f.inner: second definition'
+    return r1
 def f(flag):
     flag :: bool
     r0 :: __main__.f_env
     r1 :: __main__.inner_f_obj
-    r2, r3 :: bool
-    r4 :: __main__.inner_f_obj_0
-    r5, r6 :: bool
-    r7, r8 :: object
-    r9 :: str
+    r2 :: bool
+    inner :: object
+    r3 :: __main__.inner_f_obj_0
+    r4 :: bool
+    r5 :: object
+    r6 :: str
 L0:
     r0 = f_env()
     if flag goto L1 else goto L2 :: bool
 L1:
     r1 = inner_f_obj()
     r1.__mypyc_env__ = r0; r2 = is_error
-    r0.inner = r1; r3 = is_error
+    inner = r1
     goto L3
 L2:
-    r4 = inner_f_obj_0()
-    r4.__mypyc_env__ = r0; r5 = is_error
-    r0.inner = r4; r6 = is_error
+    r3 = inner_f_obj_0()
+    r3.__mypyc_env__ = r0; r4 = is_error
+    inner = r3
 L3:
-    r7 = r0.inner
-    r8 = PyObject_CallFunctionObjArgs(r7, 0)
-    r9 = cast(str, r8)
-    return r9
+    r5 = PyObject_CallFunctionObjArgs(inner, 0)
+    r6 = cast(str, r5)
+    return r6
 
 [case testNestedFunctionsCallEachOther]
 from typing import Callable, List
@@ -652,15 +605,12 @@ L2:
 def foo_f_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.foo_f_obj
     r0 :: __main__.f_env
-    r1, foo :: object
-    r2, r3 :: int
+    r1, r2 :: int
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.foo
-    foo = r1
-    r2 = r0.a
-    r3 = CPyTagged_Add(r2, 2)
-    return r3
+    r1 = r0.a
+    r2 = CPyTagged_Add(r1, 2)
+    return r2
 def bar_f_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -677,16 +627,14 @@ L2:
 def bar_f_obj.__call__(__mypyc_self__):
     __mypyc_self__ :: __main__.bar_f_obj
     r0 :: __main__.f_env
-    r1, bar, r2, r3 :: object
-    r4 :: int
+    r1, r2 :: object
+    r3 :: int
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.bar
-    bar = r1
-    r2 = r0.foo
-    r3 = PyObject_CallFunctionObjArgs(r2, 0)
-    r4 = unbox(int, r3)
-    return r4
+    r1 = r0.foo
+    r2 = PyObject_CallFunctionObjArgs(r1, 0)
+    r3 = unbox(int, r2)
+    return r3
 def baz_f_obj.__get__(__mypyc_self__, instance, owner):
     __mypyc_self__, instance, owner, r0 :: object
     r1 :: bit
@@ -704,23 +652,21 @@ def baz_f_obj.__call__(__mypyc_self__, n):
     __mypyc_self__ :: __main__.baz_f_obj
     n :: int
     r0 :: __main__.f_env
-    r1, baz :: object
-    r2 :: bit
-    r3 :: int
-    r4, r5 :: object
+    r1 :: bit
+    r2 :: int
+    r3, r4, r5 :: object
     r6, r7 :: int
 L0:
     r0 = __mypyc_self__.__mypyc_env__
-    r1 = r0.baz
-    baz = r1
-    r2 = n == 0
-    if r2 goto L1 else goto L2 :: bool
+    r1 = n == 0
+    if r1 goto L1 else goto L2 :: bool
 L1:
     return 0
 L2:
-    r3 = CPyTagged_Subtract(n, 2)
-    r4 = box(int, r3)
-    r5 = PyObject_CallFunctionObjArgs(baz, r4, 0)
+    r2 = CPyTagged_Subtract(n, 2)
+    r3 = r0.baz
+    r4 = box(int, r2)
+    r5 = PyObject_CallFunctionObjArgs(r3, r4, 0)
     r6 = unbox(int, r5)
     r7 = CPyTagged_Add(n, r6)
     return r7

From 1c218ea2c674d7a06c8ed4c2f95855f1d3fd26da Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Fri, 20 Oct 2023 00:13:46 +0100
Subject: [PATCH 186/288] Fix daemon false positives related to module-level
 __getattr__ (#16292)

In some cases, mypy daemon could generate false positives about imports
targeting packages with a module-level `__getattr__` method. The root
cause was that the `mypy.build.in_partial_package` function would leave
a partially initialized module in the `modules` dictionary of
`BuildManager`, which could probably cause all sorts of confusion. I
fixed this by making sure that ASTs related to temporary `State` objects
don't get persisted.

Also updated a test case to properly delete a package -- an empty
directory is now actually a valid namespace package, so to delete a
package we should delete the directory, not just the files inside it.
---
 mypy/build.py                            | 10 +++++----
 test-data/unit/fine-grained-modules.test |  6 ++----
 test-data/unit/fine-grained.test         | 27 ++++++++++++++++++++++++
 3 files changed, 35 insertions(+), 8 deletions(-)

diff --git a/mypy/build.py b/mypy/build.py
index b481cc6ad0dc..1385021aac48 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -1991,7 +1991,7 @@ def __init__(
                 raise ModuleNotFound
 
             # Parse the file (and then some) to get the dependencies.
-            self.parse_file()
+            self.parse_file(temporary=temporary)
             self.compute_dependencies()
 
     @property
@@ -2109,7 +2109,7 @@ def fix_cross_refs(self) -> None:
 
     # Methods for processing modules from source code.
 
-    def parse_file(self) -> None:
+    def parse_file(self, *, temporary: bool = False) -> None:
         """Parse file and run first pass of semantic analysis.
 
         Everything done here is local to the file. Don't depend on imported
@@ -2194,12 +2194,14 @@ def parse_file(self) -> None:
         else:
             self.early_errors = manager.ast_cache[self.id][1]
 
-        modules[self.id] = self.tree
+        if not temporary:
+            modules[self.id] = self.tree
 
         if not cached:
             self.semantic_analysis_pass1()
 
-        self.check_blockers()
+        if not temporary:
+            self.check_blockers()
 
         manager.ast_cache[self.id] = (self.tree, self.early_errors)
 
diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test
index 163e859276cb..f28dbaa1113b 100644
--- a/test-data/unit/fine-grained-modules.test
+++ b/test-data/unit/fine-grained-modules.test
@@ -837,15 +837,13 @@ p.a.f(1)
 [file p/__init__.py]
 [file p/a.py]
 def f(x: str) -> None: pass
-[delete p/__init__.py.2]
-[delete p/a.py.2]
-def f(x: str) -> None: pass
+[delete p.2]
 [out]
 main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"
 ==
 main:1: error: Cannot find implementation or library stub for module named "p.a"
 main:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
-main:2: error: "object" has no attribute "a"
+main:1: error: Cannot find implementation or library stub for module named "p"
 
 [case testDeletePackage2]
 import p
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
index 68f72a2aa992..cb24467cbf41 100644
--- a/test-data/unit/fine-grained.test
+++ b/test-data/unit/fine-grained.test
@@ -10337,3 +10337,30 @@ b.py:1: note: Use "-> None" if function does not return a value
 ==
 a.py:1: error: Function is missing a return type annotation
 a.py:1: note: Use "-> None" if function does not return a value
+
+[case testModuleLevelGetAttrInStub]
+import stub
+import a
+import b
+
+[file stub/__init__.pyi]
+s: str
+def __getattr__(self): pass
+
+[file a.py]
+
+[file a.py.2]
+from stub import x
+from stub.pkg import y
+from stub.pkg.sub import z
+
+[file b.py]
+
+[file b.py.3]
+from stub import s
+reveal_type(s)
+
+[out]
+==
+==
+b.py:2: note: Revealed type is "builtins.str"

From 5506cba158d76cd11697d1178d73a552aa617b7c Mon Sep 17 00:00:00 2001
From: Ihor <31508183+nautics889@users.noreply.github.com>
Date: Fri, 20 Oct 2023 23:00:39 +0300
Subject: [PATCH 187/288] fix: remove redundant `.format()` (#16288)

Originally this was added in revision 040f3ab at line 562.
---
 mypyc/codegen/emit.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py
index 7d41ee7e162b..fce6896e8d11 100644
--- a/mypyc/codegen/emit.py
+++ b/mypyc/codegen/emit.py
@@ -686,7 +686,7 @@ def emit_cast(
             if likely:
                 check = f"(likely{check})"
             self.emit_arg_check(src, dest, typ, check, optional)
-            self.emit_lines(f"    {dest} = {src};".format(dest, src), "else {")
+            self.emit_lines(f"    {dest} = {src};", "else {")
             self.emit_cast_error_handler(error, src, dest, typ, raise_exception)
             self.emit_line("}")
         elif is_none_rprimitive(typ):

From eecbcb981708bded48d9c17f5fd7ab843b57b2c0 Mon Sep 17 00:00:00 2001
From: Ganden Schaffner <github@xqzw.me>
Date: Fri, 20 Oct 2023 16:29:04 -0700
Subject: [PATCH 188/288] Correctly recognize `typing_extensions.NewType`
 (#16298)

<!--
Checklist:
- Read the [Contributing
Guidelines](https://github.com/python/mypy/blob/master/CONTRIBUTING.md)
- Add tests for all changed behaviour.
- If you can't add a test, please explain why and how you verified your
changes work.
- Make sure CI passes.
- Please do not force push to the PR once it has been reviewed.
-->

fixes #16297.

since the `.+_NAMES` constants in `types.py` are each referenced
multiple times while other examples like this (i.e. a `.+_NAMES`
tuple/set used only once) are inlined, I've inlined this one.
---
 mypy/semanal_newtype.py           | 2 +-
 test-data/unit/check-newtype.test | 7 +++++++
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/mypy/semanal_newtype.py b/mypy/semanal_newtype.py
index a8380309d310..16c6c024800d 100644
--- a/mypy/semanal_newtype.py
+++ b/mypy/semanal_newtype.py
@@ -147,7 +147,7 @@ def analyze_newtype_declaration(self, s: AssignmentStmt) -> tuple[str | None, Ca
             and isinstance(s.lvalues[0], NameExpr)
             and isinstance(s.rvalue, CallExpr)
             and isinstance(s.rvalue.callee, RefExpr)
-            and s.rvalue.callee.fullname == "typing.NewType"
+            and (s.rvalue.callee.fullname in ("typing.NewType", "typing_extensions.NewType"))
         ):
             name = s.lvalues[0].name
 
diff --git a/test-data/unit/check-newtype.test b/test-data/unit/check-newtype.test
index 0ff6b8396fa7..99fdf5fe7ca3 100644
--- a/test-data/unit/check-newtype.test
+++ b/test-data/unit/check-newtype.test
@@ -379,3 +379,10 @@ N = NewType('N', XXX)  # E: Argument 2 to NewType(...) must be subclassable (got
                        # E: Name "XXX" is not defined
 x: List[Union[N, int]]
 [builtins fixtures/list.pyi]
+
+[case testTypingExtensionsNewType]
+# flags: --python-version 3.7
+from typing_extensions import NewType
+N = NewType("N", int)
+x: N
+[builtins fixtures/tuple.pyi]

From ff8cebbcf5094012ee914308dc4f9ecaa7f4684c Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 21 Oct 2023 00:23:00 -0700
Subject: [PATCH 189/288] Lock test dependencies (#16283)

This was discussed in the contributor meetup today. This is a simple
solution that requires very few changes. If you want to upgrade the lock
file, you can pass `--upgrade` or just delete it and regenerate.
---
 .github/workflows/test.yml |   4 +-
 MANIFEST.in                |   1 +
 test-requirements.in       |  19 +++++++
 test-requirements.txt      | 101 +++++++++++++++++++++++++++++++------
 4 files changed, 108 insertions(+), 17 deletions(-)
 create mode 100644 test-requirements.in

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index afa5d5823ea9..86704aca2f91 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -133,7 +133,7 @@ jobs:
         ./misc/build-debug-python.sh $PYTHONVERSION $PYTHONDIR $VENV
         source $VENV/bin/activate
     - name: Install tox
-      run: pip install --upgrade 'setuptools!=50' tox==4.11.0
+      run: pip install setuptools==68.2.2 tox==4.11.0
     - name: Compiled with mypyc
       if: ${{ matrix.test_mypyc }}
       run: |
@@ -185,7 +185,7 @@ jobs:
           default: 3.11.1
           command: python -c "import platform; print(f'{platform.architecture()=} {platform.machine()=}');"
       - name: Install tox
-        run: pip install --upgrade 'setuptools!=50' tox==4.11.0
+        run: pip install setuptools==68.2.2 tox==4.11.0
       - name: Setup tox environment
         run: tox run -e py --notest
       - name: Test
diff --git a/MANIFEST.in b/MANIFEST.in
index a1c15446de3f..3ae340c7bd5e 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -31,6 +31,7 @@ graft mypyc/doc
 # files necessary for testing sdist
 include mypy-requirements.txt
 include build-requirements.txt
+include test-requirements.in
 include test-requirements.txt
 include mypy_self_check.ini
 prune misc
diff --git a/test-requirements.in b/test-requirements.in
new file mode 100644
index 000000000000..bab3ece29c02
--- /dev/null
+++ b/test-requirements.in
@@ -0,0 +1,19 @@
+# If you change this file (or mypy-requirements.txt or build-requirements.txt), please run:
+# pip-compile --output-file=test-requirements.txt --strip-extras --allow-unsafe test-requirements.in
+
+-r mypy-requirements.txt
+-r build-requirements.txt
+attrs>=18.0
+black==23.9.1  # must match version in .pre-commit-config.yaml
+filelock>=3.3.0
+# lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses manylinux2014
+lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12'
+pre-commit
+pre-commit-hooks==4.5.0
+psutil>=4.0
+pytest>=7.4.0
+pytest-xdist>=1.34.0
+pytest-cov>=2.10.0
+ruff==0.1.0  # must match version in .pre-commit-config.yaml
+setuptools>=65.5.1
+tomli>=1.1.0  # needed even on py311+ so the self check passes with --python-version 3.7
diff --git a/test-requirements.txt b/test-requirements.txt
index a1fa98917872..3bb9cf29635f 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,16 +1,87 @@
--r mypy-requirements.txt
--r build-requirements.txt
-attrs>=18.0
-black==23.9.1  # must match version in .pre-commit-config.yaml
-filelock>=3.3.0
-# lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses manylinux2014
-lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12'
-pre-commit
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+#    pip-compile --allow-unsafe --output-file=test-requirements.txt --strip-extras test-requirements.in
+#
+attrs==23.1.0
+    # via -r test-requirements.in
+black==23.9.1
+    # via -r test-requirements.in
+cfgv==3.4.0
+    # via pre-commit
+click==8.1.7
+    # via black
+coverage==7.3.2
+    # via pytest-cov
+distlib==0.3.7
+    # via virtualenv
+execnet==2.0.2
+    # via pytest-xdist
+filelock==3.12.4
+    # via
+    #   -r test-requirements.in
+    #   virtualenv
+identify==2.5.30
+    # via pre-commit
+iniconfig==2.0.0
+    # via pytest
+lxml==4.9.2 ; (python_version < "3.11" or sys_platform != "win32") and python_version < "3.12"
+    # via -r test-requirements.in
+mypy-extensions==1.0.0
+    # via
+    #   -r mypy-requirements.txt
+    #   black
+nodeenv==1.8.0
+    # via pre-commit
+packaging==23.2
+    # via
+    #   black
+    #   pytest
+pathspec==0.11.2
+    # via black
+platformdirs==3.11.0
+    # via
+    #   black
+    #   virtualenv
+pluggy==1.3.0
+    # via pytest
+pre-commit==3.5.0
+    # via -r test-requirements.in
 pre-commit-hooks==4.5.0
-psutil>=4.0
-pytest>=7.4.0
-pytest-xdist>=1.34.0
-pytest-cov>=2.10.0
-ruff==0.1.0  # must match version in .pre-commit-config.yaml
-setuptools>=65.5.1
-tomli>=1.1.0  # needed even on py311+ so the self check passes with --python-version 3.7
+    # via -r test-requirements.in
+psutil==5.9.6
+    # via -r test-requirements.in
+pytest==7.4.2
+    # via
+    #   -r test-requirements.in
+    #   pytest-cov
+    #   pytest-xdist
+pytest-cov==4.1.0
+    # via -r test-requirements.in
+pytest-xdist==3.3.1
+    # via -r test-requirements.in
+pyyaml==6.0.1
+    # via pre-commit
+ruamel-yaml==0.17.40
+    # via pre-commit-hooks
+ruamel-yaml-clib==0.2.8
+    # via ruamel-yaml
+ruff==0.1.0
+    # via -r test-requirements.in
+tomli==2.0.1
+    # via -r test-requirements.in
+types-psutil==5.9.5.17
+    # via -r build-requirements.txt
+types-setuptools==68.2.0.0
+    # via -r build-requirements.txt
+typing-extensions==4.8.0
+    # via -r mypy-requirements.txt
+virtualenv==20.24.5
+    # via pre-commit
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==68.2.2
+    # via
+    #   -r test-requirements.in
+    #   nodeenv

From a3af87bf252f0ed0c6e0f977ad4079418b37a70f Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 21 Oct 2023 16:41:10 +0100
Subject: [PATCH 190/288] Narrow tuple types using len() (#16237)

Fixes #1178
Supersedes #10367

This includes an implementation for fixed length tuples, homogeneous
tuples, and variadic tuples (and combinations of those). Generally the
implementation is straightforward. Some notes:
* Unfortunately, it is necessary to add a new attribute `min_len` to
`TypeVarTupleType`, which is probably fine, as it doesn't have that many
attributes so far.
* Supporting more general use cases (like `>` comparisons for variadic
tuples) can cause quick proliferation of unions. I added two mechanisms
to counteract this: not applying the narrowing if the integer literal in
comparison is itself large, and collapsing unions of tuples into a
single tuple (if possible) after we are done with the narrowing. This
looks a bit arbitrary, but I think it is important to have.
* Main missing feature here is probably not inferring type information
from indirect comparisons like `len(x) > foo() > 1`. Supporting this
kind of thing in full generality is cumbersome, and we may add cases
that turn out to be important later.
* Note I am quite careful with indexing "inside" a `TypeVarTuple`, it is
not really needed now, but I wanted to make everything future proof, so
that it will be easy to add support for upper bounds for
`TypeVarTuple`s, like `Nums = TypeVarTuple("Nums", bound=tuple[float,
...])`.
* I also fix a couple of existing inconsistencies with `Any` handling in type
narrowing. It looks like they stem from the old incorrect logic that
meet of `Any` and `X` should be `X`, while in fact it should be `Any`.
These fixes are not strictly necessary, but otherwise there may be new
false positives, because I introduce a bunch of additional type
narrowing scenarios here.

cc @hatal175, thanks for the test cases, and for your nice first attempt
to implement this!
Co-authored-by: Tal Hayon <talhayon1@gmail.com>
---
 mypy/binder.py                        |  83 ++++
 mypy/checker.py                       | 359 +++++++++++++++-
 mypy/checkexpr.py                     |  53 ++-
 mypy/meet.py                          |   6 +-
 mypy/operators.py                     |  23 +
 mypy/options.py                       |   3 +-
 mypy/subtypes.py                      |   2 +-
 mypy/test/testcheck.py                |   2 +-
 mypy/typeops.py                       |   2 +-
 mypy/types.py                         |  27 +-
 mypy_self_check.ini                   |   1 +
 test-data/unit/check-expressions.test |  13 +
 test-data/unit/check-namedtuple.test  |   2 +-
 test-data/unit/check-narrowing.test   | 576 ++++++++++++++++++++++++++
 test-data/unit/fixtures/len.pyi       |  39 ++
 test-data/unit/lib-stub/typing.pyi    |   1 +
 16 files changed, 1154 insertions(+), 38 deletions(-)
 create mode 100644 test-data/unit/fixtures/len.pyi

diff --git a/mypy/binder.py b/mypy/binder.py
index 8a68f24f661e..3b67d09f16c3 100644
--- a/mypy/binder.py
+++ b/mypy/binder.py
@@ -12,12 +12,17 @@
 from mypy.subtypes import is_same_type, is_subtype
 from mypy.types import (
     AnyType,
+    Instance,
     NoneType,
     PartialType,
+    ProperType,
+    TupleType,
     Type,
     TypeOfAny,
     TypeType,
     UnionType,
+    UnpackType,
+    find_unpack_in_list,
     get_proper_type,
 )
 from mypy.typevars import fill_typevars_with_any
@@ -213,6 +218,24 @@ def update_from_options(self, frames: list[Frame]) -> bool:
                 for other in resulting_values[1:]:
                     assert other is not None
                     type = join_simple(self.declarations[key], type, other)
+                    # Try simplifying resulting type for unions involving variadic tuples.
+                    # Technically, everything is still valid without this step, but if we do
+                    # not do this, this may create long unions after exiting an if check like:
+                    #     x: tuple[int, ...]
+                    #     if len(x) < 10:
+                    #         ...
+                    # We want the type of x to be tuple[int, ...] after this block (if it is
+                    # still equivalent to such type).
+                    if isinstance(type, UnionType):
+                        type = collapse_variadic_union(type)
+                    if isinstance(type, ProperType) and isinstance(type, UnionType):
+                        # Simplify away any extra Any's that were added to the declared
+                        # type when popping a frame.
+                        simplified = UnionType.make_union(
+                            [t for t in type.items if not isinstance(get_proper_type(t), AnyType)]
+                        )
+                        if simplified == self.declarations[key]:
+                            type = simplified
             if current_value is None or not is_same_type(type, current_value):
                 self._put(key, type)
                 changed = True
@@ -453,3 +476,63 @@ def get_declaration(expr: BindableExpression) -> Type | None:
         elif isinstance(expr.node, TypeInfo):
             return TypeType(fill_typevars_with_any(expr.node))
     return None
+
+
+def collapse_variadic_union(typ: UnionType) -> Type:
+    """Simplify a union involving variadic tuple if possible.
+
+    This will collapse a type like e.g.
+        tuple[X, Z] | tuple[X, Y, Z] | tuple[X, Y, Y, *tuple[Y, ...], Z]
+    back to
+        tuple[X, *tuple[Y, ...], Z]
+    which is equivalent, but much simpler form of the same type.
+    """
+    tuple_items = []
+    other_items = []
+    for t in typ.items:
+        p_t = get_proper_type(t)
+        if isinstance(p_t, TupleType):
+            tuple_items.append(p_t)
+        else:
+            other_items.append(t)
+    if len(tuple_items) <= 1:
+        # This type cannot be simplified further.
+        return typ
+    tuple_items = sorted(tuple_items, key=lambda t: len(t.items))
+    first = tuple_items[0]
+    last = tuple_items[-1]
+    unpack_index = find_unpack_in_list(last.items)
+    if unpack_index is None:
+        return typ
+    unpack = last.items[unpack_index]
+    assert isinstance(unpack, UnpackType)
+    unpacked = get_proper_type(unpack.type)
+    if not isinstance(unpacked, Instance):
+        return typ
+    assert unpacked.type.fullname == "builtins.tuple"
+    suffix = last.items[unpack_index + 1 :]
+
+    # Check that first item matches the expected pattern and infer prefix.
+    if len(first.items) < len(suffix):
+        return typ
+    if suffix and first.items[-len(suffix) :] != suffix:
+        return typ
+    if suffix:
+        prefix = first.items[: -len(suffix)]
+    else:
+        prefix = first.items
+
+    # Check that all middle types match the expected pattern as well.
+    arg = unpacked.args[0]
+    for i, it in enumerate(tuple_items[1:-1]):
+        if it.items != prefix + [arg] * (i + 1) + suffix:
+            return typ
+
+    # Check the last item (the one with unpack), and choose an appropriate simplified type.
+    if last.items != prefix + [arg] * (len(typ.items) - 1) + [unpack] + suffix:
+        return typ
+    if len(first.items) == 0:
+        simplified: Type = unpacked.copy_modified()
+    else:
+        simplified = TupleType(prefix + [unpack] + suffix, fallback=last.partial_fallback)
+    return UnionType.make_union([simplified] + other_items)
diff --git a/mypy/checker.py b/mypy/checker.py
index e1b65a95ae98..02bab37aa13f 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -43,7 +43,7 @@
 from mypy.expandtype import expand_self_type, expand_type, expand_type_by_instance
 from mypy.literals import Key, extract_var_from_literal_hash, literal, literal_hash
 from mypy.maptype import map_instance_to_supertype
-from mypy.meet import is_overlapping_erased_types, is_overlapping_types
+from mypy.meet import is_overlapping_erased_types, is_overlapping_types, meet_types
 from mypy.message_registry import ErrorMessage
 from mypy.messages import (
     SUGGESTED_TEST_FIXTURES,
@@ -134,7 +134,8 @@
     YieldExpr,
     is_final_node,
 )
-from mypy.options import Options
+from mypy.operators import flip_ops, int_op_to_method, neg_ops
+from mypy.options import PRECISE_TUPLE_TYPES, Options
 from mypy.patterns import AsPattern, StarredPattern
 from mypy.plugin import CheckerPluginInterface, Plugin
 from mypy.plugins import dataclasses as dataclasses_plugin
@@ -228,6 +229,9 @@
 
 DEFAULT_LAST_PASS: Final = 1  # Pass numbers start at 0
 
+# Maximum length of fixed tuple types inferred when narrowing from variadic tuples.
+MAX_PRECISE_TUPLE_SIZE: Final = 8
+
 DeferredNodeType: _TypeAlias = Union[FuncDef, LambdaExpr, OverloadedFuncDef, Decorator]
 FineGrainedDeferredNodeType: _TypeAlias = Union[FuncDef, MypyFile, OverloadedFuncDef]
 
@@ -5829,7 +5833,15 @@ def has_no_custom_eq_checks(t: Type) -> bool:
 
                 partial_type_maps.append((if_map, else_map))
 
-            return reduce_conditional_maps(partial_type_maps)
+            # If we have found non-trivial restrictions from the regular comparisons,
+            # then return soon. Otherwise try to infer restrictions involving `len(x)`.
+            # TODO: support regular and len() narrowing in the same chain.
+            if any(m != ({}, {}) for m in partial_type_maps):
+                return reduce_conditional_maps(partial_type_maps)
+            else:
+                # Use meet for `and` maps to get correct results for chained checks
+                # like `if 1 < len(x) < 4: ...`
+                return reduce_conditional_maps(self.find_tuple_len_narrowing(node), use_meet=True)
         elif isinstance(node, AssignmentExpr):
             if_map = {}
             else_map = {}
@@ -5860,7 +5872,10 @@ def has_no_custom_eq_checks(t: Type) -> bool:
             # and false if at least one of e1 and e2 is false.
             return (
                 and_conditional_maps(left_if_vars, right_if_vars),
-                or_conditional_maps(left_else_vars, right_else_vars),
+                # Note that if left else type is Any, we can't add any additional
+                # types to it, since the right maps were computed assuming
+                # the left is True, which may be not the case in the else branch.
+                or_conditional_maps(left_else_vars, right_else_vars, coalesce_any=True),
             )
         elif isinstance(node, OpExpr) and node.op == "or":
             left_if_vars, left_else_vars = self.find_isinstance_check(node.left)
@@ -5875,6 +5890,27 @@ def has_no_custom_eq_checks(t: Type) -> bool:
         elif isinstance(node, UnaryExpr) and node.op == "not":
             left, right = self.find_isinstance_check(node.expr)
             return right, left
+        elif (
+            literal(node) == LITERAL_TYPE
+            and self.has_type(node)
+            and self.can_be_narrowed_with_len(self.lookup_type(node))
+            # Only translate `if x` to `if len(x) > 0` when possible.
+            and not custom_special_method(self.lookup_type(node), "__bool__")
+            and self.options.strict_optional
+        ):
+            # Combine a `len(x) > 0` check with the default logic below.
+            yes_type, no_type = self.narrow_with_len(self.lookup_type(node), ">", 0)
+            if yes_type is not None:
+                yes_type = true_only(yes_type)
+            else:
+                yes_type = UninhabitedType()
+            if no_type is not None:
+                no_type = false_only(no_type)
+            else:
+                no_type = UninhabitedType()
+            if_map = {node: yes_type} if not isinstance(yes_type, UninhabitedType) else None
+            else_map = {node: no_type} if not isinstance(no_type, UninhabitedType) else None
+            return if_map, else_map
 
         # Restrict the type of the variable to True-ish/False-ish in the if and else branches
         # respectively
@@ -6221,6 +6257,287 @@ def refine_away_none_in_comparison(
 
         return if_map, {}
 
+    def is_len_of_tuple(self, expr: Expression) -> bool:
+        """Is this expression a `len(x)` call where x is a tuple or union of tuples?"""
+        if not isinstance(expr, CallExpr):
+            return False
+        if not refers_to_fullname(expr.callee, "builtins.len"):
+            return False
+        if len(expr.args) != 1:
+            return False
+        expr = expr.args[0]
+        if literal(expr) != LITERAL_TYPE:
+            return False
+        if not self.has_type(expr):
+            return False
+        return self.can_be_narrowed_with_len(self.lookup_type(expr))
+
+    def can_be_narrowed_with_len(self, typ: Type) -> bool:
+        """Is this a type that can benefit from length check type restrictions?
+
+        Currently supported types are TupleTypes, Instances of builtins.tuple, and
+        unions involving such types.
+        """
+        if custom_special_method(typ, "__len__"):
+            # If user overrides builtin behavior, we can't do anything.
+            return False
+        p_typ = get_proper_type(typ)
+        # Note: we are conservative about tuple subclasses, because some code may rely on
+        # the fact that tuple_type of fallback TypeInfo matches the original TupleType.
+        if isinstance(p_typ, TupleType):
+            if any(isinstance(t, UnpackType) for t in p_typ.items):
+                return p_typ.partial_fallback.type.fullname == "builtins.tuple"
+            return True
+        if isinstance(p_typ, Instance):
+            return p_typ.type.has_base("builtins.tuple")
+        if isinstance(p_typ, UnionType):
+            return any(self.can_be_narrowed_with_len(t) for t in p_typ.items)
+        return False
+
+    def literal_int_expr(self, expr: Expression) -> int | None:
+        """Is this expression an int literal, or a reference to an int constant?
+
+        If yes, return the corresponding int value, otherwise return None.
+        """
+        if not self.has_type(expr):
+            return None
+        expr_type = self.lookup_type(expr)
+        expr_type = coerce_to_literal(expr_type)
+        proper_type = get_proper_type(expr_type)
+        if not isinstance(proper_type, LiteralType):
+            return None
+        if not isinstance(proper_type.value, int):
+            return None
+        return proper_type.value
+
+    def find_tuple_len_narrowing(self, node: ComparisonExpr) -> list[tuple[TypeMap, TypeMap]]:
+        """Top-level logic to find type restrictions from a length check on tuples.
+
+        We try to detect `if` checks like the following:
+            x: tuple[int, int] | tuple[int, int, int]
+            y: tuple[int, int] | tuple[int, int, int]
+            if len(x) == len(y) == 2:
+                a, b = x  # OK
+                c, d = y  # OK
+
+            z: tuple[int, ...]
+            if 1 < len(z) < 4:
+                x = z  # OK
+        and report corresponding type restrictions to the binder.
+        """
+        # First step: group consecutive `is` and `==` comparisons together.
+        # This is essentially a simplified version of group_comparison_operands(),
+        # tuned to the len()-like checks. Note that we don't propagate indirect
+        # restrictions like e.g. `len(x) > foo() > 1` yet, since it is tricky.
+        # TODO: propagate indirect len() comparison restrictions.
+        chained = []
+        last_group = set()
+        for op, left, right in node.pairwise():
+            if isinstance(left, AssignmentExpr):
+                left = left.value
+            if isinstance(right, AssignmentExpr):
+                right = right.value
+            if op in ("is", "=="):
+                last_group.add(left)
+                last_group.add(right)
+            else:
+                if last_group:
+                    chained.append(("==", list(last_group)))
+                    last_group = set()
+                if op in {"is not", "!=", "<", "<=", ">", ">="}:
+                    chained.append((op, [left, right]))
+        if last_group:
+            chained.append(("==", list(last_group)))
+
+        # Second step: infer type restrictions from each group found above.
+        type_maps = []
+        for op, items in chained:
+            # TODO: support unions of literal types as len() comparison targets.
+            if not any(self.literal_int_expr(it) is not None for it in items):
+                continue
+            if not any(self.is_len_of_tuple(it) for it in items):
+                continue
+
+            # At this step we know there is at least one len(x) and one literal in the group.
+            if op in ("is", "=="):
+                literal_values = set()
+                tuples = []
+                for it in items:
+                    lit = self.literal_int_expr(it)
+                    if lit is not None:
+                        literal_values.add(lit)
+                        continue
+                    if self.is_len_of_tuple(it):
+                        assert isinstance(it, CallExpr)
+                        tuples.append(it.args[0])
+                if len(literal_values) > 1:
+                    # More than one different literal value found, like 1 == len(x) == 2,
+                    # so the corresponding branch is unreachable.
+                    return [(None, {})]
+                size = literal_values.pop()
+                if size > MAX_PRECISE_TUPLE_SIZE:
+                    # Avoid creating huge tuples from checks like if len(x) == 300.
+                    continue
+                for tpl in tuples:
+                    yes_type, no_type = self.narrow_with_len(self.lookup_type(tpl), op, size)
+                    yes_map = None if yes_type is None else {tpl: yes_type}
+                    no_map = None if no_type is None else {tpl: no_type}
+                    type_maps.append((yes_map, no_map))
+            else:
+                left, right = items
+                if self.is_len_of_tuple(right):
+                    # Normalize `1 < len(x)` and similar as `len(x) > 1`.
+                    left, right = right, left
+                    op = flip_ops.get(op, op)
+                r_size = self.literal_int_expr(right)
+                assert r_size is not None
+                if r_size > MAX_PRECISE_TUPLE_SIZE:
+                    # Avoid creating huge unions from checks like if len(x) > 300.
+                    continue
+                assert isinstance(left, CallExpr)
+                yes_type, no_type = self.narrow_with_len(
+                    self.lookup_type(left.args[0]), op, r_size
+                )
+                yes_map = None if yes_type is None else {left.args[0]: yes_type}
+                no_map = None if no_type is None else {left.args[0]: no_type}
+                type_maps.append((yes_map, no_map))
+        return type_maps
+
+    def narrow_with_len(self, typ: Type, op: str, size: int) -> tuple[Type | None, Type | None]:
+        """Dispatch tuple type narrowing logic depending on the kind of type we got."""
+        typ = get_proper_type(typ)
+        if isinstance(typ, TupleType):
+            return self.refine_tuple_type_with_len(typ, op, size)
+        elif isinstance(typ, Instance):
+            return self.refine_instance_type_with_len(typ, op, size)
+        elif isinstance(typ, UnionType):
+            yes_types = []
+            no_types = []
+            other_types = []
+            for t in typ.items:
+                if not self.can_be_narrowed_with_len(t):
+                    other_types.append(t)
+                    continue
+                yt, nt = self.narrow_with_len(t, op, size)
+                if yt is not None:
+                    yes_types.append(yt)
+                if nt is not None:
+                    no_types.append(nt)
+            yes_types += other_types
+            no_types += other_types
+            if yes_types:
+                yes_type = make_simplified_union(yes_types)
+            else:
+                yes_type = None
+            if no_types:
+                no_type = make_simplified_union(no_types)
+            else:
+                no_type = None
+            return yes_type, no_type
+        else:
+            assert False, "Unsupported type for len narrowing"
+
+    def refine_tuple_type_with_len(
+        self, typ: TupleType, op: str, size: int
+    ) -> tuple[Type | None, Type | None]:
+        """Narrow a TupleType using length restrictions."""
+        unpack_index = find_unpack_in_list(typ.items)
+        if unpack_index is None:
+            # For fixed length tuple situation is trivial, it is either reachable or not,
+            # depending on the current length, expected length, and the comparison op.
+            method = int_op_to_method[op]
+            if method(typ.length(), size):
+                return typ, None
+            return None, typ
+        unpack = typ.items[unpack_index]
+        assert isinstance(unpack, UnpackType)
+        unpacked = get_proper_type(unpack.type)
+        if isinstance(unpacked, TypeVarTupleType):
+            # For tuples involving TypeVarTuple unpack we can't do much except
+            # inferring reachability, and recording the restrictions on TypeVarTuple
+            # for further "manual" use elsewhere.
+            min_len = typ.length() - 1 + unpacked.min_len
+            if op in ("==", "is"):
+                if min_len <= size:
+                    return typ, typ
+                return None, typ
+            elif op in ("<", "<="):
+                if op == "<=":
+                    size += 1
+                if min_len < size:
+                    prefix = typ.items[:unpack_index]
+                    suffix = typ.items[unpack_index + 1 :]
+                    # TODO: also record max_len to avoid false negatives?
+                    unpack = UnpackType(unpacked.copy_modified(min_len=size - typ.length() + 1))
+                    return typ, typ.copy_modified(items=prefix + [unpack] + suffix)
+                return None, typ
+            else:
+                yes_type, no_type = self.refine_tuple_type_with_len(typ, neg_ops[op], size)
+                return no_type, yes_type
+        # Homogeneous variadic item is the case where we are most flexible. Essentially,
+        # we adjust the variadic item by "eating away" from it to satisfy the restriction.
+        assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple"
+        min_len = typ.length() - 1
+        arg = unpacked.args[0]
+        prefix = typ.items[:unpack_index]
+        suffix = typ.items[unpack_index + 1 :]
+        if op in ("==", "is"):
+            if min_len <= size:
+                # TODO: return fixed union + prefixed variadic tuple for no_type?
+                return typ.copy_modified(items=prefix + [arg] * (size - min_len) + suffix), typ
+            return None, typ
+        elif op in ("<", "<="):
+            if op == "<=":
+                size += 1
+            if min_len < size:
+                # Note: there is some ambiguity w.r.t. to where to put the additional
+                # items: before or after the unpack. However, such types are equivalent,
+                # so we always put them before for consistency.
+                no_type = typ.copy_modified(
+                    items=prefix + [arg] * (size - min_len) + [unpack] + suffix
+                )
+                yes_items = []
+                for n in range(size - min_len):
+                    yes_items.append(typ.copy_modified(items=prefix + [arg] * n + suffix))
+                return UnionType.make_union(yes_items, typ.line, typ.column), no_type
+            return None, typ
+        else:
+            yes_type, no_type = self.refine_tuple_type_with_len(typ, neg_ops[op], size)
+            return no_type, yes_type
+
+    def refine_instance_type_with_len(
+        self, typ: Instance, op: str, size: int
+    ) -> tuple[Type | None, Type | None]:
+        """Narrow a homogeneous tuple using length restrictions."""
+        base = map_instance_to_supertype(typ, self.lookup_typeinfo("builtins.tuple"))
+        arg = base.args[0]
+        # Again, we are conservative about subclasses until we gain more confidence.
+        allow_precise = (
+            PRECISE_TUPLE_TYPES in self.options.enable_incomplete_feature
+        ) and typ.type.fullname == "builtins.tuple"
+        if op in ("==", "is"):
+            # TODO: return fixed union + prefixed variadic tuple for no_type?
+            return TupleType(items=[arg] * size, fallback=typ), typ
+        elif op in ("<", "<="):
+            if op == "<=":
+                size += 1
+            if allow_precise:
+                unpack = UnpackType(self.named_generic_type("builtins.tuple", [arg]))
+                no_type: Type | None = TupleType(items=[arg] * size + [unpack], fallback=typ)
+            else:
+                no_type = typ
+            if allow_precise:
+                items = []
+                for n in range(size):
+                    items.append(TupleType([arg] * n, fallback=typ))
+                yes_type: Type | None = UnionType.make_union(items, typ.line, typ.column)
+            else:
+                yes_type = typ
+            return yes_type, no_type
+        else:
+            yes_type, no_type = self.refine_instance_type_with_len(typ, neg_ops[op], size)
+            return no_type, yes_type
+
     #
     # Helpers
     #
@@ -7168,7 +7485,7 @@ def builtin_item_type(tp: Type) -> Type | None:
     return None
 
 
-def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
+def and_conditional_maps(m1: TypeMap, m2: TypeMap, use_meet: bool = False) -> TypeMap:
     """Calculate what information we can learn from the truth of (e1 and e2)
     in terms of the information that we can learn from the truth of e1 and
     the truth of e2.
@@ -7178,22 +7495,31 @@ def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
         # One of the conditions can never be true.
         return None
     # Both conditions can be true; combine the information. Anything
-    # we learn from either conditions's truth is valid. If the same
+    # we learn from either conditions' truth is valid. If the same
     # expression's type is refined by both conditions, we somewhat
-    # arbitrarily give precedence to m2. (In the future, we could use
-    # an intersection type.)
+    # arbitrarily give precedence to m2 unless m1 value is Any.
+    # In the future, we could use an intersection type or meet_types().
     result = m2.copy()
     m2_keys = {literal_hash(n2) for n2 in m2}
     for n1 in m1:
-        if literal_hash(n1) not in m2_keys:
+        if literal_hash(n1) not in m2_keys or isinstance(get_proper_type(m1[n1]), AnyType):
             result[n1] = m1[n1]
+    if use_meet:
+        # For now, meet common keys only if specifically requested.
+        # This is currently used for tuple types narrowing, where having
+        # a precise result is important.
+        for n1 in m1:
+            for n2 in m2:
+                if literal_hash(n1) == literal_hash(n2):
+                    result[n1] = meet_types(m1[n1], m2[n2])
     return result
 
 
-def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
+def or_conditional_maps(m1: TypeMap, m2: TypeMap, coalesce_any: bool = False) -> TypeMap:
     """Calculate what information we can learn from the truth of (e1 or e2)
     in terms of the information that we can learn from the truth of e1 and
-    the truth of e2.
+    the truth of e2. If coalesce_any is True, consider Any a supertype when
+    joining restrictions.
     """
 
     if m1 is None:
@@ -7208,11 +7534,16 @@ def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap:
     for n1 in m1:
         for n2 in m2:
             if literal_hash(n1) == literal_hash(n2):
-                result[n1] = make_simplified_union([m1[n1], m2[n2]])
+                if coalesce_any and isinstance(get_proper_type(m1[n1]), AnyType):
+                    result[n1] = m1[n1]
+                else:
+                    result[n1] = make_simplified_union([m1[n1], m2[n2]])
     return result
 
 
-def reduce_conditional_maps(type_maps: list[tuple[TypeMap, TypeMap]]) -> tuple[TypeMap, TypeMap]:
+def reduce_conditional_maps(
+    type_maps: list[tuple[TypeMap, TypeMap]], use_meet: bool = False
+) -> tuple[TypeMap, TypeMap]:
     """Reduces a list containing pairs of if/else TypeMaps into a single pair.
 
     We "and" together all of the if TypeMaps and "or" together the else TypeMaps. So
@@ -7243,7 +7574,7 @@ def reduce_conditional_maps(type_maps: list[tuple[TypeMap, TypeMap]]) -> tuple[T
     else:
         final_if_map, final_else_map = type_maps[0]
         for if_map, else_map in type_maps[1:]:
-            final_if_map = and_conditional_maps(final_if_map, if_map)
+            final_if_map = and_conditional_maps(final_if_map, if_map, use_meet=use_meet)
             final_else_map = or_conditional_maps(final_else_map, else_map)
 
         return final_if_map, final_else_map
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 1d5233170a10..2dc5a93a1de9 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -3643,6 +3643,14 @@ def dangerous_comparison(
                 left = map_instance_to_supertype(left, abstract_set)
                 right = map_instance_to_supertype(right, abstract_set)
                 return self.dangerous_comparison(left.args[0], right.args[0])
+            elif left.type.has_base("typing.Mapping") and right.type.has_base("typing.Mapping"):
+                # Similar to above: Mapping ignores the classes, it just compares items.
+                abstract_map = self.chk.lookup_typeinfo("typing.Mapping")
+                left = map_instance_to_supertype(left, abstract_map)
+                right = map_instance_to_supertype(right, abstract_map)
+                return self.dangerous_comparison(
+                    left.args[0], right.args[0]
+                ) or self.dangerous_comparison(left.args[1], right.args[1])
             elif left_name in ("builtins.list", "builtins.tuple") and right_name == left_name:
                 return self.dangerous_comparison(left.args[0], right.args[0])
             elif left_name in OVERLAPPING_BYTES_ALLOWLIST and right_name in (
@@ -4228,9 +4236,8 @@ def visit_index_with_type(
                     else:
                         self.chk.fail(message_registry.TUPLE_INDEX_OUT_OF_RANGE, e)
                         if any(isinstance(t, UnpackType) for t in left_type.items):
-                            self.chk.note(
-                                f"Variadic tuple can have length {left_type.length() - 1}", e
-                            )
+                            min_len = self.min_tuple_length(left_type)
+                            self.chk.note(f"Variadic tuple can have length {min_len}", e)
                         return AnyType(TypeOfAny.from_error)
                 return make_simplified_union(out)
             else:
@@ -4254,6 +4261,16 @@ def visit_index_with_type(
             e.method_type = method_type
             return result
 
+    def min_tuple_length(self, left: TupleType) -> int:
+        unpack_index = find_unpack_in_list(left.items)
+        if unpack_index is None:
+            return left.length()
+        unpack = left.items[unpack_index]
+        assert isinstance(unpack, UnpackType)
+        if isinstance(unpack.type, TypeVarTupleType):
+            return left.length() - 1 + unpack.type.min_len
+        return left.length() - 1
+
     def visit_tuple_index_helper(self, left: TupleType, n: int) -> Type | None:
         unpack_index = find_unpack_in_list(left.items)
         if unpack_index is None:
@@ -4267,31 +4284,39 @@ def visit_tuple_index_helper(self, left: TupleType, n: int) -> Type | None:
         unpacked = get_proper_type(unpack.type)
         if isinstance(unpacked, TypeVarTupleType):
             # Usually we say that TypeVarTuple can't be split, be in case of
-            # indexing it seems benign to just return the fallback item, similar
+            # indexing it seems benign to just return the upper bound item, similar
             # to what we do when indexing a regular TypeVar.
-            middle = unpacked.tuple_fallback.args[0]
+            bound = get_proper_type(unpacked.upper_bound)
+            assert isinstance(bound, Instance)
+            assert bound.type.fullname == "builtins.tuple"
+            middle = bound.args[0]
         else:
             assert isinstance(unpacked, Instance)
             assert unpacked.type.fullname == "builtins.tuple"
             middle = unpacked.args[0]
+
+        extra_items = self.min_tuple_length(left) - left.length() + 1
         if n >= 0:
-            if n < unpack_index:
-                return left.items[n]
-            if n >= len(left.items) - 1:
+            if n >= self.min_tuple_length(left):
                 # For tuple[int, *tuple[str, ...], int] we allow either index 0 or 1,
                 # since variadic item may have zero items.
                 return None
+            if n < unpack_index:
+                return left.items[n]
             return UnionType.make_union(
-                [middle] + left.items[unpack_index + 1 : n + 2], left.line, left.column
+                [middle]
+                + left.items[unpack_index + 1 : max(n - extra_items + 2, unpack_index + 1)],
+                left.line,
+                left.column,
             )
-        n += len(left.items)
-        if n <= 0:
+        n += self.min_tuple_length(left)
+        if n < 0:
             # Similar to above, we only allow -1, and -2 for tuple[int, *tuple[str, ...], int]
             return None
-        if n > unpack_index:
-            return left.items[n]
+        if n >= unpack_index + extra_items:
+            return left.items[n - extra_items + 1]
         return UnionType.make_union(
-            left.items[n - 1 : unpack_index] + [middle], left.line, left.column
+            left.items[min(n, unpack_index) : unpack_index] + [middle], left.line, left.column
         )
 
     def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type:
diff --git a/mypy/meet.py b/mypy/meet.py
index 0fa500d32c30..e3645c7b5879 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -221,6 +221,8 @@ def get_possible_variants(typ: Type) -> list[Type]:
             return [typ.upper_bound]
     elif isinstance(typ, ParamSpecType):
         return [typ.upper_bound]
+    elif isinstance(typ, TypeVarTupleType):
+        return [typ.upper_bound]
     elif isinstance(typ, UnionType):
         return list(typ.items)
     elif isinstance(typ, Overloaded):
@@ -694,8 +696,8 @@ def visit_param_spec(self, t: ParamSpecType) -> ProperType:
             return self.default(self.s)
 
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType:
-        if self.s == t:
-            return self.s
+        if isinstance(self.s, TypeVarTupleType) and self.s.id == t.id:
+            return self.s if self.s.min_len > t.min_len else t
         else:
             return self.default(self.s)
 
diff --git a/mypy/operators.py b/mypy/operators.py
index 07ec5a24fa77..d1f050b58fae 100644
--- a/mypy/operators.py
+++ b/mypy/operators.py
@@ -101,3 +101,26 @@
 reverse_op_method_set: Final = set(reverse_op_methods.values())
 
 unary_op_methods: Final = {"-": "__neg__", "+": "__pos__", "~": "__invert__"}
+
+int_op_to_method: Final = {
+    "==": int.__eq__,
+    "is": int.__eq__,
+    "<": int.__lt__,
+    "<=": int.__le__,
+    "!=": int.__ne__,
+    "is not": int.__ne__,
+    ">": int.__gt__,
+    ">=": int.__ge__,
+}
+
+flip_ops: Final = {"<": ">", "<=": ">=", ">": "<", ">=": "<="}
+neg_ops: Final = {
+    "==": "!=",
+    "!=": "==",
+    "is": "is not",
+    "is not": "is",
+    "<": ">=",
+    "<=": ">",
+    ">": "<=",
+    ">=": "<",
+}
diff --git a/mypy/options.py b/mypy/options.py
index 603ba79935ee..cb0464d4dc06 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -72,7 +72,8 @@ class BuildType:
 # Features that are currently incomplete/experimental
 TYPE_VAR_TUPLE: Final = "TypeVarTuple"
 UNPACK: Final = "Unpack"
-INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK))
+PRECISE_TUPLE_TYPES: Final = "PreciseTupleTypes"
+INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK, PRECISE_TUPLE_TYPES))
 
 
 class Options:
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 638553883dd8..b79e0e628849 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -640,7 +640,7 @@ def visit_param_spec(self, left: ParamSpecType) -> bool:
     def visit_type_var_tuple(self, left: TypeVarTupleType) -> bool:
         right = self.right
         if isinstance(right, TypeVarTupleType) and right.id == left.id:
-            return True
+            return left.min_len >= right.min_len
         return self._is_subtype(left.upper_bound, self.right)
 
     def visit_unpack_type(self, left: UnpackType) -> bool:
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
index 85fbe5dc2990..591421465a97 100644
--- a/mypy/test/testcheck.py
+++ b/mypy/test/testcheck.py
@@ -126,7 +126,7 @@ def run_case_once(
         options = parse_options(original_program_text, testcase, incremental_step)
         options.use_builtins_fixtures = True
         if not testcase.name.endswith("_no_incomplete"):
-            options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK]
+            options.enable_incomplete_feature += [TYPE_VAR_TUPLE, UNPACK]
         options.show_traceback = True
 
         # Enable some options automatically based on test file name.
diff --git a/mypy/typeops.py b/mypy/typeops.py
index 37817933a397..dff43775fe3d 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -981,7 +981,7 @@ def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool
         method = typ.type.get(name)
         if method and isinstance(method.node, (SYMBOL_FUNCBASE_TYPES, Decorator, Var)):
             if method.node.info:
-                return not method.node.info.fullname.startswith("builtins.")
+                return not method.node.info.fullname.startswith(("builtins.", "typing."))
         return False
     if isinstance(typ, UnionType):
         if check_all:
diff --git a/mypy/types.py b/mypy/types.py
index d0c19a08e60a..d08e9e7a890c 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -802,6 +802,8 @@ class TypeVarTupleType(TypeVarLikeType):
     See PEP646 for more information.
     """
 
+    __slots__ = ("tuple_fallback", "min_len")
+
     def __init__(
         self,
         name: str,
@@ -813,9 +815,13 @@ def __init__(
         *,
         line: int = -1,
         column: int = -1,
+        min_len: int = 0,
     ) -> None:
         super().__init__(name, fullname, id, upper_bound, default, line=line, column=column)
         self.tuple_fallback = tuple_fallback
+        # This value is not settable by a user. It is an internal-only thing to support
+        # len()-narrowing of variadic tuples.
+        self.min_len = min_len
 
     def serialize(self) -> JsonDict:
         assert not self.id.is_meta_var()
@@ -827,6 +833,7 @@ def serialize(self) -> JsonDict:
             "upper_bound": self.upper_bound.serialize(),
             "tuple_fallback": self.tuple_fallback.serialize(),
             "default": self.default.serialize(),
+            "min_len": self.min_len,
         }
 
     @classmethod
@@ -839,18 +846,19 @@ def deserialize(cls, data: JsonDict) -> TypeVarTupleType:
             deserialize_type(data["upper_bound"]),
             Instance.deserialize(data["tuple_fallback"]),
             deserialize_type(data["default"]),
+            min_len=data["min_len"],
         )
 
     def accept(self, visitor: TypeVisitor[T]) -> T:
         return visitor.visit_type_var_tuple(self)
 
     def __hash__(self) -> int:
-        return hash(self.id)
+        return hash((self.id, self.min_len))
 
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, TypeVarTupleType):
             return NotImplemented
-        return self.id == other.id
+        return self.id == other.id and self.min_len == other.min_len
 
     def copy_modified(
         self,
@@ -858,6 +866,7 @@ def copy_modified(
         id: Bogus[TypeVarId | int] = _dummy,
         upper_bound: Bogus[Type] = _dummy,
         default: Bogus[Type] = _dummy,
+        min_len: Bogus[int] = _dummy,
         **kwargs: Any,
     ) -> TypeVarTupleType:
         return TypeVarTupleType(
@@ -869,6 +878,7 @@ def copy_modified(
             self.default if default is _dummy else default,
             line=self.line,
             column=self.column,
+            min_len=self.min_len if min_len is _dummy else min_len,
         )
 
 
@@ -2354,7 +2364,18 @@ def can_be_false_default(self) -> bool:
             # Corner case: it is a `NamedTuple` with `__bool__` method defined.
             # It can be anything: both `True` and `False`.
             return True
-        return self.length() == 0
+        if self.length() == 0:
+            return True
+        if self.length() > 1:
+            return False
+        # Special case: tuple[*Ts] may or may not be false.
+        item = self.items[0]
+        if not isinstance(item, UnpackType):
+            return False
+        if not isinstance(item.type, TypeVarTupleType):
+            # Non-normalized tuple[int, ...] can be false.
+            return True
+        return item.type.min_len == 0
 
     def can_be_any_bool(self) -> bool:
         return bool(
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
index 6e1ad8187b7a..093926d4c415 100644
--- a/mypy_self_check.ini
+++ b/mypy_self_check.ini
@@ -10,6 +10,7 @@ python_version = 3.8
 exclude = mypy/typeshed/|mypyc/test-data/|mypyc/lib-rt/
 new_type_inference = True
 enable_error_code = ignore-without-code,redundant-expr
+enable_incomplete_feature = PreciseTupleTypes
 show_error_code_links = True
 
 [mypy-mypy.visitor]
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
index a3c1bc8795f2..4ac5512580d2 100644
--- a/test-data/unit/check-expressions.test
+++ b/test-data/unit/check-expressions.test
@@ -2365,6 +2365,19 @@ b"x" in data
 [builtins fixtures/primitives.pyi]
 [typing fixtures/typing-full.pyi]
 
+[case testStrictEqualityWithDifferentMapTypes]
+# flags: --strict-equality
+from typing import Mapping
+
+class A(Mapping[int, str]): ...
+class B(Mapping[int, str]): ...
+
+a: A
+b: B
+assert a == b
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
 [case testUnimportedHintAny]
 def f(x: Any) -> None:  # E: Name "Any" is not defined \
                         # N: Did you forget to import it from "typing"? (Suggestion: "from typing import Any")
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
index 6e3628060617..9fa098b28dee 100644
--- a/test-data/unit/check-namedtuple.test
+++ b/test-data/unit/check-namedtuple.test
@@ -878,7 +878,7 @@ reveal_type(Child.class_method())  # N: Revealed type is "Tuple[builtins.str, fa
 [builtins fixtures/classmethod.pyi]
 
 [case testNamedTupleAsConditionalStrictOptionalDisabled]
-# flags: --no-strict-optional
+# flags: --no-strict-optional --warn-unreachable
 from typing import NamedTuple
 
 class C(NamedTuple):
diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test
index c86cffd453df..5b7fadf41c79 100644
--- a/test-data/unit/check-narrowing.test
+++ b/test-data/unit/check-narrowing.test
@@ -1334,3 +1334,579 @@ if isinstance(some, raw):
 else:
     reveal_type(some)  # N: Revealed type is "Union[builtins.int, __main__.Base]"
 [builtins fixtures/dict.pyi]
+
+[case testNarrowingWithAnyOps]
+from typing import Any
+
+class C: ...
+class D(C): ...
+tp: Any
+
+c: C
+if isinstance(c, tp) or isinstance(c, D):
+    reveal_type(c)  # N: Revealed type is "Union[Any, __main__.D]"
+else:
+    reveal_type(c)  # N: Revealed type is "__main__.C"
+reveal_type(c)  # N: Revealed type is "__main__.C"
+
+c1: C
+if isinstance(c1, tp) and isinstance(c1, D):
+    reveal_type(c1)  # N: Revealed type is "Any"
+else:
+    reveal_type(c1)  # N: Revealed type is "__main__.C"
+reveal_type(c1)  # N: Revealed type is "__main__.C"
+
+c2: C
+if isinstance(c2, D) or isinstance(c2, tp):
+    reveal_type(c2)  # N: Revealed type is "Union[__main__.D, Any]"
+else:
+    reveal_type(c2)  # N: Revealed type is "__main__.C"
+reveal_type(c2)  # N: Revealed type is "__main__.C"
+
+c3: C
+if isinstance(c3, D) and isinstance(c3, tp):
+    reveal_type(c3)  # N: Revealed type is "Any"
+else:
+    reveal_type(c3)  # N: Revealed type is "__main__.C"
+reveal_type(c3)  # N: Revealed type is "__main__.C"
+
+t: Any
+if isinstance(t, (list, tuple)) and isinstance(t, tuple):
+    reveal_type(t)  # N: Revealed type is "builtins.tuple[Any, ...]"
+else:
+    reveal_type(t)  # N: Revealed type is "Any"
+reveal_type(t)  # N: Revealed type is "Any"
+[builtins fixtures/isinstancelist.pyi]
+
+[case testNarrowingLenItemAndLenCompare]
+from typing import Any
+
+x: Any
+if len(x) == x:
+    reveal_type(x) # N: Revealed type is "Any"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenTuple]
+from typing import Tuple, Union
+
+VarTuple = Union[Tuple[int, int], Tuple[int, int, int]]
+
+x: VarTuple
+a = b = c = 0
+if len(x) == 3:
+    a, b, c = x
+else:
+    a, b = x
+
+if len(x) != 3:
+    a, b = x
+else:
+    a, b, c = x
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenHomogeneousTuple]
+from typing import Tuple
+
+x: Tuple[int, ...]
+if len(x) == 3:
+    reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]"
+else:
+    reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+
+if len(x) != 3:
+    reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+else:
+    reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenTypeUnaffected]
+from typing import Union, List
+
+x: Union[str, List[int]]
+if len(x) == 3:
+    reveal_type(x) # N: Revealed type is "Union[builtins.str, builtins.list[builtins.int]]"
+else:
+    reveal_type(x) # N: Revealed type is "Union[builtins.str, builtins.list[builtins.int]]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenAnyListElseNotAffected]
+from typing import Any
+
+def f(self, value: Any) -> Any:
+    if isinstance(value, list) and len(value) == 0:
+        reveal_type(value) # N: Revealed type is "builtins.list[Any]"
+        return value
+    reveal_type(value) # N: Revealed type is "Any"
+    return None
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenMultiple]
+from typing import Tuple, Union
+
+VarTuple = Union[Tuple[int, int], Tuple[int, int, int]]
+
+x: VarTuple
+y: VarTuple
+if len(x) == len(y) == 3:
+    reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]"
+    reveal_type(y) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenFinal]
+from typing import Tuple, Union
+from typing_extensions import Final
+
+VarTuple = Union[Tuple[int, int], Tuple[int, int, int]]
+
+x: VarTuple
+fin: Final = 3
+if len(x) == fin:
+    reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenGreaterThan]
+from typing import Tuple, Union
+
+VarTuple = Union[Tuple[int], Tuple[int, int], Tuple[int, int, int]]
+
+x: VarTuple
+if len(x) > 1:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+else:
+    reveal_type(x) # N: Revealed type is "Tuple[builtins.int]"
+
+if len(x) < 2:
+    reveal_type(x) # N: Revealed type is "Tuple[builtins.int]"
+else:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+
+if len(x) >= 2:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+else:
+    reveal_type(x) # N: Revealed type is "Tuple[builtins.int]"
+
+if len(x) <= 2:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int]]"
+else:
+    reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenBothSidesUnionTuples]
+from typing import Tuple, Union
+
+VarTuple = Union[
+    Tuple[int],
+    Tuple[int, int],
+    Tuple[int, int, int],
+    Tuple[int, int, int, int],
+]
+
+x: VarTuple
+if 2 <= len(x) <= 3:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+else:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int, builtins.int, builtins.int]]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenGreaterThanHomogeneousTupleShort]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
+from typing import Tuple
+
+VarTuple = Tuple[int, ...]
+
+x: VarTuple
+if len(x) < 3:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[()], Tuple[builtins.int], Tuple[builtins.int, builtins.int]]"
+else:
+    reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]"
+reveal_type(x)  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenBiggerThanHomogeneousTupleLong]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
+from typing import Tuple
+
+VarTuple = Tuple[int, ...]
+
+x: VarTuple
+if len(x) < 30:
+    reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+else:
+    reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenBothSidesHomogeneousTuple]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
+from typing import Tuple
+
+x: Tuple[int, ...]
+if 1 < len(x) < 4:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+else:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[()], Tuple[builtins.int], Tuple[builtins.int, builtins.int, builtins.int, builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]]"
+reveal_type(x)  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenUnionTupleUnreachable]
+# flags: --warn-unreachable
+from typing import Tuple, Union
+
+x: Union[Tuple[int, int], Tuple[int, int, int]]
+if len(x) >= 4:
+    reveal_type(x) # E: Statement is unreachable
+else:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+
+if len(x) < 2:
+    reveal_type(x) # E: Statement is unreachable
+else:
+    reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenMixedTypes]
+from typing import Tuple, List, Union
+
+x: Union[Tuple[int, int], Tuple[int, int, int], List[int]]
+a = b = c = 0
+if len(x) == 3:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int, builtins.int], builtins.list[builtins.int]]"
+    a, b, c = x
+else:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], builtins.list[builtins.int]]"
+    a, b = x
+
+if len(x) != 3:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], builtins.list[builtins.int]]"
+    a, b = x
+else:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int, builtins.int], builtins.list[builtins.int]]"
+    a, b, c = x
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenTypeVarTupleEquals]
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+def foo(x: Tuple[int, Unpack[Ts], str]) -> None:
+    if len(x) == 5:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+
+    if len(x) != 5:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenTypeVarTupleGreaterThan]
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+def foo(x: Tuple[int, Unpack[Ts], str]) -> None:
+    if len(x) > 5:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+        reveal_type(x[5])  # N: Revealed type is "builtins.object"
+        reveal_type(x[-6])  # N: Revealed type is "builtins.object"
+        reveal_type(x[-1])  # N: Revealed type is "builtins.str"
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+
+    if len(x) < 5:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+        x[5]  # E: Tuple index out of range \
+              # N: Variadic tuple can have length 5
+        x[-6]  # E: Tuple index out of range \
+               # N: Variadic tuple can have length 5
+    x[2]  # E: Tuple index out of range \
+          # N: Variadic tuple can have length 2
+    x[-3]  # E: Tuple index out of range \
+           # N: Variadic tuple can have length 2
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenTypeVarTupleUnreachable]
+# flags: --warn-unreachable
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+def foo(x: Tuple[int, Unpack[Ts], str]) -> None:
+    if len(x) == 1:
+        reveal_type(x)  # E: Statement is unreachable
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+
+    if len(x) != 1:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+    else:
+        reveal_type(x)  # E: Statement is unreachable
+
+def bar(x: Tuple[int, Unpack[Ts], str]) -> None:
+    if len(x) >= 2:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+    else:
+        reveal_type(x)  # E: Statement is unreachable
+
+    if len(x) < 2:
+        reveal_type(x)  # E: Statement is unreachable
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenVariadicTupleEquals]
+from typing import Tuple
+from typing_extensions import Unpack
+
+def foo(x: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None:
+    if len(x) == 4:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.float, builtins.float, builtins.str]"
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+
+    if len(x) != 4:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.float, builtins.float, builtins.str]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenVariadicTupleGreaterThan]
+from typing import Tuple
+from typing_extensions import Unpack
+
+def foo(x: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None:
+    if len(x) > 3:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.float, builtins.float, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+    else:
+        reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.str], Tuple[builtins.int, builtins.float, builtins.str]]"
+    reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+
+    if len(x) < 3:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.float, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+    reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenVariadicTupleUnreachable]
+# flags: --warn-unreachable
+from typing import Tuple
+from typing_extensions import Unpack
+
+def foo(x: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None:
+    if len(x) == 1:
+        reveal_type(x)  # E: Statement is unreachable
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+
+    if len(x) != 1:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+    else:
+        reveal_type(x)  # E: Statement is unreachable
+
+def bar(x: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None:
+    if len(x) >= 2:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+    else:
+        reveal_type(x)  # E: Statement is unreachable
+
+    if len(x) < 2:
+        reveal_type(x)  # E: Statement is unreachable
+    else:
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenBareExpressionPrecise]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
+from typing import Tuple
+
+x: Tuple[int, ...]
+assert x
+reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenBareExpressionTypeVarTuple]
+from typing import Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+def test(*xs: Unpack[Ts]) -> None:
+    assert xs
+    xs[0]  # OK
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenBareExpressionWithNonePrecise]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
+from typing import Tuple, Optional
+
+x: Optional[Tuple[int, ...]]
+if x:
+    reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]"
+else:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[()], None]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenBareExpressionWithNoneImprecise]
+from typing import Tuple, Optional
+
+x: Optional[Tuple[int, ...]]
+if x:
+    reveal_type(x)  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+else:
+    reveal_type(x)  # N: Revealed type is "Union[builtins.tuple[builtins.int, ...], None]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenMixWithAnyPrecise]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
+from typing import Any
+
+x: Any
+if isinstance(x, (list, tuple)) and len(x) == 0:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[()], builtins.list[Any]]"
+else:
+    reveal_type(x)  # N: Revealed type is "Any"
+reveal_type(x)  # N: Revealed type is "Any"
+
+x1: Any
+if isinstance(x1, (list, tuple)) and len(x1) > 1:
+    reveal_type(x1)  # N: Revealed type is "Union[Tuple[Any, Any, Unpack[builtins.tuple[Any, ...]]], builtins.list[Any]]"
+else:
+    reveal_type(x1)  # N: Revealed type is "Any"
+reveal_type(x1)  # N: Revealed type is "Any"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenMixWithAnyImprecise]
+from typing import Any
+
+x: Any
+if isinstance(x, (list, tuple)) and len(x) == 0:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[()], builtins.list[Any]]"
+else:
+    reveal_type(x)  # N: Revealed type is "Any"
+reveal_type(x)  # N: Revealed type is "Any"
+
+x1: Any
+if isinstance(x1, (list, tuple)) and len(x1) > 1:
+    reveal_type(x1)  # N: Revealed type is "Union[builtins.tuple[Any, ...], builtins.list[Any]]"
+else:
+    reveal_type(x1)  # N: Revealed type is "Any"
+reveal_type(x1)  # N: Revealed type is "Any"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenExplicitLiteralTypes]
+from typing import Tuple, Union
+from typing_extensions import Literal
+
+VarTuple = Union[
+    Tuple[int],
+    Tuple[int, int],
+    Tuple[int, int, int],
+]
+x: VarTuple
+
+supported: Literal[2]
+if len(x) == supported:
+    reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.int]"
+else:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+
+not_supported_yet: Literal[2, 3]
+if len(x) == not_supported_yet:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+else:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenUnionOfVariadicTuples]
+from typing import Tuple, Union
+
+x: Union[Tuple[int, ...], Tuple[str, ...]]
+if len(x) == 2:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.str, builtins.str]]"
+else:
+    reveal_type(x)  # N: Revealed type is "Union[builtins.tuple[builtins.int, ...], builtins.tuple[builtins.str, ...]]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenUnionOfNamedTuples]
+from typing import NamedTuple, Union
+
+class Point2D(NamedTuple):
+    x: int
+    y: int
+class Point3D(NamedTuple):
+    x: int
+    y: int
+    z: int
+
+x: Union[Point2D, Point3D]
+if len(x) == 2:
+    reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.int, fallback=__main__.Point2D]"
+else:
+    reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int, fallback=__main__.Point3D]"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenTupleSubclass]
+from typing import Tuple
+
+class Ints(Tuple[int, ...]):
+    size: int
+
+x: Ints
+if len(x) == 2:
+    reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.int, fallback=__main__.Ints]"
+    reveal_type(x.size)  # N: Revealed type is "builtins.int"
+else:
+    reveal_type(x)  # N: Revealed type is "__main__.Ints"
+    reveal_type(x.size)  # N: Revealed type is "builtins.int"
+
+reveal_type(x)  # N: Revealed type is "__main__.Ints"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenTupleSubclassCustomNotAllowed]
+from typing import Tuple
+
+class Ints(Tuple[int, ...]):
+    def __len__(self) -> int:
+        return 0
+
+x: Ints
+if len(x) > 2:
+    reveal_type(x)  # N: Revealed type is "__main__.Ints"
+else:
+    reveal_type(x)  # N: Revealed type is "__main__.Ints"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenTupleSubclassPreciseNotAllowed]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
+from typing import Tuple
+
+class Ints(Tuple[int, ...]):
+    size: int
+
+x: Ints
+if len(x) > 2:
+    reveal_type(x)  # N: Revealed type is "__main__.Ints"
+else:
+    reveal_type(x)  # N: Revealed type is "__main__.Ints"
+[builtins fixtures/len.pyi]
+
+[case testNarrowingLenUnknownLen]
+from typing import Any, Tuple, Union
+
+x: Union[Tuple[int, int], Tuple[int, int, int]]
+
+n: int
+if len(x) == n:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+else:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+
+a: Any
+if len(x) == a:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+else:
+    reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
+[builtins fixtures/len.pyi]
diff --git a/test-data/unit/fixtures/len.pyi b/test-data/unit/fixtures/len.pyi
new file mode 100644
index 000000000000..c72596661858
--- /dev/null
+++ b/test-data/unit/fixtures/len.pyi
@@ -0,0 +1,39 @@
+from typing import Tuple, TypeVar, Generic, Union, Type, Sequence, Mapping
+from typing_extensions import Protocol
+
+T = TypeVar("T")
+V = TypeVar("V")
+
+class object:
+    def __init__(self) -> None: pass
+
+class type:
+    def __init__(self, x) -> None: pass
+
+class tuple(Generic[T]):
+    def __len__(self) -> int: pass
+
+class list(Sequence[T]): pass
+class dict(Mapping[T, V]): pass
+
+class function: pass
+
+class Sized(Protocol):
+    def __len__(self) -> int: pass
+
+def len(__obj: Sized) -> int: ...
+def isinstance(x: object, t: Union[Type[object], Tuple[Type[object], ...]]) -> bool: pass
+
+class int:
+    def __add__(self, other: int) -> int: pass
+    def __eq__(self, other: int) -> bool: pass
+    def __ne__(self, other: int) -> bool: pass
+    def __lt__(self, n: int) -> bool: pass
+    def __gt__(self, n: int) -> bool: pass
+    def __le__(self, n: int) -> bool: pass
+    def __ge__(self, n: int) -> bool: pass
+    def __neg__(self) -> int: pass
+class float: pass
+class bool(int): pass
+class str(Sequence[str]): pass
+class ellipsis: pass
diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi
index b35b64a383c9..5f458ca687c0 100644
--- a/test-data/unit/lib-stub/typing.pyi
+++ b/test-data/unit/lib-stub/typing.pyi
@@ -48,6 +48,7 @@ class Generator(Iterator[T], Generic[T, U, V]):
 
 class Sequence(Iterable[T_co]):
     def __getitem__(self, n: Any) -> T_co: pass
+    def __len__(self) -> int: pass
 
 # Mapping type is oversimplified intentionally.
 class Mapping(Iterable[T], Generic[T, T_co]):

From 27c4b462aa4cf269397253eca7a88e7fbbf4e43e Mon Sep 17 00:00:00 2001
From: Ali Hamdan <ali.hamdan.dev@gmail.com>
Date: Sat, 21 Oct 2023 22:25:20 +0200
Subject: [PATCH 191/288] stubgen: fix missing property setter in semantic
 analysis mode (#16303)

The semantic analyzer treats properties as overloaded functions. This
was previously ignored by stubgen but regressed in #15232.
This PR restores the original behavior.

Fixes #16300
---
 mypy/stubgen.py             |  1 +
 mypy/stubutil.py            |  2 --
 test-data/unit/stubgen.test | 18 ++++++++++++++++++
 3 files changed, 19 insertions(+), 2 deletions(-)

diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index 395a49fa4e08..a2f07a35eaa2 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -633,6 +633,7 @@ def process_decorator(self, o: Decorator) -> None:
 
         Only preserve certain special decorators such as @abstractmethod.
         """
+        o.func.is_overload = False
         for decorator in o.original_decorators:
             if not isinstance(decorator, (NameExpr, MemberExpr)):
                 continue
diff --git a/mypy/stubutil.py b/mypy/stubutil.py
index 22e525c14e7c..cc3b63098fd2 100644
--- a/mypy/stubutil.py
+++ b/mypy/stubutil.py
@@ -669,8 +669,6 @@ def set_defined_names(self, defined_names: set[str]) -> None:
                 self.add_name(f"{pkg}.{t}", require=False)
 
     def check_undefined_names(self) -> None:
-        print(self._all_)
-        print(self._toplevel_names)
         undefined_names = [name for name in self._all_ or [] if name not in self._toplevel_names]
         if undefined_names:
             if self._output:
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index d83d74306230..64a1353b29b3 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -377,6 +377,24 @@ class A:
     def f(self, x) -> None: ...
     def h(self) -> None: ...
 
+[case testProperty_semanal]
+class A:
+    @property
+    def f(self):
+        return 1
+    @f.setter
+    def f(self, x): ...
+
+    def h(self):
+        self.f = 1
+[out]
+class A:
+    @property
+    def f(self): ...
+    @f.setter
+    def f(self, x) -> None: ...
+    def h(self) -> None: ...
+
 -- a read/write property is treated the same as an attribute
 [case testProperty_inspect]
 class A:

From 2d54024cb44556302b40fed6e0bd40fd9ef56563 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Sat, 21 Oct 2023 17:06:42 -0700
Subject: [PATCH 192/288] [mypyc] Don't crash on unreachable statements
 (#16311)

Skip them instead. This applies to statements after break, continue,
return, and raise statements.

It's common to have unreachable statements temporarily while working on
a half-finished change, so generating an error is perhaps not the best
option.

Fixes mypyc/mypyc#1028.
---
 mypyc/irbuild/builder.py                 |  11 ++
 mypyc/irbuild/statement.py               |   5 +
 mypyc/irbuild/visitor.py                 |   4 +
 mypyc/test-data/irbuild-unreachable.test | 137 ++++++++++++++++++++++-
 4 files changed, 156 insertions(+), 1 deletion(-)

diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py
index 0757415f6753..573ca334a5d1 100644
--- a/mypyc/irbuild/builder.py
+++ b/mypyc/irbuild/builder.py
@@ -165,6 +165,9 @@ def __init__(
         self.runtime_args: list[list[RuntimeArg]] = [[]]
         self.function_name_stack: list[str] = []
         self.class_ir_stack: list[ClassIR] = []
+        # Keep track of whether the next statement in a block is reachable
+        # or not, separately for each block nesting level
+        self.block_reachable_stack: list[bool] = [True]
 
         self.current_module = current_module
         self.mapper = mapper
@@ -1302,6 +1305,14 @@ def is_native_attr_ref(self, expr: MemberExpr) -> bool:
             and not obj_rtype.class_ir.get_method(expr.name)
         )
 
+    def mark_block_unreachable(self) -> None:
+        """Mark statements in the innermost block being processed as unreachable.
+
+        This should be called after a statement that unconditionally leaves the
+        block, such as 'break' or 'return'.
+        """
+        self.block_reachable_stack[-1] = False
+
     # Lacks a good type because there wasn't a reasonable type in 3.5 :(
     def catch_errors(self, line: int) -> Any:
         return catch_errors(self.module_path, line)
diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py
index d7e01456139d..2c17eb2bb14d 100644
--- a/mypyc/irbuild/statement.py
+++ b/mypyc/irbuild/statement.py
@@ -118,8 +118,13 @@
 
 def transform_block(builder: IRBuilder, block: Block) -> None:
     if not block.is_unreachable:
+        builder.block_reachable_stack.append(True)
         for stmt in block.body:
             builder.accept(stmt)
+            if not builder.block_reachable_stack[-1]:
+                # The rest of the block is unreachable, so skip it
+                break
+        builder.block_reachable_stack.pop()
     # Raise a RuntimeError if we hit a non-empty unreachable block.
     # Don't complain about empty unreachable blocks, since mypy inserts
     # those after `if MYPY`.
diff --git a/mypyc/irbuild/visitor.py b/mypyc/irbuild/visitor.py
index d8725ee04dc5..12e186fd40d8 100644
--- a/mypyc/irbuild/visitor.py
+++ b/mypyc/irbuild/visitor.py
@@ -194,6 +194,7 @@ def visit_expression_stmt(self, stmt: ExpressionStmt) -> None:
 
     def visit_return_stmt(self, stmt: ReturnStmt) -> None:
         transform_return_stmt(self.builder, stmt)
+        self.builder.mark_block_unreachable()
 
     def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None:
         transform_assignment_stmt(self.builder, stmt)
@@ -212,12 +213,15 @@ def visit_for_stmt(self, stmt: ForStmt) -> None:
 
     def visit_break_stmt(self, stmt: BreakStmt) -> None:
         transform_break_stmt(self.builder, stmt)
+        self.builder.mark_block_unreachable()
 
     def visit_continue_stmt(self, stmt: ContinueStmt) -> None:
         transform_continue_stmt(self.builder, stmt)
+        self.builder.mark_block_unreachable()
 
     def visit_raise_stmt(self, stmt: RaiseStmt) -> None:
         transform_raise_stmt(self.builder, stmt)
+        self.builder.mark_block_unreachable()
 
     def visit_try_stmt(self, stmt: TryStmt) -> None:
         transform_try_stmt(self.builder, stmt)
diff --git a/mypyc/test-data/irbuild-unreachable.test b/mypyc/test-data/irbuild-unreachable.test
index 1c024a249bf1..b5188c91ac58 100644
--- a/mypyc/test-data/irbuild-unreachable.test
+++ b/mypyc/test-data/irbuild-unreachable.test
@@ -1,4 +1,4 @@
-# Test cases for unreachable expressions
+# Test cases for unreachable expressions and statements
 
 [case testUnreachableMemberExpr]
 import sys
@@ -104,3 +104,138 @@ L5:
 L6:
     y = r11
     return 1
+
+[case testUnreachableStatementAfterReturn]
+def f(x: bool) -> int:
+    if x:
+        return 1
+        f(False)
+    return 2
+[out]
+def f(x):
+    x :: bool
+L0:
+    if x goto L1 else goto L2 :: bool
+L1:
+    return 2
+L2:
+    return 4
+
+[case testUnreachableStatementAfterContinue]
+def c() -> bool:
+    return False
+
+def f() -> None:
+    n = True
+    while n:
+        if c():
+            continue
+            if int():
+                f()
+        n = False
+[out]
+def c():
+L0:
+    return 0
+def f():
+    n, r0 :: bool
+L0:
+    n = 1
+L1:
+    if n goto L2 else goto L5 :: bool
+L2:
+    r0 = c()
+    if r0 goto L3 else goto L4 :: bool
+L3:
+    goto L1
+L4:
+    n = 0
+    goto L1
+L5:
+    return 1
+
+[case testUnreachableStatementAfterBreak]
+def c() -> bool:
+    return False
+
+def f() -> None:
+    n = True
+    while n:
+        if c():
+            break
+            if int():
+                f()
+        n = False
+[out]
+def c():
+L0:
+    return 0
+def f():
+    n, r0 :: bool
+L0:
+    n = 1
+L1:
+    if n goto L2 else goto L5 :: bool
+L2:
+    r0 = c()
+    if r0 goto L3 else goto L4 :: bool
+L3:
+    goto L5
+L4:
+    n = 0
+    goto L1
+L5:
+    return 1
+
+[case testUnreachableStatementAfterRaise]
+def f(x: bool) -> int:
+    if x:
+        raise ValueError()
+        print('hello')
+    return 2
+[out]
+def f(x):
+    x :: bool
+    r0 :: object
+    r1 :: str
+    r2, r3 :: object
+L0:
+    if x goto L1 else goto L2 :: bool
+L1:
+    r0 = builtins :: module
+    r1 = 'ValueError'
+    r2 = CPyObject_GetAttr(r0, r1)
+    r3 = PyObject_CallFunctionObjArgs(r2, 0)
+    CPy_Raise(r3)
+    unreachable
+L2:
+    return 4
+
+[case testUnreachableStatementAfterAssertFalse]
+def f(x: bool) -> int:
+    if x:
+        assert False
+        print('hello')
+    return 2
+[out]
+def f(x):
+    x, r0 :: bool
+    r1 :: str
+    r2 :: object
+    r3 :: str
+    r4, r5 :: object
+L0:
+    if x goto L1 else goto L4 :: bool
+L1:
+    if 0 goto L3 else goto L2 :: bool
+L2:
+    r0 = raise AssertionError
+    unreachable
+L3:
+    r1 = 'hello'
+    r2 = builtins :: module
+    r3 = 'print'
+    r4 = CPyObject_GetAttr(r2, r3)
+    r5 = PyObject_CallFunctionObjArgs(r4, r1, 0)
+L4:
+    return 4

From 341929b10df327796ef60da4837b907d6af1b7d9 Mon Sep 17 00:00:00 2001
From: Ihor <31508183+nautics889@users.noreply.github.com>
Date: Mon, 23 Oct 2023 08:16:58 +0300
Subject: [PATCH 193/288] refactor: `__str__` in `CFG` class (#16307) (#16308)

Closes https://github.com/python/mypy/issues/16307.
---
 mypyc/analysis/dataflow.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/mypyc/analysis/dataflow.py b/mypyc/analysis/dataflow.py
index cade0c823962..57ad2b17fcc5 100644
--- a/mypyc/analysis/dataflow.py
+++ b/mypyc/analysis/dataflow.py
@@ -72,11 +72,8 @@ def __init__(
         self.exits = exits
 
     def __str__(self) -> str:
-        lines = []
-        lines.append("exits: %s" % sorted(self.exits, key=lambda e: int(e.label)))
-        lines.append("succ: %s" % self.succ)
-        lines.append("pred: %s" % self.pred)
-        return "\n".join(lines)
+        exits = sorted(self.exits, key=lambda e: int(e.label))
+        return f"exits: {exits}\nsucc: {self.succ}\npred: {self.pred}"
 
 
 def get_cfg(blocks: list[BasicBlock]) -> CFG:

From cda163d378d6f85627b72454918cba323bf37749 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 23 Oct 2023 06:48:37 +0100
Subject: [PATCH 194/288] Clarify variance convention for Parameters (#16302)

Fixes https://github.com/python/mypy/issues/16296

In my big refactoring I flipped the variance convention for the
`Parameters` type, but I did it inconsistently in one place. After
working some more with ParamSpecs, it now seems to me the original
convention is easier to remember. I also now explicitly put it in the
type docstring.
---
 mypy/constraints.py                           |  9 ++----
 mypy/join.py                                  | 13 ++++++---
 mypy/meet.py                                  |  6 ++--
 mypy/subtypes.py                              |  2 --
 mypy/types.py                                 |  5 +++-
 .../unit/check-parameter-specification.test   | 29 ++++++++++++++++++-
 6 files changed, 47 insertions(+), 17 deletions(-)

diff --git a/mypy/constraints.py b/mypy/constraints.py
index 58d0f4dbed29..7d782551b261 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -692,11 +692,8 @@ def visit_parameters(self, template: Parameters) -> list[Constraint]:
             return self.infer_against_any(template.arg_types, self.actual)
         if type_state.infer_polymorphic and isinstance(self.actual, Parameters):
             # For polymorphic inference we need to be able to infer secondary constraints
-            # in situations like [x: T] <: P <: [x: int]. Note we invert direction, since
-            # this function expects direction between callables.
-            return infer_callable_arguments_constraints(
-                template, self.actual, neg_op(self.direction)
-            )
+            # in situations like [x: T] <: P <: [x: int].
+            return infer_callable_arguments_constraints(template, self.actual, self.direction)
         raise RuntimeError("Parameters cannot be constrained to")
 
     # Non-leaf types
@@ -1128,7 +1125,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                             )
                         )
                 if param_spec_target is not None:
-                    res.append(Constraint(param_spec, neg_op(self.direction), param_spec_target))
+                    res.append(Constraint(param_spec, self.direction, param_spec_target))
             if extra_tvars:
                 for c in res:
                     c.extra_tvars += cactual.variables
diff --git a/mypy/join.py b/mypy/join.py
index e4429425d98a..2e2939f9fbc8 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -350,10 +350,13 @@ def visit_parameters(self, t: Parameters) -> ProperType:
         if isinstance(self.s, Parameters):
             if len(t.arg_types) != len(self.s.arg_types):
                 return self.default(self.s)
+            from mypy.meet import meet_types
+
             return t.copy_modified(
-                # Note that since during constraint inference we already treat whole ParamSpec as
-                # contravariant, we should join individual items, not meet them like for Callables
-                arg_types=[join_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)]
+                arg_types=[
+                    meet_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)
+                ],
+                arg_names=combine_arg_names(self.s, t),
             )
         else:
             return self.default(self.s)
@@ -754,7 +757,9 @@ def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType:
     )
 
 
-def combine_arg_names(t: CallableType, s: CallableType) -> list[str | None]:
+def combine_arg_names(
+    t: CallableType | Parameters, s: CallableType | Parameters
+) -> list[str | None]:
     """Produces a list of argument names compatible with both callables.
 
     For example, suppose 't' and 's' have the following signatures:
diff --git a/mypy/meet.py b/mypy/meet.py
index e3645c7b5879..1a566aed17de 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -708,10 +708,10 @@ def visit_parameters(self, t: Parameters) -> ProperType:
         if isinstance(self.s, Parameters):
             if len(t.arg_types) != len(self.s.arg_types):
                 return self.default(self.s)
+            from mypy.join import join_types
+
             return t.copy_modified(
-                # Note that since during constraint inference we already treat whole ParamSpec as
-                # contravariant, we should meet individual items, not join them like for Callables
-                arg_types=[meet_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)]
+                arg_types=[join_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)]
             )
         else:
             return self.default(self.s)
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index b79e0e628849..2ca3357dd722 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -654,8 +654,6 @@ def visit_unpack_type(self, left: UnpackType) -> bool:
 
     def visit_parameters(self, left: Parameters) -> bool:
         if isinstance(self.right, Parameters):
-            # TODO: direction here should be opposite, this function expects
-            # order of callables, while parameters are contravariant.
             return are_parameters_compatible(
                 left,
                 self.right,
diff --git a/mypy/types.py b/mypy/types.py
index d08e9e7a890c..ae1a1f595fa2 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -1562,7 +1562,10 @@ class FormalArgument(NamedTuple):
 class Parameters(ProperType):
     """Type that represents the parameters to a function.
 
-    Used for ParamSpec analysis."""
+    Used for ParamSpec analysis. Note that by convention we handle this
+    type as a Callable without return type, not as a "tuple with names",
+    so that it behaves contravariantly, in particular [x: int] <: [int].
+    """
 
     __slots__ = (
         "arg_types",
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index 48fadbc96c90..db8c76fd21e9 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1403,7 +1403,7 @@ def wrong_name_constructor(b: bool) -> SomeClass:
 func(SomeClass, constructor)
 reveal_type(func(SomeClass, wrong_constructor))  # N: Revealed type is "def (a: Never) -> __main__.SomeClass"
 reveal_type(func_regular(SomeClass, wrong_constructor))  # N: Revealed type is "def (Never) -> __main__.SomeClass"
-func(SomeClass, wrong_name_constructor)  # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[Never], SomeClass]"
+reveal_type(func(SomeClass, wrong_name_constructor))  # N: Revealed type is "def (Never) -> __main__.SomeClass"
 [builtins fixtures/paramspec.pyi]
 
 [case testParamSpecInTypeAliasBasic]
@@ -2059,3 +2059,30 @@ def test2(x: int, y: int) -> str: ...
 reveal_type(call(test1, 1))  # N: Revealed type is "builtins.str"
 reveal_type(call(test2, 1, 2))  # N: Revealed type is "builtins.str"
 [builtins fixtures/paramspec.pyi]
+
+[case testParamSpecCorrectParameterNameInference]
+from typing import Callable, Protocol
+from typing_extensions import ParamSpec, Concatenate
+
+def a(i: int) -> None: ...
+def b(__i: int) -> None: ...
+
+class WithName(Protocol):
+    def __call__(self, i: int) -> None: ...
+NoName = Callable[[int], None]
+
+def f1(__fn: WithName, i: int) -> None: ...
+def f2(__fn: NoName, i: int) -> None: ...
+
+P = ParamSpec("P")
+def d(f: Callable[P, None], fn: Callable[Concatenate[Callable[P, None], P], None]) -> Callable[P, None]:
+    def inner(*args: P.args, **kwargs: P.kwargs) -> None:
+        fn(f, *args, **kwargs)
+    return inner
+
+reveal_type(d(a, f1))  # N: Revealed type is "def (i: builtins.int)"
+reveal_type(d(a, f2))  # N: Revealed type is "def (i: builtins.int)"
+reveal_type(d(b, f1))  # E: Cannot infer type argument 1 of "d" \
+                       # N: Revealed type is "def (*Any, **Any)"
+reveal_type(d(b, f2))  # N: Revealed type is "def (builtins.int)"
+[builtins fixtures/paramspec.pyi]

From 8236c93d899fa5225eb23644db802cf1e09196a7 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Mon, 23 Oct 2023 15:52:42 +0300
Subject: [PATCH 195/288] Add `|=` and `|` operators support for `TypedDict`
 (#16249)

Please note that there are several problems with `__ror__` definitions.
1. `dict.__ror__` does not define support for `Mapping?` types. For
example:
```python
>>> import types
>>> {'a': 1} | types.MappingProxyType({'b': 2})
{'a': 1, 'b': 2}
>>>
```
2. `TypedDict.__ror__` also does not define this support

So, I would like to defer this feature for the future, we need some
discussion to happen.
However, this PR does fully solve the problem OP had.

Closes https://github.com/python/mypy/issues/16244

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 mypy/checker.py                               |  19 ++-
 mypy/checkexpr.py                             |  55 ++++++-
 mypy/plugins/default.py                       |  22 ++-
 test-data/unit/check-typeddict.test           | 143 ++++++++++++++++++
 test-data/unit/fixtures/dict.pyi              |  19 ++-
 test-data/unit/fixtures/typing-async.pyi      |   1 +
 test-data/unit/fixtures/typing-full.pyi       |   1 +
 test-data/unit/fixtures/typing-medium.pyi     |   1 +
 .../unit/fixtures/typing-typeddict-iror.pyi   |  66 ++++++++
 9 files changed, 316 insertions(+), 11 deletions(-)
 create mode 100644 test-data/unit/fixtures/typing-typeddict-iror.pyi

diff --git a/mypy/checker.py b/mypy/checker.py
index 02bab37aa13f..64bbbfa0a55b 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -7783,14 +7783,25 @@ def infer_operator_assignment_method(typ: Type, operator: str) -> tuple[bool, st
     """
     typ = get_proper_type(typ)
     method = operators.op_methods[operator]
+    existing_method = None
     if isinstance(typ, Instance):
-        if operator in operators.ops_with_inplace_method:
-            inplace_method = "__i" + method[2:]
-            if typ.type.has_readable_member(inplace_method):
-                return True, inplace_method
+        existing_method = _find_inplace_method(typ, method, operator)
+    elif isinstance(typ, TypedDictType):
+        existing_method = _find_inplace_method(typ.fallback, method, operator)
+
+    if existing_method is not None:
+        return True, existing_method
     return False, method
 
 
+def _find_inplace_method(inst: Instance, method: str, operator: str) -> str | None:
+    if operator in operators.ops_with_inplace_method:
+        inplace_method = "__i" + method[2:]
+        if inst.type.has_readable_member(inplace_method):
+            return inplace_method
+    return None
+
+
 def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool:
     """Is an inferred type valid and needs no further refinement?
 
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 2dc5a93a1de9..18c1c570ba91 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -2,12 +2,13 @@
 
 from __future__ import annotations
 
+import enum
 import itertools
 import time
 from collections import defaultdict
 from contextlib import contextmanager
 from typing import Callable, ClassVar, Final, Iterable, Iterator, List, Optional, Sequence, cast
-from typing_extensions import TypeAlias as _TypeAlias, overload
+from typing_extensions import TypeAlias as _TypeAlias, assert_never, overload
 
 import mypy.checker
 import mypy.errorcodes as codes
@@ -277,6 +278,20 @@ class Finished(Exception):
     """Raised if we can terminate overload argument check early (no match)."""
 
 
+@enum.unique
+class UseReverse(enum.Enum):
+    """Used in `visit_op_expr` to enable or disable reverse method checks."""
+
+    DEFAULT = 0
+    ALWAYS = 1
+    NEVER = 2
+
+
+USE_REVERSE_DEFAULT: Final = UseReverse.DEFAULT
+USE_REVERSE_ALWAYS: Final = UseReverse.ALWAYS
+USE_REVERSE_NEVER: Final = UseReverse.NEVER
+
+
 class ExpressionChecker(ExpressionVisitor[Type]):
     """Expression type checker.
 
@@ -3371,6 +3386,24 @@ def visit_op_expr(self, e: OpExpr) -> Type:
                         return proper_left_type.copy_modified(
                             items=proper_left_type.items + [UnpackType(mapped)]
                         )
+
+        use_reverse: UseReverse = USE_REVERSE_DEFAULT
+        if e.op == "|":
+            if is_named_instance(proper_left_type, "builtins.dict"):
+                # This is a special case for `dict | TypedDict`.
+                # 1. Find `dict | TypedDict` case
+                # 2. Switch `dict.__or__` to `TypedDict.__ror__` (the same from both runtime and typing perspective)
+                proper_right_type = get_proper_type(self.accept(e.right))
+                if isinstance(proper_right_type, TypedDictType):
+                    use_reverse = USE_REVERSE_ALWAYS
+            if isinstance(proper_left_type, TypedDictType):
+                # This is the reverse case: `TypedDict | dict`,
+                # simply do not allow the reverse checking:
+                # do not call `__dict__.__ror__`.
+                proper_right_type = get_proper_type(self.accept(e.right))
+                if is_named_instance(proper_right_type, "builtins.dict"):
+                    use_reverse = USE_REVERSE_NEVER
+
         if TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature:
             # Handle tuple[X, ...] + tuple[Y, Z] = tuple[*tuple[X, ...], Y, Z].
             if (
@@ -3390,7 +3423,25 @@ def visit_op_expr(self, e: OpExpr) -> Type:
 
         if e.op in operators.op_methods:
             method = operators.op_methods[e.op]
-            result, method_type = self.check_op(method, left_type, e.right, e, allow_reverse=True)
+            if use_reverse is UseReverse.DEFAULT or use_reverse is UseReverse.NEVER:
+                result, method_type = self.check_op(
+                    method,
+                    base_type=left_type,
+                    arg=e.right,
+                    context=e,
+                    allow_reverse=use_reverse is UseReverse.DEFAULT,
+                )
+            elif use_reverse is UseReverse.ALWAYS:
+                result, method_type = self.check_op(
+                    # The reverse operator here gives better error messages:
+                    operators.reverse_op_methods[method],
+                    base_type=self.accept(e.right),
+                    arg=e.left,
+                    context=e,
+                    allow_reverse=False,
+                )
+            else:
+                assert_never(use_reverse)
             e.method_type = method_type
             return result
         else:
diff --git a/mypy/plugins/default.py b/mypy/plugins/default.py
index b60fc3873c04..ddcc37f465fe 100644
--- a/mypy/plugins/default.py
+++ b/mypy/plugins/default.py
@@ -74,12 +74,21 @@ def get_method_signature_hook(
             return typed_dict_setdefault_signature_callback
         elif fullname in {n + ".pop" for n in TPDICT_FB_NAMES}:
             return typed_dict_pop_signature_callback
-        elif fullname in {n + ".update" for n in TPDICT_FB_NAMES}:
-            return typed_dict_update_signature_callback
         elif fullname == "_ctypes.Array.__setitem__":
             return ctypes.array_setitem_callback
         elif fullname == singledispatch.SINGLEDISPATCH_CALLABLE_CALL_METHOD:
             return singledispatch.call_singledispatch_function_callback
+
+        typed_dict_updates = set()
+        for n in TPDICT_FB_NAMES:
+            typed_dict_updates.add(n + ".update")
+            typed_dict_updates.add(n + ".__or__")
+            typed_dict_updates.add(n + ".__ror__")
+            typed_dict_updates.add(n + ".__ior__")
+
+        if fullname in typed_dict_updates:
+            return typed_dict_update_signature_callback
+
         return None
 
     def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
@@ -401,11 +410,16 @@ def typed_dict_delitem_callback(ctx: MethodContext) -> Type:
 
 
 def typed_dict_update_signature_callback(ctx: MethodSigContext) -> CallableType:
-    """Try to infer a better signature type for TypedDict.update."""
+    """Try to infer a better signature type for methods that update `TypedDict`.
+
+    This includes: `TypedDict.update`, `TypedDict.__or__`, `TypedDict.__ror__`,
+    and `TypedDict.__ior__`.
+    """
     signature = ctx.default_signature
     if isinstance(ctx.type, TypedDictType) and len(signature.arg_types) == 1:
         arg_type = get_proper_type(signature.arg_types[0])
-        assert isinstance(arg_type, TypedDictType)
+        if not isinstance(arg_type, TypedDictType):
+            return signature
         arg_type = arg_type.as_anonymous()
         arg_type = arg_type.copy_modified(required_keys=set())
         if ctx.args and ctx.args[0]:
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index 7ee9ef0b708b..0e1d800e0468 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -3236,3 +3236,146 @@ def foo(x: int) -> Foo: ...
 f: Foo = {**foo("no")}  # E: Argument 1 to "foo" has incompatible type "str"; expected "int"
 [builtins fixtures/dict.pyi]
 [typing fixtures/typing-typeddict.pyi]
+
+
+[case testTypedDictWith__or__method]
+from typing import Dict
+from mypy_extensions import TypedDict
+
+class Foo(TypedDict):
+    key: int
+
+foo1: Foo = {'key': 1}
+foo2: Foo = {'key': 2}
+
+reveal_type(foo1 | foo2)  # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})"
+reveal_type(foo1 | {'key': 1})  # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})"
+reveal_type(foo1 | {'key': 'a'})  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+reveal_type(foo1 | {})  # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})"
+
+d1: Dict[str, int]
+d2: Dict[int, str]
+
+reveal_type(foo1 | d1)  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+foo1 | d2  # E: Unsupported operand types for | ("Foo" and "Dict[int, str]")
+
+
+class Bar(TypedDict):
+    key: int
+    value: str
+
+bar: Bar
+reveal_type(bar | {})  # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})"
+reveal_type(bar | {'key': 1, 'value': 'v'})  # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})"
+reveal_type(bar | {'key': 1})  # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})"
+reveal_type(bar | {'value': 'v'})  # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})"
+reveal_type(bar | {'key': 'a'})  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+reveal_type(bar | {'value': 1})  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+reveal_type(bar | {'key': 'a', 'value': 1})  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+
+reveal_type(bar | foo1)  # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})"
+reveal_type(bar | d1)  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+bar | d2  # E: Unsupported operand types for | ("Bar" and "Dict[int, str]")
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-typeddict-iror.pyi]
+
+[case testTypedDictWith__or__method_error]
+from mypy_extensions import TypedDict
+
+class Foo(TypedDict):
+    key: int
+
+foo: Foo = {'key': 1}
+foo | 1
+
+class SubDict(dict): ...
+foo | SubDict()
+[out]
+main:7: error: No overload variant of "__or__" of "TypedDict" matches argument type "int"
+main:7: note: Possible overload variants:
+main:7: note:     def __or__(self, TypedDict({'key'?: int}), /) -> Foo
+main:7: note:     def __or__(self, Dict[str, Any], /) -> Dict[str, object]
+main:10: error: No overload variant of "__ror__" of "dict" matches argument type "Foo"
+main:10: note: Possible overload variants:
+main:10: note:     def __ror__(self, Dict[Any, Any], /) -> Dict[Any, Any]
+main:10: note:     def [T, T2] __ror__(self, Dict[T, T2], /) -> Dict[Union[Any, T], Union[Any, T2]]
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-typeddict-iror.pyi]
+
+[case testTypedDictWith__ror__method]
+from typing import Dict
+from mypy_extensions import TypedDict
+
+class Foo(TypedDict):
+    key: int
+
+foo: Foo = {'key': 1}
+
+reveal_type({'key': 1} | foo)  # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})"
+reveal_type({'key': 'a'} | foo)  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+reveal_type({} | foo)  # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})"
+{1: 'a'} | foo  # E: Dict entry 0 has incompatible type "int": "str"; expected "str": "Any"
+
+d1: Dict[str, int]
+d2: Dict[int, str]
+
+reveal_type(d1 | foo)  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+d2 | foo  # E: Unsupported operand types for | ("Dict[int, str]" and "Foo")
+1 | foo  # E: Unsupported left operand type for | ("int")
+
+
+class Bar(TypedDict):
+    key: int
+    value: str
+
+bar: Bar
+reveal_type({} | bar)  # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})"
+reveal_type({'key': 1, 'value': 'v'} | bar)  # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})"
+reveal_type({'key': 1} | bar)  # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})"
+reveal_type({'value': 'v'} | bar)  # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})"
+reveal_type({'key': 'a'} | bar)  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+reveal_type({'value': 1} | bar)  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+reveal_type({'key': 'a', 'value': 1} | bar)  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+
+reveal_type(d1 | bar)  # N: Revealed type is "builtins.dict[builtins.str, builtins.object]"
+d2 | bar  # E: Unsupported operand types for | ("Dict[int, str]" and "Bar")
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-typeddict-iror.pyi]
+
+[case testTypedDictWith__ior__method]
+from typing import Dict
+from mypy_extensions import TypedDict
+
+class Foo(TypedDict):
+    key: int
+
+foo: Foo = {'key': 1}
+foo |= {'key': 2}
+
+foo |= {}
+foo |= {'key': 'a', 'b': 'a'}  # E: Expected TypedDict key "key" but found keys ("key", "b")  \
+                               # E: Incompatible types (expression has type "str", TypedDict item "key" has type "int")
+foo |= {'b': 2}  # E: Unexpected TypedDict key "b"
+
+d1: Dict[str, int]
+d2: Dict[int, str]
+
+foo |= d1  # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Dict[str, int]"; expected "TypedDict({'key'?: int})"
+foo |= d2  # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Dict[int, str]"; expected "TypedDict({'key'?: int})"
+
+
+class Bar(TypedDict):
+    key: int
+    value: str
+
+bar: Bar
+bar |= {}
+bar |= {'key': 1, 'value': 'a'}
+bar |= {'key': 'a', 'value': 'a', 'b': 'a'}  # E: Expected TypedDict keys ("key", "value") but found keys ("key", "value", "b") \
+                                             # E: Incompatible types (expression has type "str", TypedDict item "key" has type "int")
+
+bar |= foo
+bar |= d1  # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Dict[str, int]"; expected "TypedDict({'key'?: int, 'value'?: str})"
+bar |= d2  # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Dict[int, str]"; expected "TypedDict({'key'?: int, 'value'?: str})"
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-typeddict-iror.pyi]
diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi
index 19d175ff79ab..7c0c8767f7d7 100644
--- a/test-data/unit/fixtures/dict.pyi
+++ b/test-data/unit/fixtures/dict.pyi
@@ -3,10 +3,12 @@
 from _typeshed import SupportsKeysAndGetItem
 import _typeshed
 from typing import (
-    TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union, Sequence
+    TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union, Sequence,
+    Self,
 )
 
 T = TypeVar('T')
+T2 = TypeVar('T2')
 KT = TypeVar('KT')
 VT = TypeVar('VT')
 
@@ -34,6 +36,21 @@ class dict(Mapping[KT, VT]):
     def get(self, k: KT, default: Union[VT, T]) -> Union[VT, T]: pass
     def __len__(self) -> int: ...
 
+    # This was actually added in 3.9:
+    @overload
+    def __or__(self, __value: dict[KT, VT]) -> dict[KT, VT]: ...
+    @overload
+    def __or__(self, __value: dict[T, T2]) -> dict[Union[KT, T], Union[VT, T2]]: ...
+    @overload
+    def __ror__(self, __value: dict[KT, VT]) -> dict[KT, VT]: ...
+    @overload
+    def __ror__(self, __value: dict[T, T2]) -> dict[Union[KT, T], Union[VT, T2]]: ...
+    # dict.__ior__ should be kept roughly in line with MutableMapping.update()
+    @overload  # type: ignore[misc]
+    def __ior__(self, __value: _typeshed.SupportsKeysAndGetItem[KT, VT]) -> Self: ...
+    @overload
+    def __ior__(self, __value: Iterable[Tuple[KT, VT]]) -> Self: ...
+
 class int: # for convenience
     def __add__(self, x: Union[int, complex]) -> int: pass
     def __radd__(self, x: int) -> int: pass
diff --git a/test-data/unit/fixtures/typing-async.pyi b/test-data/unit/fixtures/typing-async.pyi
index b207dd599c33..9897dfd0b270 100644
--- a/test-data/unit/fixtures/typing-async.pyi
+++ b/test-data/unit/fixtures/typing-async.pyi
@@ -24,6 +24,7 @@ ClassVar = 0
 Final = 0
 Literal = 0
 NoReturn = 0
+Self = 0
 
 T = TypeVar('T')
 T_co = TypeVar('T_co', covariant=True)
diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi
index e9f0aa199bb4..ef903ace78af 100644
--- a/test-data/unit/fixtures/typing-full.pyi
+++ b/test-data/unit/fixtures/typing-full.pyi
@@ -30,6 +30,7 @@ Literal = 0
 TypedDict = 0
 NoReturn = 0
 NewType = 0
+Self = 0
 
 T = TypeVar('T')
 T_co = TypeVar('T_co', covariant=True)
diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi
index 03be1d0a664d..c19c5d5d96e2 100644
--- a/test-data/unit/fixtures/typing-medium.pyi
+++ b/test-data/unit/fixtures/typing-medium.pyi
@@ -28,6 +28,7 @@ NoReturn = 0
 NewType = 0
 TypeAlias = 0
 LiteralString = 0
+Self = 0
 
 T = TypeVar('T')
 T_co = TypeVar('T_co', covariant=True)
diff --git a/test-data/unit/fixtures/typing-typeddict-iror.pyi b/test-data/unit/fixtures/typing-typeddict-iror.pyi
new file mode 100644
index 000000000000..e452c8497109
--- /dev/null
+++ b/test-data/unit/fixtures/typing-typeddict-iror.pyi
@@ -0,0 +1,66 @@
+# Test stub for typing module that includes TypedDict `|` operator.
+# It only covers `__or__`, `__ror__`, and `__ior__`.
+#
+# We cannot define these methods in `typing-typeddict.pyi`,
+# because they need `dict` with two type args,
+# and not all tests using `[typing typing-typeddict.pyi]` have the proper
+# `dict` stub.
+#
+# Keep in sync with `typeshed`'s definition.
+from abc import ABCMeta
+
+cast = 0
+assert_type = 0
+overload = 0
+Any = 0
+Union = 0
+Optional = 0
+TypeVar = 0
+Generic = 0
+Protocol = 0
+Tuple = 0
+Callable = 0
+NamedTuple = 0
+Final = 0
+Literal = 0
+TypedDict = 0
+NoReturn = 0
+Required = 0
+NotRequired = 0
+Self = 0
+
+T = TypeVar('T')
+T_co = TypeVar('T_co', covariant=True)
+V = TypeVar('V')
+
+# Note: definitions below are different from typeshed, variances are declared
+# to silence the protocol variance checks. Maybe it is better to use type: ignore?
+
+class Sized(Protocol):
+    def __len__(self) -> int: pass
+
+class Iterable(Protocol[T_co]):
+    def __iter__(self) -> 'Iterator[T_co]': pass
+
+class Iterator(Iterable[T_co], Protocol):
+    def __next__(self) -> T_co: pass
+
+class Sequence(Iterable[T_co]):
+    # misc is for explicit Any.
+    def __getitem__(self, n: Any) -> T_co: pass # type: ignore[misc]
+
+class Mapping(Iterable[T], Generic[T, T_co], metaclass=ABCMeta):
+    pass
+
+# Fallback type for all typed dicts (does not exist at runtime).
+class _TypedDict(Mapping[str, object]):
+    @overload
+    def __or__(self, __value: Self) -> Self: ...
+    @overload
+    def __or__(self, __value: dict[str, Any]) -> dict[str, object]: ...
+    @overload
+    def __ror__(self, __value: Self) -> Self: ...
+    @overload
+    def __ror__(self, __value: dict[str, Any]) -> dict[str, object]: ...
+    # supposedly incompatible definitions of __or__ and __ior__
+    def __ior__(self, __value: Self) -> Self: ...  # type: ignore[misc]

From 167dc7095758ddc001119e1c9f330bff4af72b22 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Tue, 24 Oct 2023 15:45:04 -0700
Subject: [PATCH 196/288] Fix sdist build by not including CHANGELOG.md
 (#16323)

This is an attempt to fix wheel builds. Perhaps we'd want to actually
include the changelog in the sdist. We can decide this later after the
build has been fixed.

We've been getting these errors:
```
...
lists of files in version control and sdist do not match!
missing from sdist:
  CHANGELOG.md
listing source files under version control: 830 files and directories
building an sdist: mypy-1.7.0+dev.ffe89a21058eaa6eb1c1796d9ab87aece965e2d9.tar.gz: 829 files and directories
copying source files to a temporary directory
building a clean sdist: mypy-1.7.0+dev.tar.gz: 829 files and directories
suggested MANIFEST.in rules:
  include *.md
Error: Process completed with exit code 1.
```

Example failure:
https://github.com/mypyc/mypy_mypyc-wheels/actions/runs/6555980362/job/17805243900
---
 MANIFEST.in | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/MANIFEST.in b/MANIFEST.in
index 3ae340c7bd5e..c18b83cc0088 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -42,7 +42,7 @@ include pytest.ini
 include tox.ini
 
 include LICENSE mypyc/README.md
-exclude .gitmodules CONTRIBUTING.md CREDITS ROADMAP.md action.yml .editorconfig
+exclude .gitmodules CONTRIBUTING.md CREDITS ROADMAP.md CHANGELOG.md action.yml .editorconfig
 exclude .git-blame-ignore-revs .pre-commit-config.yaml
 
 global-exclude *.py[cod]

From 090a414ba022f600bd65e7611fa3691903fd5a74 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Wed, 25 Oct 2023 07:03:26 -0700
Subject: [PATCH 197/288] Run macOS mypyc tests with Python 3.9 (#16326)

The 3.8 tests have been flaking for several weeks and I don't think
anyone has a good repro or idea as to the cause
---
 .github/workflows/test.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 86704aca2f91..4613605425c3 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -71,8 +71,8 @@ jobs:
           tox_extra_args: "-n 2"
           test_mypyc: true
 
-        - name: mypyc runtime tests with py38-macos
-          python: '3.8.17'
+        - name: mypyc runtime tests with py39-macos
+          python: '3.9.18'
           arch: x64
           os: macos-latest
           toxenv: py

From f7d047cd6dc008ab767510211d5c466d1c5e9215 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Fri, 27 Oct 2023 13:45:51 +0100
Subject: [PATCH 198/288] [mypyc] Generate error on duplicate function
 definitions (#16309)

Previously we produced duplicate functions in C, which caused C
compiler errors.
---
 mypyc/irbuild/builder.py                |  9 +++++++++
 mypyc/irbuild/function.py               |  2 +-
 mypyc/test-data/irbuild-statements.test | 24 ++++++++++++++++++++++++
 mypyc/test-data/run-misc.test           |  4 ----
 4 files changed, 34 insertions(+), 5 deletions(-)

diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py
index 573ca334a5d1..5ed617aa925f 100644
--- a/mypyc/irbuild/builder.py
+++ b/mypyc/irbuild/builder.py
@@ -175,6 +175,7 @@ def __init__(
         self.graph = graph
         self.ret_types: list[RType] = []
         self.functions: list[FuncIR] = []
+        self.function_names: set[tuple[str | None, str]] = set()
         self.classes: list[ClassIR] = []
         self.final_names: list[tuple[str, RType]] = []
         self.callable_class_names: set[str] = set()
@@ -1326,6 +1327,14 @@ def error(self, msg: str, line: int) -> None:
     def note(self, msg: str, line: int) -> None:
         self.errors.note(msg, self.module_path, line)
 
+    def add_function(self, func_ir: FuncIR, line: int) -> None:
+        name = (func_ir.class_name, func_ir.name)
+        if name in self.function_names:
+            self.error(f'Duplicate definition of "{name[1]}" not supported by mypyc', line)
+            return
+        self.function_names.add(name)
+        self.functions.append(func_ir)
+
 
 def gen_arg_defaults(builder: IRBuilder) -> None:
     """Generate blocks for arguments that have default values.
diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py
index ebf7fa9a54de..b1785f40550e 100644
--- a/mypyc/irbuild/function.py
+++ b/mypyc/irbuild/function.py
@@ -103,7 +103,7 @@ def transform_func_def(builder: IRBuilder, fdef: FuncDef) -> None:
     if func_reg:
         builder.assign(get_func_target(builder, fdef), func_reg, fdef.line)
     maybe_insert_into_registry_dict(builder, fdef)
-    builder.functions.append(func_ir)
+    builder.add_function(func_ir, fdef.line)
 
 
 def transform_overloaded_func_def(builder: IRBuilder, o: OverloadedFuncDef) -> None:
diff --git a/mypyc/test-data/irbuild-statements.test b/mypyc/test-data/irbuild-statements.test
index 490b41336e88..b7c67730a05f 100644
--- a/mypyc/test-data/irbuild-statements.test
+++ b/mypyc/test-data/irbuild-statements.test
@@ -1123,3 +1123,27 @@ L6:
     r14 = CPy_NoErrOccured()
 L7:
     return 1
+
+[case testConditionalFunctionDefinition]
+if int():
+    def foo() -> int:
+        return 0
+else:
+    def foo() -> int:  # E
+        return 1
+
+def bar() -> int:
+    return 0
+
+if int():
+    def bar() -> int:  # E
+        return 1
+[out]
+main:5: error: Duplicate definition of "foo" not supported by mypyc
+main:12: error: Duplicate definition of "bar" not supported by mypyc
+
+[case testRepeatedUnderscoreFunctions]
+def _(arg): pass
+def _(arg): pass
+[out]
+main:2: error: Duplicate definition of "_" not supported by mypyc
diff --git a/mypyc/test-data/run-misc.test b/mypyc/test-data/run-misc.test
index c40e0fc55f0e..f77ba3a1302b 100644
--- a/mypyc/test-data/run-misc.test
+++ b/mypyc/test-data/run-misc.test
@@ -1117,10 +1117,6 @@ for _ in range(2):
     except AssertionError:
         pass
 
-[case testRepeatedUnderscoreFunctions]
-def _(arg): pass
-def _(arg): pass
-
 [case testUnderscoreFunctionsInMethods]
 
 class A:

From 5ef9c82c19941bd376128491b7959f551bd530e7 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Fri, 27 Oct 2023 16:25:52 +0100
Subject: [PATCH 199/288] [daemon] Fix return type change to optional in
 generic function (#16342)

Previously changing a return type to an optional type was not propagated
at least in some cases, since astdiff could simplify away the optional
type.
---
 mypy/server/astdiff.py   |  4 +++-
 test-data/unit/diff.test | 33 +++++++++++++++++++++++++++++++++
 2 files changed, 36 insertions(+), 1 deletion(-)

diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py
index 93f178dca35a..5323bf2c57cb 100644
--- a/mypy/server/astdiff.py
+++ b/mypy/server/astdiff.py
@@ -74,6 +74,7 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method'
     Var,
 )
 from mypy.semanal_shared import find_dataclass_transform_spec
+from mypy.state import state
 from mypy.types import (
     AnyType,
     CallableType,
@@ -456,7 +457,8 @@ def normalize_callable_variables(self, typ: CallableType) -> CallableType:
                 tv = v.copy_modified(id=tid)
             tvs.append(tv)
             tvmap[v.id] = tv
-        return expand_type(typ, tvmap).copy_modified(variables=tvs)
+        with state.strict_optional_set(True):
+            return expand_type(typ, tvmap).copy_modified(variables=tvs)
 
     def visit_tuple_type(self, typ: TupleType) -> SnapshotItem:
         return ("TupleType", snapshot_types(typ.items))
diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test
index 66adfaecd909..8fc74868123e 100644
--- a/test-data/unit/diff.test
+++ b/test-data/unit/diff.test
@@ -1497,3 +1497,36 @@ class C:
     def meth(self) -> int: return 0
 [out]
 __main__.C.meth
+
+[case testGenericFunctionWithOptionalReturnType]
+from typing import Type, TypeVar
+
+T = TypeVar("T")
+
+class C:
+    @classmethod
+    def get_by_team_and_id(
+        cls: Type[T],
+        raw_member_id: int,
+        include_removed: bool = False,
+    ) -> T:
+        pass
+
+[file next.py]
+from typing import Type, TypeVar, Optional
+
+T = TypeVar("T")
+
+class C:
+    @classmethod
+    def get_by_team_and_id(
+        cls: Type[T],
+        raw_member_id: int,
+        include_removed: bool = False,
+    ) -> Optional[T]:
+        pass
+
+[builtins fixtures/classmethod.pyi]
+[out]
+__main__.C.get_by_team_and_id
+__main__.Optional

From b41c8c1ec4337f158d70d9dfd2032c2ae03a017c Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 27 Oct 2023 18:35:48 +0100
Subject: [PATCH 200/288] Use upper bound as inference fallback more
 consistently (#16344)

Fixes https://github.com/python/mypy/issues/16331

Fix is straightforward: do not use the fallback, where we would not give
the error in the first place.
---
 mypy/checkexpr.py                   |  4 +++-
 mypy/infer.py                       |  8 ++++++--
 mypy/solve.py                       |  5 ++++-
 test-data/unit/check-inference.test | 19 +++++++++++++++++++
 4 files changed, 32 insertions(+), 4 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 18c1c570ba91..ddcaa6ee30c9 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -1989,7 +1989,9 @@ def infer_function_type_arguments_using_context(
             # in this case external context is almost everything we have.
             if not is_generic_instance(ctx) and not is_literal_type_like(ctx):
                 return callable.copy_modified()
-        args = infer_type_arguments(callable.variables, ret_type, erased_ctx)
+        args = infer_type_arguments(
+            callable.variables, ret_type, erased_ctx, skip_unsatisfied=True
+        )
         # Only substitute non-Uninhabited and non-erased types.
         new_args: list[Type | None] = []
         for arg in args:
diff --git a/mypy/infer.py b/mypy/infer.py
index ba4a1d2bc9b1..bcf0c95808ab 100644
--- a/mypy/infer.py
+++ b/mypy/infer.py
@@ -63,9 +63,13 @@ def infer_function_type_arguments(
 
 
 def infer_type_arguments(
-    type_vars: Sequence[TypeVarLikeType], template: Type, actual: Type, is_supertype: bool = False
+    type_vars: Sequence[TypeVarLikeType],
+    template: Type,
+    actual: Type,
+    is_supertype: bool = False,
+    skip_unsatisfied: bool = False,
 ) -> list[Type | None]:
     # Like infer_function_type_arguments, but only match a single type
     # against a generic type.
     constraints = infer_constraints(template, actual, SUPERTYPE_OF if is_supertype else SUBTYPE_OF)
-    return solve_constraints(type_vars, constraints)[0]
+    return solve_constraints(type_vars, constraints, skip_unsatisfied=skip_unsatisfied)[0]
diff --git a/mypy/solve.py b/mypy/solve.py
index 4d0ca6b7af24..efe8e487c506 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -43,6 +43,7 @@ def solve_constraints(
     constraints: list[Constraint],
     strict: bool = True,
     allow_polymorphic: bool = False,
+    skip_unsatisfied: bool = False,
 ) -> tuple[list[Type | None], list[TypeVarLikeType]]:
     """Solve type constraints.
 
@@ -54,6 +55,8 @@ def solve_constraints(
     If allow_polymorphic=True, then use the full algorithm that can potentially return
     free type variables in solutions (these require special care when applying). Otherwise,
     use a simplified algorithm that just solves each type variable individually if possible.
+
+    The skip_unsatisfied flag matches the same one in applytype.apply_generic_arguments().
     """
     vars = [tv.id for tv in original_vars]
     if not vars:
@@ -110,7 +113,7 @@ def solve_constraints(
                 candidate = AnyType(TypeOfAny.special_form)
             res.append(candidate)
 
-    if not free_vars:
+    if not free_vars and not skip_unsatisfied:
         # Most of the validation for solutions is done in applytype.py, but here we can
         # quickly test solutions w.r.t. to upper bounds, and use the latter (if possible),
         # if solutions are actually not valid (due to poor inference context).
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 0a95ffdd50cf..0d162238450a 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -3748,3 +3748,22 @@ empty: Dict[NoReturn, NoReturn]
 def bar() -> Union[Dict[str, Any], Dict[int, Any]]:
     return empty
 [builtins fixtures/dict.pyi]
+
+[case testUpperBoundInferenceFallbackNotOverused]
+from typing import TypeVar, Protocol, List
+
+S = TypeVar("S", covariant=True)
+class Foo(Protocol[S]):
+    def foo(self) -> S: ...
+def foo(x: Foo[S]) -> S: ...
+
+T = TypeVar("T", bound="Base")
+class Base:
+    def foo(self: T) -> T: ...
+class C(Base):
+    pass
+
+def f(values: List[T]) -> T: ...
+x = foo(f([C()]))
+reveal_type(x)  # N: Revealed type is "__main__.C"
+[builtins fixtures/list.pyi]

From 5d4046477eb017fcb2cdbf64403a4e67308ef2ed Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 27 Oct 2023 18:36:08 +0100
Subject: [PATCH 201/288] Support PEP-646 and PEP-692 in the same callable
 (#16294)

Fixes https://github.com/python/mypy/issues/16285

I was not sure if it is important to support this, but taking into
account the current behavior is a crash, and that implementation is
quite simple, I think we should do this. Using this opportunity I also
improve related error messages a bit.
---
 mypy/semanal.py                         |   2 +-
 mypy/typeanal.py                        |  59 ++++++++------
 mypy/types.py                           |   7 +-
 test-data/unit/check-typevar-tuple.test | 104 +++++++++++++++++++++++-
 test-data/unit/semanal-types.test       |   2 +-
 5 files changed, 142 insertions(+), 32 deletions(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index 179ee7c70bfb..342d48256ff5 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -950,7 +950,7 @@ def remove_unpack_kwargs(self, defn: FuncDef, typ: CallableType) -> CallableType
             return typ
         last_type = get_proper_type(last_type.type)
         if not isinstance(last_type, TypedDictType):
-            self.fail("Unpack item in ** argument must be a TypedDict", defn)
+            self.fail("Unpack item in ** argument must be a TypedDict", last_type)
             new_arg_types = typ.arg_types[:-1] + [AnyType(TypeOfAny.from_error)]
             return typ.copy_modified(arg_types=new_arg_types)
         overlap = set(typ.arg_names) & set(last_type.items)
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index b16d0ac066b4..ceb276d3bdd4 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -987,33 +987,40 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type:
                     self.anal_star_arg_type(t.arg_types[-2], ARG_STAR, nested=nested),
                     self.anal_star_arg_type(t.arg_types[-1], ARG_STAR2, nested=nested),
                 ]
+                # If nested is True, it means we are analyzing a Callable[...] type, rather
+                # than a function definition type. We need to "unpack" ** TypedDict annotation
+                # here (for function definitions it is done in semanal).
+                if nested and isinstance(arg_types[-1], UnpackType):
+                    # TODO: it would be better to avoid this get_proper_type() call.
+                    unpacked = get_proper_type(arg_types[-1].type)
+                    if isinstance(unpacked, TypedDictType):
+                        arg_types[-1] = unpacked
+                        unpacked_kwargs = True
+                    arg_types = self.check_unpacks_in_list(arg_types)
             else:
-                arg_types = self.anal_array(t.arg_types, nested=nested, allow_unpack=True)
                 star_index = None
                 if ARG_STAR in arg_kinds:
                     star_index = arg_kinds.index(ARG_STAR)
                 star2_index = None
                 if ARG_STAR2 in arg_kinds:
                     star2_index = arg_kinds.index(ARG_STAR2)
-                validated_args: list[Type] = []
-                for i, at in enumerate(arg_types):
-                    if isinstance(at, UnpackType) and i not in (star_index, star2_index):
-                        self.fail(
-                            message_registry.INVALID_UNPACK_POSITION, at, code=codes.VALID_TYPE
-                        )
-                        validated_args.append(AnyType(TypeOfAny.from_error))
-                    else:
-                        if nested and isinstance(at, UnpackType) and i == star_index:
-                            # TODO: it would be better to avoid this get_proper_type() call.
-                            p_at = get_proper_type(at.type)
-                            if isinstance(p_at, TypedDictType) and not at.from_star_syntax:
-                                # Automatically detect Unpack[Foo] in Callable as backwards
-                                # compatible syntax for **Foo, if Foo is a TypedDict.
-                                at = p_at
-                                arg_kinds[i] = ARG_STAR2
-                                unpacked_kwargs = True
-                        validated_args.append(at)
-                arg_types = validated_args
+                arg_types = []
+                for i, ut in enumerate(t.arg_types):
+                    at = self.anal_type(
+                        ut, nested=nested, allow_unpack=i in (star_index, star2_index)
+                    )
+                    if nested and isinstance(at, UnpackType) and i == star_index:
+                        # TODO: it would be better to avoid this get_proper_type() call.
+                        p_at = get_proper_type(at.type)
+                        if isinstance(p_at, TypedDictType) and not at.from_star_syntax:
+                            # Automatically detect Unpack[Foo] in Callable as backwards
+                            # compatible syntax for **Foo, if Foo is a TypedDict.
+                            at = p_at
+                            arg_kinds[i] = ARG_STAR2
+                            unpacked_kwargs = True
+                    arg_types.append(at)
+                if nested:
+                    arg_types = self.check_unpacks_in_list(arg_types)
             # If there were multiple (invalid) unpacks, the arg types list will become shorter,
             # we need to trim the kinds/names as well to avoid crashes.
             arg_kinds = t.arg_kinds[: len(arg_types)]
@@ -1387,8 +1394,9 @@ def analyze_callable_args(
         names: list[str | None] = []
         seen_unpack = False
         unpack_types: list[Type] = []
-        invalid_unpacks = []
-        for arg in arglist.items:
+        invalid_unpacks: list[Type] = []
+        second_unpack_last = False
+        for i, arg in enumerate(arglist.items):
             if isinstance(arg, CallableArgument):
                 args.append(arg.typ)
                 names.append(arg.name)
@@ -1415,6 +1423,11 @@ def analyze_callable_args(
             ):
                 if seen_unpack:
                     # Multiple unpacks, preserve them, so we can give an error later.
+                    if i == len(arglist.items) - 1 and not invalid_unpacks:
+                        # Special case: if there are just two unpacks, and the second one appears
+                        # as last type argument, it can be still valid, if the second unpacked type
+                        # is a TypedDict. This should be checked by the caller.
+                        second_unpack_last = True
                     invalid_unpacks.append(arg)
                     continue
                 seen_unpack = True
@@ -1442,7 +1455,7 @@ def analyze_callable_args(
             names.append(None)
         for arg in invalid_unpacks:
             args.append(arg)
-            kinds.append(ARG_STAR)
+            kinds.append(ARG_STAR2 if second_unpack_last else ARG_STAR)
             names.append(None)
         # Note that arglist below is only used for error context.
         check_arg_names(names, [arglist] * len(args), self.fail, "Callable")
diff --git a/mypy/types.py b/mypy/types.py
index ae1a1f595fa2..43003a9a22b6 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -3268,15 +3268,16 @@ def visit_callable_type(self, t: CallableType) -> str:
             num_skip = 0
 
         s = ""
-        bare_asterisk = False
+        asterisk = False
         for i in range(len(t.arg_types) - num_skip):
             if s != "":
                 s += ", "
-            if t.arg_kinds[i].is_named() and not bare_asterisk:
+            if t.arg_kinds[i].is_named() and not asterisk:
                 s += "*, "
-                bare_asterisk = True
+                asterisk = True
             if t.arg_kinds[i] == ARG_STAR:
                 s += "*"
+                asterisk = True
             if t.arg_kinds[i] == ARG_STAR2:
                 s += "**"
             name = t.arg_names[i]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 4a281fbf0b49..1a2573898170 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -571,8 +571,7 @@ from typing_extensions import Unpack, TypeVarTuple
 
 Ts = TypeVarTuple("Ts")
 Us = TypeVarTuple("Us")
-a: Callable[[Unpack[Ts], Unpack[Us]], int]  # E: Var args may not appear after named or var args \
-                                            # E: More than one Unpack in a type is not allowed
+a: Callable[[Unpack[Ts], Unpack[Us]], int]  # E: More than one Unpack in a type is not allowed
 reveal_type(a)  # N: Revealed type is "def [Ts, Us] (*Unpack[Ts`-1]) -> builtins.int"
 b: Callable[[Unpack], int]  # E: Unpack[...] requires exactly one type argument
 reveal_type(b)  # N: Revealed type is "def (*Any) -> builtins.int"
@@ -730,8 +729,7 @@ A = Tuple[Unpack[Ts], Unpack[Us]]  # E: More than one Unpack in a type is not al
 x: A[int, str]
 reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
 
-B = Callable[[Unpack[Ts], Unpack[Us]], int]  # E: Var args may not appear after named or var args \
-                                             # E: More than one Unpack in a type is not allowed
+B = Callable[[Unpack[Ts], Unpack[Us]], int]  # E: More than one Unpack in a type is not allowed
 y: B[int, str]
 reveal_type(y)  # N: Revealed type is "def (builtins.int, builtins.str) -> builtins.int"
 
@@ -1912,3 +1910,101 @@ reveal_type(y)  # N: Revealed type is "__main__.C[builtins.int, Unpack[builtins.
 z = C[int]()  # E: Bad number of arguments, expected: at least 2, given: 1
 reveal_type(z)  # N: Revealed type is "__main__.C[Any, Unpack[builtins.tuple[Any, ...]], Any]"
 [builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleBothUnpacksSimple]
+from typing import Tuple
+from typing_extensions import Unpack, TypeVarTuple, TypedDict
+
+class Keywords(TypedDict):
+    a: str
+    b: str
+
+Ints = Tuple[int, ...]
+
+def f(*args: Unpack[Ints], other: str = "no", **kwargs: Unpack[Keywords]) -> None: ...
+reveal_type(f)  # N: Revealed type is "def (*args: builtins.int, other: builtins.str =, **kwargs: Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])"
+f(1, 2, a="a", b="b")  # OK
+f(1, 2, 3)  # E: Missing named argument "a" for "f" \
+            # E: Missing named argument "b" for "f"
+
+Ts = TypeVarTuple("Ts")
+def g(*args: Unpack[Ts], other: str = "no", **kwargs: Unpack[Keywords]) -> None: ...
+reveal_type(g)  # N: Revealed type is "def [Ts] (*args: Unpack[Ts`-1], other: builtins.str =, **kwargs: Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])"
+g(1, 2, a="a", b="b")  # OK
+g(1, 2, 3)  # E: Missing named argument "a" for "g" \
+            # E: Missing named argument "b" for "g"
+
+def bad(
+    *args: Unpack[Keywords],  # E: "Keywords" cannot be unpacked (must be tuple or TypeVarTuple)
+    **kwargs: Unpack[Ints],  # E: Unpack item in ** argument must be a TypedDict
+) -> None: ...
+reveal_type(bad)  # N: Revealed type is "def (*args: Any, **kwargs: Any)"
+
+def bad2(
+    one: int,
+    *args: Unpack[Keywords],  # E: "Keywords" cannot be unpacked (must be tuple or TypeVarTuple)
+    other: str = "no",
+     **kwargs: Unpack[Ints],  # E: Unpack item in ** argument must be a TypedDict
+) -> None: ...
+reveal_type(bad2)  # N: Revealed type is "def (one: builtins.int, *args: Any, other: builtins.str =, **kwargs: Any)"
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleBothUnpacksCallable]
+from typing import Callable, Tuple
+from typing_extensions import Unpack, TypedDict
+
+class Keywords(TypedDict):
+    a: str
+    b: str
+Ints = Tuple[int, ...]
+
+cb: Callable[[Unpack[Ints], Unpack[Keywords]], None]
+reveal_type(cb)  # N: Revealed type is "def (*builtins.int, **Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])"
+
+cb2: Callable[[int, Unpack[Ints], int, Unpack[Keywords]], None]
+reveal_type(cb2)  # N: Revealed type is "def (builtins.int, *Unpack[Tuple[Unpack[builtins.tuple[builtins.int, ...]], builtins.int]], **Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])"
+cb2(1, 2, 3, a="a", b="b")
+cb2(1, a="a", b="b")  # E: Too few arguments
+cb2(1, 2, 3, a="a")  # E: Missing named argument "b"
+
+bad1: Callable[[Unpack[Ints], Unpack[Ints]], None]  # E: More than one Unpack in a type is not allowed
+reveal_type(bad1)  # N: Revealed type is "def (*builtins.int)"
+bad2: Callable[[Unpack[Keywords], Unpack[Keywords]], None]  # E: "Keywords" cannot be unpacked (must be tuple or TypeVarTuple)
+reveal_type(bad2)  # N: Revealed type is "def (*Any, **Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])"
+bad3: Callable[[Unpack[Keywords], Unpack[Ints]], None]  # E: "Keywords" cannot be unpacked (must be tuple or TypeVarTuple) \
+                                                        # E: More than one Unpack in a type is not allowed
+reveal_type(bad3)  # N: Revealed type is "def (*Any)"
+[builtins fixtures/tuple.pyi]
+
+[case testTypeVarTupleBothUnpacksApplication]
+from typing import Callable, TypeVar, Optional
+from typing_extensions import Unpack, TypeVarTuple, TypedDict
+
+class Keywords(TypedDict):
+    a: str
+    b: str
+
+T = TypeVar("T")
+Ts = TypeVarTuple("Ts")
+def test(
+    x: int,
+    func: Callable[[Unpack[Ts]], T],
+    *args: Unpack[Ts],
+    other: Optional[str] = None,
+    **kwargs: Unpack[Keywords],
+) -> T:
+    if bool():
+        func(*args, **kwargs)  # E: Extra argument "a" from **args
+    return func(*args)
+def test2(
+    x: int,
+    func: Callable[[Unpack[Ts], Unpack[Keywords]], T],
+    *args: Unpack[Ts],
+    other: Optional[str] = None,
+    **kwargs: Unpack[Keywords],
+) -> T:
+    if bool():
+        func(*args)  # E: Missing named argument "a" \
+                     # E: Missing named argument "b"
+    return func(*args, **kwargs)
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test
index 5e05d099b958..83c44738f055 100644
--- a/test-data/unit/semanal-types.test
+++ b/test-data/unit/semanal-types.test
@@ -1043,7 +1043,7 @@ MypyFile:1(
       default(
         Var(y)
         StrExpr()))
-    def (*x: builtins.int, *, y: builtins.str =) -> Any
+    def (*x: builtins.int, y: builtins.str =) -> Any
     VarArg(
       Var(x))
     Block:1(

From 4f05dd506ee4cc8a9f38210be96e974fb8f54a6e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=B8rgen=20Lind?= <jorgen.lind@gmail.com>
Date: Fri, 27 Oct 2023 22:24:50 +0200
Subject: [PATCH 202/288] Write stubs with utf-8 encoding (#16329)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This is to ensure that you don't get encoding errors if docstrings
contain odd characters like emojis.

---------

Co-authored-by: Jørgen Lind <jorgen@3lc.ai>
Co-authored-by: hauntsaninja <hauntsaninja@gmail.com>
---
 mypy/stubgen.py             | 2 +-
 test-data/unit/stubgen.test | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index a2f07a35eaa2..837cd723c410 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -1578,7 +1578,7 @@ def generate_stub_for_py_module(
     subdir = os.path.dirname(target)
     if subdir and not os.path.isdir(subdir):
         os.makedirs(subdir)
-    with open(target, "w") as file:
+    with open(target, "w", encoding="utf-8") as file:
         file.write(output)
 
 
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index 64a1353b29b3..895500c1ba57 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -3485,7 +3485,7 @@ def f2(): ...
 class A:
     """class docstring
 
-    a multiline docstring"""
+    a multiline 😊 docstring"""
     def func():
         """func docstring
         don't forget to indent"""
@@ -3512,7 +3512,7 @@ class B:
 class A:
     """class docstring
 
-    a multiline docstring"""
+    a multiline 😊 docstring"""
     def func() -> None:
         """func docstring
         don't forget to indent"""

From 5c6ca5cdee906ec7c57be478679cd689fdd15861 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 27 Oct 2023 23:58:48 +0100
Subject: [PATCH 203/288] Properly use proper subtyping for callables (#16343)

Fixes https://github.com/python/mypy/issues/16338

This is kind of a major change, but it is technically correct: we should
not treat `(*args: Any, **kwargs: Any)` special in `is_proper_subtype()`
(only in `is_subtype()`). Unfortunately, this requires an additional
flag for `is_callable_compatible()`, since currently we are passing the
subtype kind information via a callback, which is not applicable to
handling argument kinds.
---
 mypy/checker.py                       | 11 ++++++++---
 mypy/constraints.py                   | 12 ++++++++++--
 mypy/meet.py                          |  1 +
 mypy/subtypes.py                      | 14 +++++++++++---
 test-data/unit/check-overloading.test | 22 +++++++++++++++++++++-
 5 files changed, 51 insertions(+), 9 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 64bbbfa0a55b..e68dc4178962 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -800,7 +800,7 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None:
 
                 # Is the overload alternative's arguments subtypes of the implementation's?
                 if not is_callable_compatible(
-                    impl, sig1, is_compat=is_subtype, ignore_return=True
+                    impl, sig1, is_compat=is_subtype, is_proper_subtype=False, ignore_return=True
                 ):
                     self.msg.overloaded_signatures_arg_specific(i + 1, defn.impl)
 
@@ -7685,6 +7685,7 @@ def is_unsafe_overlapping_overload_signatures(
         signature,
         other,
         is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none,
+        is_proper_subtype=False,
         is_compat_return=lambda l, r: not is_subtype_no_promote(l, r),
         ignore_return=False,
         check_args_covariantly=True,
@@ -7694,6 +7695,7 @@ def is_unsafe_overlapping_overload_signatures(
         other,
         signature,
         is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none,
+        is_proper_subtype=False,
         is_compat_return=lambda l, r: not is_subtype_no_promote(r, l),
         ignore_return=False,
         check_args_covariantly=False,
@@ -7744,7 +7746,7 @@ def overload_can_never_match(signature: CallableType, other: CallableType) -> bo
         signature, {tvar.id: erase_def_to_union_or_bound(tvar) for tvar in signature.variables}
     )
     return is_callable_compatible(
-        exp_signature, other, is_compat=is_more_precise, ignore_return=True
+        exp_signature, other, is_compat=is_more_precise, is_proper_subtype=True, ignore_return=True
     )
 
 
@@ -7754,7 +7756,9 @@ def is_more_general_arg_prefix(t: FunctionLike, s: FunctionLike) -> bool:
     #      general than one with fewer items (or just one item)?
     if isinstance(t, CallableType):
         if isinstance(s, CallableType):
-            return is_callable_compatible(t, s, is_compat=is_proper_subtype, ignore_return=True)
+            return is_callable_compatible(
+                t, s, is_compat=is_proper_subtype, is_proper_subtype=True, ignore_return=True
+            )
     elif isinstance(t, FunctionLike):
         if isinstance(s, FunctionLike):
             if len(t.items) == len(s.items):
@@ -7769,6 +7773,7 @@ def is_same_arg_prefix(t: CallableType, s: CallableType) -> bool:
         t,
         s,
         is_compat=is_same_type,
+        is_proper_subtype=True,
         ignore_return=True,
         check_args_covariantly=True,
         ignore_pos_arg_names=True,
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 7d782551b261..6f611736a72a 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -1352,7 +1352,11 @@ def find_matching_overload_item(overloaded: Overloaded, template: CallableType)
         # Return type may be indeterminate in the template, so ignore it when performing a
         # subtype check.
         if mypy.subtypes.is_callable_compatible(
-            item, template, is_compat=mypy.subtypes.is_subtype, ignore_return=True
+            item,
+            template,
+            is_compat=mypy.subtypes.is_subtype,
+            is_proper_subtype=False,
+            ignore_return=True,
         ):
             return item
     # Fall back to the first item if we can't find a match. This is totally arbitrary --
@@ -1370,7 +1374,11 @@ def find_matching_overload_items(
         # Return type may be indeterminate in the template, so ignore it when performing a
         # subtype check.
         if mypy.subtypes.is_callable_compatible(
-            item, template, is_compat=mypy.subtypes.is_subtype, ignore_return=True
+            item,
+            template,
+            is_compat=mypy.subtypes.is_subtype,
+            is_proper_subtype=False,
+            ignore_return=True,
         ):
             res.append(item)
     if not res:
diff --git a/mypy/meet.py b/mypy/meet.py
index 1a566aed17de..fa9bd6a83743 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -462,6 +462,7 @@ def _type_object_overlap(left: Type, right: Type) -> bool:
             left,
             right,
             is_compat=_is_overlapping_types,
+            is_proper_subtype=False,
             ignore_pos_arg_names=True,
             allow_partial_overlap=True,
         )
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 2ca3357dd722..383e6eddd317 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -658,6 +658,8 @@ def visit_parameters(self, left: Parameters) -> bool:
                 left,
                 self.right,
                 is_compat=self._is_subtype,
+                # TODO: this should pass the current value, but then a couple of tests fail.
+                is_proper_subtype=False,
                 ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names,
             )
         else:
@@ -677,6 +679,7 @@ def visit_callable_type(self, left: CallableType) -> bool:
                 left,
                 right,
                 is_compat=self._is_subtype,
+                is_proper_subtype=self.proper_subtype,
                 ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names,
                 strict_concatenate=(self.options.extra_checks or self.options.strict_concatenate)
                 if self.options
@@ -932,6 +935,7 @@ def visit_overloaded(self, left: Overloaded) -> bool:
                                 left_item,
                                 right_item,
                                 is_compat=self._is_subtype,
+                                is_proper_subtype=self.proper_subtype,
                                 ignore_return=True,
                                 ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names,
                                 strict_concatenate=strict_concat,
@@ -940,6 +944,7 @@ def visit_overloaded(self, left: Overloaded) -> bool:
                                 right_item,
                                 left_item,
                                 is_compat=self._is_subtype,
+                                is_proper_subtype=self.proper_subtype,
                                 ignore_return=True,
                                 ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names,
                                 strict_concatenate=strict_concat,
@@ -1358,6 +1363,7 @@ def is_callable_compatible(
     right: CallableType,
     *,
     is_compat: Callable[[Type, Type], bool],
+    is_proper_subtype: bool,
     is_compat_return: Callable[[Type, Type], bool] | None = None,
     ignore_return: bool = False,
     ignore_pos_arg_names: bool = False,
@@ -1517,6 +1523,7 @@ def g(x: int) -> int: ...
         left,
         right,
         is_compat=is_compat,
+        is_proper_subtype=is_proper_subtype,
         ignore_pos_arg_names=ignore_pos_arg_names,
         allow_partial_overlap=allow_partial_overlap,
         strict_concatenate_check=strict_concatenate_check,
@@ -1552,12 +1559,13 @@ def are_parameters_compatible(
     right: Parameters | NormalizedCallableType,
     *,
     is_compat: Callable[[Type, Type], bool],
+    is_proper_subtype: bool,
     ignore_pos_arg_names: bool = False,
     allow_partial_overlap: bool = False,
     strict_concatenate_check: bool = False,
 ) -> bool:
     """Helper function for is_callable_compatible, used for Parameter compatibility"""
-    if right.is_ellipsis_args:
+    if right.is_ellipsis_args and not is_proper_subtype:
         return True
 
     left_star = left.var_arg()
@@ -1566,9 +1574,9 @@ def are_parameters_compatible(
     right_star2 = right.kw_arg()
 
     # Treat "def _(*a: Any, **kw: Any) -> X" similarly to "Callable[..., X]"
-    if are_trivial_parameters(right):
+    if are_trivial_parameters(right) and not is_proper_subtype:
         return True
-    trivial_suffix = is_trivial_suffix(right)
+    trivial_suffix = is_trivial_suffix(right) and not is_proper_subtype
 
     # Match up corresponding arguments and check them for compatibility. In
     # every pair (argL, argR) of corresponding arguments from L and R, argL must
diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test
index b97eeb48115c..7bca5cc7b508 100644
--- a/test-data/unit/check-overloading.test
+++ b/test-data/unit/check-overloading.test
@@ -6501,7 +6501,7 @@ eggs = lambda: 'eggs'
 reveal_type(func(eggs))  # N: Revealed type is "def (builtins.str) -> builtins.str"
 
 spam: Callable[..., str] = lambda x, y: 'baz'
-reveal_type(func(spam))  # N: Revealed type is "def (*Any, **Any) -> builtins.str"
+reveal_type(func(spam))  # N: Revealed type is "def (*Any, **Any) -> Any"
 [builtins fixtures/paramspec.pyi]
 
 [case testGenericOverloadOverlapWithType]
@@ -6673,3 +6673,23 @@ c2 = MyCallable("test")
 reveal_type(c2)  # N: Revealed type is "__main__.MyCallable[builtins.str]"
 reveal_type(c2()) # should be int  # N: Revealed type is "builtins.int"
 [builtins fixtures/tuple.pyi]
+
+[case testOverloadWithStarAnyFallback]
+from typing import overload, Any
+
+class A:
+    @overload
+    def f(self, e: str) -> str: ...
+    @overload
+    def f(self, *args: Any, **kwargs: Any) -> Any: ...
+    def f(self, *args, **kwargs):
+        pass
+
+class B:
+    @overload
+    def f(self, e: str, **kwargs: Any) -> str: ...
+    @overload
+    def f(self, *args: Any, **kwargs: Any) -> Any: ...
+    def f(self, *args, **kwargs):
+        pass
+[builtins fixtures/tuple.pyi]

From 42f7cf1a7228844f82f4de22ac94f0e1b5e3ed9b Mon Sep 17 00:00:00 2001
From: Cibin Mathew <10793628+cibinmathew@users.noreply.github.com>
Date: Sat, 28 Oct 2023 01:03:55 +0200
Subject: [PATCH 204/288] Update starred expr error message to match Python's
 (#16304)

Fixes https://github.com/python/mypy/issues/16287

Update mypy's error on starred expression to match that of Python 3.11
---
 mypy/semanal.py                      | 2 +-
 test-data/unit/check-statements.test | 2 +-
 test-data/unit/check-tuples.test     | 2 +-
 test-data/unit/semanal-errors.test   | 4 ++--
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index 342d48256ff5..a114a5a1dcd4 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -4997,7 +4997,7 @@ def visit_dict_expr(self, expr: DictExpr) -> None:
 
     def visit_star_expr(self, expr: StarExpr) -> None:
         if not expr.valid:
-            self.fail("Can use starred expression only as assignment target", expr, blocker=True)
+            self.fail("can't use starred expression here", expr, blocker=True)
         else:
             expr.expr.accept(self)
 
diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test
index 023e2935a158..f5b47e7ab97f 100644
--- a/test-data/unit/check-statements.test
+++ b/test-data/unit/check-statements.test
@@ -2232,7 +2232,7 @@ def foo(x: int) -> Union[Generator[A, None, None], Generator[B, None, None]]:
     yield x  # E: Incompatible types in "yield" (actual type "int", expected type "Union[A, B]")
 
 [case testNoCrashOnStarRightHandSide]
-x = *(1, 2, 3)  # E: Can use starred expression only as assignment target
+x = *(1, 2, 3)  # E: can't use starred expression here
 [builtins fixtures/tuple.pyi]
 
 
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 76225360a7c1..7070ead43746 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -1118,7 +1118,7 @@ a = (0, *b, '')
 [builtins fixtures/tuple.pyi]
 
 [case testUnpackSyntaxError]
-*foo  # E: Can use starred expression only as assignment target
+*foo  # E: can't use starred expression here
 [builtins fixtures/tuple.pyi]
 
 [case testUnpackBases]
diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test
index f21ba5253437..82307f30877e 100644
--- a/test-data/unit/semanal-errors.test
+++ b/test-data/unit/semanal-errors.test
@@ -480,13 +480,13 @@ c = 1
 d = 1
 a = *b
 [out]
-main:4: error: Can use starred expression only as assignment target
+main:4: error: can't use starred expression here
 
 [case testStarExpressionInExp]
 a = 1
 *a + 1
 [out]
-main:2: error: Can use starred expression only as assignment target
+main:2: error: can't use starred expression here
 
 [case testInvalidDel1]
 x = 1

From 9011ca8b4dedc0e7177737b5265f69694afa91b5 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 28 Oct 2023 00:04:58 +0100
Subject: [PATCH 205/288] Delete recursive aliases flags (#16346)

FWIW I decided to keep the old tests (where possible), just to be sure
we will not re-introduce various crashes at function scope, where
recursive aliases are not allowed.
---
 mypy/main.py                           |  14 ---
 mypy/options.py                        |   4 -
 mypy/semanal.py                        |   4 +-
 mypy/semanal_namedtuple.py             |   6 +-
 mypy/semanal_newtype.py                |   3 +-
 mypy/semanal_typeddict.py              |  11 +-
 mypy/typeanal.py                       |   2 +-
 test-data/unit/check-classes.test      |  31 ++---
 test-data/unit/check-incremental.test  |  15 +--
 test-data/unit/check-namedtuple.test   | 168 +++++++++++++------------
 test-data/unit/check-newsemanal.test   |  96 +++++++-------
 test-data/unit/check-type-aliases.test |  44 ++++---
 test-data/unit/check-typeddict.test    |  34 ++---
 test-data/unit/check-unions.test       |   6 +-
 test-data/unit/cmdline.test            |   8 --
 15 files changed, 216 insertions(+), 230 deletions(-)

diff --git a/mypy/main.py b/mypy/main.py
index dff1a0362ba2..718eb5a7c0c1 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -999,15 +999,6 @@ def add_invertible_flag(
         action="store_true",
         help="Enable new experimental type inference algorithm",
     )
-    internals_group.add_argument(
-        "--disable-recursive-aliases",
-        action="store_true",
-        help="Disable experimental support for recursive type aliases",
-    )
-    # Deprecated reverse variant of the above.
-    internals_group.add_argument(
-        "--enable-recursive-aliases", action="store_true", help=argparse.SUPPRESS
-    )
     parser.add_argument(
         "--enable-incomplete-feature",
         action="append",
@@ -1392,11 +1383,6 @@ def set_strict_flags() -> None:
     if options.logical_deps:
         options.cache_fine_grained = True
 
-    if options.enable_recursive_aliases:
-        print(
-            "Warning: --enable-recursive-aliases is deprecated;"
-            " recursive types are enabled by default"
-        )
     if options.strict_concatenate and not strict_option_set:
         print("Warning: --strict-concatenate is deprecated; use --extra-checks instead")
 
diff --git a/mypy/options.py b/mypy/options.py
index cb0464d4dc06..3447b5dfb1f6 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -362,10 +362,6 @@ def __init__(self) -> None:
         self.many_errors_threshold = defaults.MANY_ERRORS_THRESHOLD
         # Enable new experimental type inference algorithm.
         self.new_type_inference = False
-        # Disable recursive type aliases (currently experimental)
-        self.disable_recursive_aliases = False
-        # Deprecated reverse version of the above, do not use.
-        self.enable_recursive_aliases = False
         # Export line-level, limited, fine-grained dependency information in cache data
         # (undocumented feature).
         self.export_ref_info = False
diff --git a/mypy/semanal.py b/mypy/semanal.py
index a114a5a1dcd4..27491ac695ae 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -3608,7 +3608,7 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool:
             )
             if not res:
                 return False
-            if not self.options.disable_recursive_aliases and not self.is_func_scope():
+            if not self.is_func_scope():
                 # Only marking incomplete for top-level placeholders makes recursive aliases like
                 # `A = Sequence[str | A]` valid here, similar to how we treat base classes in class
                 # definitions, allowing `class str(Sequence[str]): ...`
@@ -6296,7 +6296,7 @@ def process_placeholder(
     def cannot_resolve_name(self, name: str | None, kind: str, ctx: Context) -> None:
         name_format = f' "{name}"' if name else ""
         self.fail(f"Cannot resolve {kind}{name_format} (possible cyclic definition)", ctx)
-        if not self.options.disable_recursive_aliases and self.is_func_scope():
+        if self.is_func_scope():
             self.note("Recursive types are not allowed at function scope", ctx)
 
     def qualified_name(self, name: str) -> str:
diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py
index 51ea90e07f3d..80cf1c4e184a 100644
--- a/mypy/semanal_namedtuple.py
+++ b/mypy/semanal_namedtuple.py
@@ -182,8 +182,7 @@ def check_namedtuple_classdef(
                     # it would be inconsistent with type aliases.
                     analyzed = self.api.anal_type(
                         stmt.type,
-                        allow_placeholder=not self.options.disable_recursive_aliases
-                        and not self.api.is_func_scope(),
+                        allow_placeholder=not self.api.is_func_scope(),
                         prohibit_self_type="NamedTuple item type",
                     )
                     if analyzed is None:
@@ -450,8 +449,7 @@ def parse_namedtuple_fields_with_types(
                 # We never allow recursive types at function scope.
                 analyzed = self.api.anal_type(
                     type,
-                    allow_placeholder=not self.options.disable_recursive_aliases
-                    and not self.api.is_func_scope(),
+                    allow_placeholder=not self.api.is_func_scope(),
                     prohibit_self_type="NamedTuple item type",
                 )
                 # Workaround #4987 and avoid introducing a bogus UnboundType
diff --git a/mypy/semanal_newtype.py b/mypy/semanal_newtype.py
index 16c6c024800d..c9c0c46f7aee 100644
--- a/mypy/semanal_newtype.py
+++ b/mypy/semanal_newtype.py
@@ -207,8 +207,7 @@ def check_newtype_args(
             self.api.anal_type(
                 unanalyzed_type,
                 report_invalid_types=False,
-                allow_placeholder=not self.options.disable_recursive_aliases
-                and not self.api.is_func_scope(),
+                allow_placeholder=not self.api.is_func_scope(),
             )
         )
         should_defer = False
diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py
index a9a4cd868f27..51424d8800d2 100644
--- a/mypy/semanal_typeddict.py
+++ b/mypy/semanal_typeddict.py
@@ -228,10 +228,7 @@ def analyze_base_args(self, base: IndexExpr, ctx: Context) -> list[Type] | None:
                 self.fail("Invalid TypedDict type argument", ctx)
                 return None
             analyzed = self.api.anal_type(
-                type,
-                allow_required=True,
-                allow_placeholder=not self.options.disable_recursive_aliases
-                and not self.api.is_func_scope(),
+                type, allow_required=True, allow_placeholder=not self.api.is_func_scope()
             )
             if analyzed is None:
                 return None
@@ -307,8 +304,7 @@ def analyze_typeddict_classdef_fields(
                     analyzed = self.api.anal_type(
                         stmt.type,
                         allow_required=True,
-                        allow_placeholder=not self.options.disable_recursive_aliases
-                        and not self.api.is_func_scope(),
+                        allow_placeholder=not self.api.is_func_scope(),
                         prohibit_self_type="TypedDict item type",
                     )
                     if analyzed is None:
@@ -504,8 +500,7 @@ def parse_typeddict_fields_with_types(
             analyzed = self.api.anal_type(
                 type,
                 allow_required=True,
-                allow_placeholder=not self.options.disable_recursive_aliases
-                and not self.api.is_func_scope(),
+                allow_placeholder=not self.api.is_func_scope(),
                 prohibit_self_type="TypedDict item type",
             )
             if analyzed is None:
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index ceb276d3bdd4..03579404aac9 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -490,7 +490,7 @@ def cannot_resolve_type(self, t: UnboundType) -> None:
         #       need access to MessageBuilder here. Also move the similar
         #       message generation logic in semanal.py.
         self.api.fail(f'Cannot resolve name "{t.name}" (possible cyclic definition)', t)
-        if not self.options.disable_recursive_aliases and self.api.is_func_scope():
+        if self.api.is_func_scope():
             self.note("Recursive types are not allowed at function scope", t)
 
     def apply_concatenate_operator(self, t: UnboundType) -> Type:
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test
index cd60ec7c9a9c..983cb8454a05 100644
--- a/test-data/unit/check-classes.test
+++ b/test-data/unit/check-classes.test
@@ -5002,12 +5002,13 @@ class A(Tuple[int, str]): pass
 -- -----------------------
 
 [case testCrashOnSelfRecursiveNamedTupleVar]
-# flags: --disable-recursive-aliases
 from typing import NamedTuple
 
-N = NamedTuple('N', [('x', N)]) # E: Cannot resolve name "N" (possible cyclic definition)
-n: N
-reveal_type(n) # N: Revealed type is "Tuple[Any, fallback=__main__.N]"
+def test() -> None:
+    N = NamedTuple('N', [('x', N)]) # E: Cannot resolve name "N" (possible cyclic definition) \
+                                    # N: Recursive types are not allowed at function scope
+    n: N
+    reveal_type(n) # N: Revealed type is "Tuple[Any, fallback=__main__.N@4]"
 [builtins fixtures/tuple.pyi]
 
 [case testCrashOnSelfRecursiveTypedDictVar]
@@ -5032,18 +5033,20 @@ lst = [n, m]
 [builtins fixtures/isinstancelist.pyi]
 
 [case testCorrectJoinOfSelfRecursiveTypedDicts]
-# flags: --disable-recursive-aliases
 from mypy_extensions import TypedDict
 
-class N(TypedDict):
-    x: N # E: Cannot resolve name "N" (possible cyclic definition)
-class M(TypedDict):
-    x: M # E: Cannot resolve name "M" (possible cyclic definition)
-
-n: N
-m: M
-lst = [n, m]
-reveal_type(lst[0]['x'])  # N: Revealed type is "Any"
+def test() -> None:
+    class N(TypedDict):
+        x: N  # E: Cannot resolve name "N" (possible cyclic definition) \
+              # N: Recursive types are not allowed at function scope
+    class M(TypedDict):
+        x: M  # E: Cannot resolve name "M" (possible cyclic definition) \
+              # N: Recursive types are not allowed at function scope
+
+    n: N
+    m: M
+    lst = [n, m]
+    reveal_type(lst[0]['x'])  # N: Revealed type is "Any"
 [builtins fixtures/isinstancelist.pyi]
 
 [case testCrashInForwardRefToNamedTupleWithIsinstance]
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index 801bbd4e77b4..f2625b869c19 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -4594,7 +4594,6 @@ def outer() -> None:
 [out2]
 
 [case testRecursiveAliasImported]
-# flags: --disable-recursive-aliases
 import a
 
 [file a.py]
@@ -4620,16 +4619,10 @@ B = List[A]
 
 [builtins fixtures/list.pyi]
 [out]
-tmp/lib.pyi:4: error: Module "other" has no attribute "B"
-tmp/other.pyi:3: error: Cannot resolve name "B" (possible cyclic definition)
 [out2]
-tmp/lib.pyi:4: error: Module "other" has no attribute "B"
-tmp/other.pyi:3: error: Cannot resolve name "B" (possible cyclic definition)
-tmp/a.py:3: note: Revealed type is "builtins.list[Any]"
-
-[case testRecursiveNamedTupleTypedDict-skip]
-# https://github.com/python/mypy/issues/7125
+tmp/a.py:3: note: Revealed type is "builtins.list[builtins.list[...]]"
 
+[case testRecursiveNamedTupleTypedDict]
 import a
 [file a.py]
 import lib
@@ -4641,7 +4634,7 @@ reveal_type(x.x['x'])
 [file lib.pyi]
 from typing import NamedTuple
 from other import B
-A = NamedTuple('A', [('x', B)])  # type: ignore
+A = NamedTuple('A', [('x', B)])
 [file other.pyi]
 from mypy_extensions import TypedDict
 from lib import A
@@ -4649,7 +4642,7 @@ B = TypedDict('B', {'x': A})
 [builtins fixtures/dict.pyi]
 [out]
 [out2]
-tmp/a.py:3: note: Revealed type is "Tuple[TypedDict('other.B', {'x': Any}), fallback=lib.A]"
+tmp/a.py:3: note: Revealed type is "Tuple[TypedDict('other.B', {'x': Tuple[..., fallback=lib.A]}), fallback=lib.A]"
 
 [case testFollowImportSkipNotInvalidatedOnPresent]
 # flags: --follow-imports=skip
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
index 9fa098b28dee..14e075339572 100644
--- a/test-data/unit/check-namedtuple.test
+++ b/test-data/unit/check-namedtuple.test
@@ -607,16 +607,18 @@ tmp/b.py:4: note: Revealed type is "Tuple[Any, fallback=a.N]"
 tmp/b.py:7: note: Revealed type is "Tuple[Any, fallback=a.N]"
 
 [case testSimpleSelfReferentialNamedTuple]
-# flags: --disable-recursive-aliases
 from typing import NamedTuple
-class MyNamedTuple(NamedTuple):
-    parent: 'MyNamedTuple' # E: Cannot resolve name "MyNamedTuple" (possible cyclic definition)
 
-def bar(nt: MyNamedTuple) -> MyNamedTuple:
-    return nt
+def test() -> None:
+    class MyNamedTuple(NamedTuple):
+        parent: 'MyNamedTuple'  # E: Cannot resolve name "MyNamedTuple" (possible cyclic definition) \
+                                # N: Recursive types are not allowed at function scope
 
-x: MyNamedTuple
-reveal_type(x.parent) # N: Revealed type is "Any"
+    def bar(nt: MyNamedTuple) -> MyNamedTuple:
+        return nt
+
+    x: MyNamedTuple
+    reveal_type(x.parent) # N: Revealed type is "Any"
 [builtins fixtures/tuple.pyi]
 
 -- Some crazy self-referential named tuples and types dicts
@@ -645,106 +647,111 @@ class B:
 [out]
 
 [case testSelfRefNT1]
-# flags: --disable-recursive-aliases
 from typing import Tuple, NamedTuple
 
-Node = NamedTuple('Node', [
-        ('name', str),
-        ('children', Tuple['Node', ...]), # E: Cannot resolve name "Node" (possible cyclic definition)
-    ])
-n: Node
-reveal_type(n) # N: Revealed type is "Tuple[builtins.str, builtins.tuple[Any, ...], fallback=__main__.Node]"
+def test() -> None:
+    Node = NamedTuple('Node', [
+            ('name', str),
+            ('children', Tuple['Node', ...]),  # E: Cannot resolve name "Node" (possible cyclic definition) \
+                                               # N: Recursive types are not allowed at function scope
+        ])
+    n: Node
+    reveal_type(n) # N: Revealed type is "Tuple[builtins.str, builtins.tuple[Any, ...], fallback=__main__.Node@4]"
 [builtins fixtures/tuple.pyi]
 
 [case testSelfRefNT2]
-# flags: --disable-recursive-aliases
 from typing import Tuple, NamedTuple
 
-A = NamedTuple('A', [
-        ('x', str),
-        ('y', Tuple['B', ...]), # E: Cannot resolve name "B" (possible cyclic definition)
-    ])
-class B(NamedTuple):
-    x: A
-    y: int
+def test() -> None:
+    A = NamedTuple('A', [
+            ('x', str),
+            ('y', Tuple['B', ...]),  # E: Cannot resolve name "B" (possible cyclic definition) \
+                                     # N: Recursive types are not allowed at function scope
+        ])
+    class B(NamedTuple):
+        x: A
+        y: int
 
-n: A
-reveal_type(n) # N: Revealed type is "Tuple[builtins.str, builtins.tuple[Any, ...], fallback=__main__.A]"
+    n: A
+    reveal_type(n) # N: Revealed type is "Tuple[builtins.str, builtins.tuple[Any, ...], fallback=__main__.A@4]"
 [builtins fixtures/tuple.pyi]
 
 [case testSelfRefNT3]
-# flags: --disable-recursive-aliases
 from typing import NamedTuple, Tuple
 
-class B(NamedTuple):
-    x: Tuple[A, int] # E: Cannot resolve name "A" (possible cyclic definition)
-    y: int
+def test() -> None:
+    class B(NamedTuple):
+        x: Tuple[A, int]  # E: Cannot resolve name "A" (possible cyclic definition) \
+                          # N: Recursive types are not allowed at function scope
+        y: int
 
-A = NamedTuple('A', [
-        ('x', str),
-        ('y', 'B'),
-    ])
-n: B
-m: A
-reveal_type(n.x) # N: Revealed type is "Tuple[Any, builtins.int]"
-reveal_type(m[0]) # N: Revealed type is "builtins.str"
-lst = [m, n]
-reveal_type(lst[0]) # N: Revealed type is "Tuple[builtins.object, builtins.object]"
+    A = NamedTuple('A', [
+            ('x', str),
+            ('y', 'B'),
+        ])
+    n: B
+    m: A
+    reveal_type(n.x) # N: Revealed type is "Tuple[Any, builtins.int]"
+    reveal_type(m[0]) # N: Revealed type is "builtins.str"
+    lst = [m, n]
+    reveal_type(lst[0]) # N: Revealed type is "Tuple[builtins.object, builtins.object]"
 [builtins fixtures/tuple.pyi]
 
 [case testSelfRefNT4]
-# flags: --disable-recursive-aliases
 from typing import NamedTuple
 
-class B(NamedTuple):
-    x: A # E: Cannot resolve name "A" (possible cyclic definition)
-    y: int
+def test() -> None:
+    class B(NamedTuple):
+        x: A  # E: Cannot resolve name "A" (possible cyclic definition) \
+              # N: Recursive types are not allowed at function scope
+        y: int
 
-class A(NamedTuple):
-    x: str
-    y: B
+    class A(NamedTuple):
+        x: str
+        y: B
 
-n: A
-reveal_type(n.y[0]) # N: Revealed type is "Any"
+    n: A
+    reveal_type(n.y[0]) # N: Revealed type is "Any"
 [builtins fixtures/tuple.pyi]
 
 [case testSelfRefNT5]
-# flags: --disable-recursive-aliases
 from typing import NamedTuple
 
-B = NamedTuple('B', [
-        ('x', A), # E: Cannot resolve name "A" (possible cyclic definition)  # E: Name "A" is used before definition
-        ('y', int),
-    ])
-A = NamedTuple('A', [
-        ('x', str),
-        ('y', 'B'),
-    ])
-n: A
-def f(m: B) -> None: pass
-reveal_type(n) # N: Revealed type is "Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B], fallback=__main__.A]"
-reveal_type(f) # N: Revealed type is "def (m: Tuple[Any, builtins.int, fallback=__main__.B])"
+def test() -> None:
+    B = NamedTuple('B', [
+            ('x', A),  # E: Cannot resolve name "A" (possible cyclic definition)  \
+                       # N: Recursive types are not allowed at function scope \
+                       # E: Name "A" is used before definition
+            ('y', int),
+        ])
+    A = NamedTuple('A', [
+            ('x', str),
+            ('y', 'B'),
+        ])
+    n: A
+    def f(m: B) -> None: pass
+    reveal_type(n) # N: Revealed type is "Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B@4], fallback=__main__.A@8]"
+    reveal_type(f) # N: Revealed type is "def (m: Tuple[Any, builtins.int, fallback=__main__.B@4])"
 [builtins fixtures/tuple.pyi]
 
 [case testRecursiveNamedTupleInBases]
-# flags: --disable-recursive-aliases
 from typing import List, NamedTuple, Union
 
-Exp = Union['A', 'B']  # E: Cannot resolve name "Exp" (possible cyclic definition) \
-                       # E: Cannot resolve name "A" (possible cyclic definition)
-class A(NamedTuple('A', [('attr', List[Exp])])): pass
-class B(NamedTuple('B', [('val', object)])): pass
+def test() -> None:
+    Exp = Union['A', 'B']  # E: Cannot resolve name "Exp" (possible cyclic definition) \
+                           # N: Recursive types are not allowed at function scope \
+                           # E: Cannot resolve name "A" (possible cyclic definition)
+    class A(NamedTuple('A', [('attr', List[Exp])])): pass
+    class B(NamedTuple('B', [('val', object)])): pass
 
-def my_eval(exp: Exp) -> int:
-    reveal_type(exp) # N: Revealed type is "Union[Any, Tuple[builtins.object, fallback=__main__.B]]"
+    exp: Exp
+    reveal_type(exp)  # N: Revealed type is "Union[Any, Tuple[builtins.object, fallback=__main__.B@6]]"
     if isinstance(exp, A):
-        my_eval(exp[0][0])
-        return my_eval(exp.attr[0])
+        reveal_type(exp[0][0])  # N: Revealed type is "Union[Any, Tuple[builtins.object, fallback=__main__.B@6]]"
+        reveal_type(exp.attr[0])  # N: Revealed type is "Union[Any, Tuple[builtins.object, fallback=__main__.B@6]]"
     if isinstance(exp, B):
-        return exp.val  # E: Incompatible return value type (got "object", expected "int")
-    return 0
-
-my_eval(A([B(1), B(2)])) # OK
+        reveal_type(exp.val)  # N: Revealed type is "builtins.object"
+    reveal_type(A([B(1), B(2)]))  # N: Revealed type is "Tuple[builtins.list[Union[Any, Tuple[builtins.object, fallback=__main__.B@6]]], fallback=__main__.A@5]"
 [builtins fixtures/isinstancelist.pyi]
 [out]
 
@@ -771,17 +778,18 @@ tp = NamedTuple('tp', [('x', int)])
 [out]
 
 [case testSubclassOfRecursiveNamedTuple]
-# flags: --disable-recursive-aliases
 from typing import List, NamedTuple
 
-class Command(NamedTuple):
-    subcommands: List['Command'] # E: Cannot resolve name "Command" (possible cyclic definition)
+def test() -> None:
+    class Command(NamedTuple):
+        subcommands: List['Command']  # E: Cannot resolve name "Command" (possible cyclic definition) \
+                                      # N: Recursive types are not allowed at function scope
 
-class HelpCommand(Command):
-    pass
+    class HelpCommand(Command):
+        pass
 
-hc = HelpCommand(subcommands=[])
-reveal_type(hc)  # N: Revealed type is "Tuple[builtins.list[Any], fallback=__main__.HelpCommand]"
+    hc = HelpCommand(subcommands=[])
+    reveal_type(hc)  # N: Revealed type is "Tuple[builtins.list[Any], fallback=__main__.HelpCommand@7]"
 [builtins fixtures/list.pyi]
 [out]
 
diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test
index ff8d346e74a1..f4d3b9df760e 100644
--- a/test-data/unit/check-newsemanal.test
+++ b/test-data/unit/check-newsemanal.test
@@ -434,13 +434,14 @@ def main() -> None:
             x  # E: Name "x" is not defined
 
 [case testNewAnalyzerCyclicDefinitions]
-# flags: --disable-recursive-aliases --disable-error-code used-before-def
+# flags: --disable-error-code used-before-def
 gx = gy  # E: Cannot resolve name "gy" (possible cyclic definition)
 gy = gx
 def main() -> None:
     class C:
         def meth(self) -> None:
-            lx = ly  # E: Cannot resolve name "ly" (possible cyclic definition)
+            lx = ly  # E: Cannot resolve name "ly" (possible cyclic definition) \
+                     # N: Recursive types are not allowed at function scope
             ly = lx
 
 [case testNewAnalyzerCyclicDefinitionCrossModule]
@@ -1495,22 +1496,25 @@ reveal_type(x[0][0])  # N: Revealed type is "__main__.C"
 [builtins fixtures/list.pyi]
 
 [case testNewAnalyzerAliasToNotReadyDirectBase]
-# flags: --disable-recursive-aliases --disable-error-code used-before-def
+# flags: --disable-error-code used-before-def
 from typing import List
 
-x: B
-B = List[C]
-class C(B): pass
+def test() -> None:
+    x: B
+    B = List[C]
+    class C(B): pass
 
-reveal_type(x)
-reveal_type(x[0][0])
+    reveal_type(x)
+    reveal_type(x[0][0])
 [builtins fixtures/list.pyi]
 [out]
-main:4: error: Cannot resolve name "B" (possible cyclic definition)
 main:5: error: Cannot resolve name "B" (possible cyclic definition)
-main:5: error: Cannot resolve name "C" (possible cyclic definition)
-main:8: note: Revealed type is "Any"
+main:5: note: Recursive types are not allowed at function scope
+main:6: error: Cannot resolve name "B" (possible cyclic definition)
+main:6: note: Recursive types are not allowed at function scope
+main:6: error: Cannot resolve name "C" (possible cyclic definition)
 main:9: note: Revealed type is "Any"
+main:10: note: Revealed type is "Any"
 
 [case testNewAnalyzerAliasToNotReadyTwoDeferralsFunction]
 # flags: --disable-error-code used-before-def
@@ -1530,25 +1534,21 @@ reveal_type(f)  # N: Revealed type is "def (x: builtins.list[a.C]) -> builtins.l
 [builtins fixtures/list.pyi]
 
 [case testNewAnalyzerAliasToNotReadyDirectBaseFunction]
-# flags: --disable-recursive-aliases --disable-error-code used-before-def
+# flags: --disable-error-code used-before-def
 import a
 [file a.py]
 from typing import List
 from b import D
 
 def f(x: B) -> List[B]: ...
-B = List[C] # E
+B = List[C]
 class C(B): pass
 
 [file b.py]
 from a import f
 class D: ...
-reveal_type(f)  # N
+reveal_type(f)  # N: Revealed type is "def (x: builtins.list[a.C]) -> builtins.list[builtins.list[a.C]]"
 [builtins fixtures/list.pyi]
-[out]
-tmp/b.py:3: note: Revealed type is "def (x: builtins.list[Any]) -> builtins.list[builtins.list[Any]]"
-tmp/a.py:5: error: Cannot resolve name "B" (possible cyclic definition)
-tmp/a.py:5: error: Cannot resolve name "C" (possible cyclic definition)
 
 [case testNewAnalyzerAliasToNotReadyMixed]
 from typing import List, Union
@@ -2118,25 +2118,29 @@ class B(List[C]):
 [builtins fixtures/list.pyi]
 
 [case testNewAnalyzerNewTypeForwardClassAliasDirect]
-# flags: --disable-recursive-aliases --disable-error-code used-before-def
+# flags: --disable-error-code used-before-def
 from typing import NewType, List
 
-x: D
-reveal_type(x[0][0])
+def test() -> None:
+    x: D
+    reveal_type(x[0][0])
 
-D = List[C]
-C = NewType('C', 'B')
+    D = List[C]
+    C = NewType('C', 'B')
 
-class B(D):
-    pass
+    class B(D):
+        pass
 [builtins fixtures/list.pyi]
 [out]
-main:4: error: Cannot resolve name "D" (possible cyclic definition)
-main:5: note: Revealed type is "Any"
-main:7: error: Cannot resolve name "D" (possible cyclic definition)
-main:7: error: Cannot resolve name "C" (possible cyclic definition)
-main:8: error: Argument 2 to NewType(...) must be a valid type
-main:8: error: Cannot resolve name "B" (possible cyclic definition)
+main:5: error: Cannot resolve name "D" (possible cyclic definition)
+main:5: note: Recursive types are not allowed at function scope
+main:6: note: Revealed type is "Any"
+main:8: error: Cannot resolve name "D" (possible cyclic definition)
+main:8: note: Recursive types are not allowed at function scope
+main:8: error: Cannot resolve name "C" (possible cyclic definition)
+main:9: error: Argument 2 to NewType(...) must be a valid type
+main:9: error: Cannot resolve name "B" (possible cyclic definition)
+main:9: note: Recursive types are not allowed at function scope
 
 -- Copied from check-classes.test (tricky corner cases).
 [case testNewAnalyzerNoCrashForwardRefToBrokenDoubleNewTypeClass]
@@ -2154,22 +2158,24 @@ class C:
 [builtins fixtures/dict.pyi]
 
 [case testNewAnalyzerForwardTypeAliasInBase]
-# flags: --disable-recursive-aliases
 from typing import List, Generic, TypeVar, NamedTuple
 T = TypeVar('T')
 
-class C(A, B): # E: Cannot resolve name "A" (possible cyclic definition)
-    pass
-class G(Generic[T]): pass
-A = G[C] # E: Cannot resolve name "A" (possible cyclic definition)
-class B(NamedTuple):
-    x: int
+def test() -> None:
+    class C(A, B):  # E: Cannot resolve name "A" (possible cyclic definition) \
+                    # N: Recursive types are not allowed at function scope
+        pass
+    class G(Generic[T]): pass
+    A = G[C]  # E: Cannot resolve name "A" (possible cyclic definition) \
+              # N: Recursive types are not allowed at function scope
+    class B(NamedTuple):
+        x: int
 
-y: C
-reveal_type(y.x)  # N: Revealed type is "builtins.int"
-reveal_type(y[0])  # N: Revealed type is "builtins.int"
-x: A
-reveal_type(x)  # N: Revealed type is "__main__.G[Tuple[builtins.int, fallback=__main__.C]]"
+    y: C
+    reveal_type(y.x)  # N: Revealed type is "builtins.int"
+    reveal_type(y[0])  # N: Revealed type is "builtins.int"
+    x: A
+    reveal_type(x)  # N: Revealed type is "__main__.G@7[Tuple[builtins.int, fallback=__main__.C@5]]"
 [builtins fixtures/list.pyi]
 
 [case testNewAnalyzerDuplicateTypeVar]
@@ -2584,9 +2590,9 @@ import n
 def __getattr__(x): pass
 
 [case testNewAnalyzerReportLoopInMRO2]
-# flags: --disable-recursive-aliases
 def f() -> None:
-    class A(A): ... # E: Cannot resolve name "A" (possible cyclic definition)
+    class A(A): ...  # E: Cannot resolve name "A" (possible cyclic definition) \
+                     # N: Recursive types are not allowed at function scope
 
 [case testNewAnalyzerUnsupportedBaseClassInsideFunction]
 class C:
diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test
index 3ca0c5ef0a4b..46f5ff07f1ac 100644
--- a/test-data/unit/check-type-aliases.test
+++ b/test-data/unit/check-type-aliases.test
@@ -197,30 +197,35 @@ Alias = Tuple[int, T]
 [out]
 
 [case testRecursiveAliasesErrors1]
-# flags: --disable-recursive-aliases
-# Recursive aliases are not supported yet.
 from typing import Type, Callable, Union
 
-A = Union[A, int] # E: Cannot resolve name "A" (possible cyclic definition)
-B = Callable[[B], int] # E: Cannot resolve name "B" (possible cyclic definition)
-C = Type[C] # E: Cannot resolve name "C" (possible cyclic definition)
+def test() -> None:
+    A = Union[A, int]  # E: Cannot resolve name "A" (possible cyclic definition) \
+                       # N: Recursive types are not allowed at function scope
+    B = Callable[[B], int]  # E: Cannot resolve name "B" (possible cyclic definition) \
+                            # N: Recursive types are not allowed at function scope
+    C = Type[C]  # E: Cannot resolve name "C" (possible cyclic definition) \
+                 # N: Recursive types are not allowed at function scope
 
 [case testRecursiveAliasesErrors2]
-# flags: --disable-recursive-aliases --disable-error-code=used-before-def
-# Recursive aliases are not supported yet.
+# flags: --disable-error-code=used-before-def
 from typing import Type, Callable, Union
 
-A = Union[B, int]
-B = Callable[[C], int]
-C = Type[A]
-x: A
-reveal_type(x)
+def test() -> None:
+    A = Union[B, int]
+    B = Callable[[C], int]
+    C = Type[A]
+    x: A
+    reveal_type(x)
 [out]
 main:5: error: Cannot resolve name "A" (possible cyclic definition)
+main:5: note: Recursive types are not allowed at function scope
 main:5: error: Cannot resolve name "B" (possible cyclic definition)
 main:6: error: Cannot resolve name "B" (possible cyclic definition)
+main:6: note: Recursive types are not allowed at function scope
 main:6: error: Cannot resolve name "C" (possible cyclic definition)
 main:7: error: Cannot resolve name "C" (possible cyclic definition)
+main:7: note: Recursive types are not allowed at function scope
 main:9: note: Revealed type is "Union[Any, builtins.int]"
 
 [case testDoubleForwardAlias]
@@ -245,13 +250,16 @@ reveal_type(x[0].x) # N: Revealed type is "builtins.str"
 [out]
 
 [case testJSONAliasApproximation]
-# flags: --disable-recursive-aliases
 from typing import List, Union, Dict
-x: JSON # E: Cannot resolve name "JSON" (possible cyclic definition)
-JSON = Union[int, str, List[JSON], Dict[str, JSON]] # E: Cannot resolve name "JSON" (possible cyclic definition)
-reveal_type(x) # N: Revealed type is "Any"
-if isinstance(x, list):
-    reveal_type(x) # N: Revealed type is "builtins.list[Any]"
+
+def test() -> None:
+    x: JSON  # E: Cannot resolve name "JSON" (possible cyclic definition)  \
+             # N: Recursive types are not allowed at function scope
+    JSON = Union[int, str, List[JSON], Dict[str, JSON]]  # E: Cannot resolve name "JSON" (possible cyclic definition) \
+                                                         # N: Recursive types are not allowed at function scope
+    reveal_type(x) # N: Revealed type is "Any"
+    if isinstance(x, list):
+        reveal_type(x) # N: Revealed type is "builtins.list[Any]"
 [builtins fixtures/isinstancelist.pyi]
 [out]
 
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index 0e1d800e0468..088b52db0473 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -1443,34 +1443,34 @@ reveal_type(x['a']['b']) # N: Revealed type is "builtins.int"
 [builtins fixtures/dict.pyi]
 
 [case testSelfRecursiveTypedDictInheriting]
-
 from mypy_extensions import TypedDict
-# flags: --disable-recursive-aliases
-class MovieBase(TypedDict):
-    name: str
-    year: int
 
-class Movie(MovieBase):
-    director: 'Movie' # E: Cannot resolve name "Movie" (possible cyclic definition)
+def test() -> None:
+    class MovieBase(TypedDict):
+        name: str
+        year: int
 
-m: Movie
-reveal_type(m['director']['name']) # N: Revealed type is "Any"
+    class Movie(MovieBase):
+        director: 'Movie' # E: Cannot resolve name "Movie" (possible cyclic definition) \
+                          # N: Recursive types are not allowed at function scope
+    m: Movie
+    reveal_type(m['director']['name']) # N: Revealed type is "Any"
 [builtins fixtures/dict.pyi]
-[out]
 
 [case testSubclassOfRecursiveTypedDict]
-# flags: --disable-recursive-aliases
 from typing import List
 from mypy_extensions import TypedDict
 
-class Command(TypedDict):
-    subcommands: List['Command']  # E: Cannot resolve name "Command" (possible cyclic definition)
+def test() -> None:
+    class Command(TypedDict):
+        subcommands: List['Command']  # E: Cannot resolve name "Command" (possible cyclic definition) \
+                                      # N: Recursive types are not allowed at function scope
 
-class HelpCommand(Command):
-    pass
+    class HelpCommand(Command):
+        pass
 
-hc = HelpCommand(subcommands=[])
-reveal_type(hc)  # N: Revealed type is "TypedDict('__main__.HelpCommand', {'subcommands': builtins.list[Any]})"
+    hc = HelpCommand(subcommands=[])
+    reveal_type(hc)  # N: Revealed type is "TypedDict('__main__.HelpCommand@8', {'subcommands': builtins.list[Any]})"
 [builtins fixtures/list.pyi]
 [out]
 
diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test
index f6fd27e59e4d..d79ab14184c6 100644
--- a/test-data/unit/check-unions.test
+++ b/test-data/unit/check-unions.test
@@ -1003,9 +1003,11 @@ def takes_int(arg: int) -> None: pass
 takes_int(x)  # E: Argument 1 to "takes_int" has incompatible type "Union[ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[int], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[object], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[float], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[str], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[Any], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[bytes]]"; expected "int"
 
 [case testRecursiveForwardReferenceInUnion]
-# flags: --disable-recursive-aliases
 from typing import List, Union
-MYTYPE = List[Union[str, "MYTYPE"]] # E: Cannot resolve name "MYTYPE" (possible cyclic definition)
+
+def test() -> None:
+    MYTYPE = List[Union[str, "MYTYPE"]]  # E: Cannot resolve name "MYTYPE" (possible cyclic definition) \
+                                         # N: Recursive types are not allowed at function scope
 [builtins fixtures/list.pyi]
 
 [case testNonStrictOptional]
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
index 42f0ee8a9ec6..cf5e3c438fac 100644
--- a/test-data/unit/cmdline.test
+++ b/test-data/unit/cmdline.test
@@ -1472,14 +1472,6 @@ note: A user-defined top-level module with name "typing" is not supported
 Failed to find builtin module mypy_extensions, perhaps typeshed is broken?
 == Return code: 2
 
-[case testRecursiveAliasesFlagDeprecated]
-# cmd: mypy --enable-recursive-aliases a.py
-[file a.py]
-pass
-[out]
-Warning: --enable-recursive-aliases is deprecated; recursive types are enabled by default
-== Return code: 0
-
 [case testNotesOnlyResultInExitSuccess]
 # cmd: mypy a.py
 [file a.py]

From 93d4cb0a2ef1723ce92f39ae61fe6a0c010eb90b Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 28 Oct 2023 14:15:28 +0100
Subject: [PATCH 206/288] Enable new type inference by default (#16345)

Fixes https://github.com/python/mypy/issues/15906

I am adding `--old-type-inference` so people can disable the flag if
they have issues (for a few releases). IIRC there will be some fallback in
`mypy_primer`, but last time I checked it was all correct. Also I don't
remember whether we need to update some tests, but we will see.
---
 mypy/checker.py                             |  6 +++---
 mypy/checkexpr.py                           | 12 ++++++++----
 mypy/main.py                                | 14 ++++++++++++--
 mypy/options.py                             |  6 ++++--
 mypy_self_check.ini                         |  1 -
 test-data/unit/check-generics.test          |  9 +++++++--
 test-data/unit/check-inference-context.test | 19 +++++++++++++++++++
 test-data/unit/cmdline.test                 |  8 ++++++++
 8 files changed, 61 insertions(+), 14 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index e68dc4178962..fd633b209438 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -4043,11 +4043,11 @@ def is_valid_defaultdict_partial_value_type(self, t: ProperType) -> bool:
             return True
         if len(t.args) == 1:
             arg = get_proper_type(t.args[0])
-            if self.options.new_type_inference:
-                allowed = isinstance(arg, (UninhabitedType, NoneType))
-            else:
+            if self.options.old_type_inference:
                 # Allow leaked TypeVars for legacy inference logic.
                 allowed = isinstance(arg, (UninhabitedType, NoneType, TypeVarType))
+            else:
+                allowed = isinstance(arg, (UninhabitedType, NoneType))
             if allowed:
                 return True
         return False
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index ddcaa6ee30c9..9ece4680f59e 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -343,7 +343,7 @@ def __init__(
         # on whether current expression is a callee, to give better error messages
         # related to type context.
         self.is_callee = False
-        type_state.infer_polymorphic = self.chk.options.new_type_inference
+        type_state.infer_polymorphic = not self.chk.options.old_type_inference
 
     def reset(self) -> None:
         self.resolved_type = {}
@@ -2082,7 +2082,7 @@ def infer_function_type_arguments(
                 elif not first_arg or not is_subtype(self.named_type("builtins.str"), first_arg):
                     self.chk.fail(message_registry.KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE, context)
 
-            if self.chk.options.new_type_inference and any(
+            if not self.chk.options.old_type_inference and any(
                 a is None
                 or isinstance(get_proper_type(a), UninhabitedType)
                 or set(get_type_vars(a)) & set(callee_type.variables)
@@ -2181,7 +2181,11 @@ def infer_function_type_arguments_pass2(
                 lambda a: self.accept(args[a]),
             )
 
-        arg_types = self.infer_arg_types_in_context(callee_type, args, arg_kinds, formal_to_actual)
+        # Same as during first pass, disable type errors (we still have partial context).
+        with self.msg.filter_errors():
+            arg_types = self.infer_arg_types_in_context(
+                callee_type, args, arg_kinds, formal_to_actual
+            )
 
         inferred_args, _ = infer_function_type_arguments(
             callee_type,
@@ -5230,7 +5234,7 @@ def infer_lambda_type_using_context(
         # they must be considered as indeterminate. We use ErasedType since it
         # does not affect type inference results (it is for purposes like this
         # only).
-        if self.chk.options.new_type_inference:
+        if not self.chk.options.old_type_inference:
             # With new type inference we can preserve argument types even if they
             # are generic, since new inference algorithm can handle constraints
             # like S <: T (we still erase return type since it's ultimately unknown).
diff --git a/mypy/main.py b/mypy/main.py
index 718eb5a7c0c1..43ab761072ca 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -995,9 +995,13 @@ def add_invertible_flag(
         help="Use a custom typing module",
     )
     internals_group.add_argument(
-        "--new-type-inference",
+        "--old-type-inference",
         action="store_true",
-        help="Enable new experimental type inference algorithm",
+        help="Disable new experimental type inference algorithm",
+    )
+    # Deprecated reverse variant of the above.
+    internals_group.add_argument(
+        "--new-type-inference", action="store_true", help=argparse.SUPPRESS
     )
     parser.add_argument(
         "--enable-incomplete-feature",
@@ -1383,6 +1387,12 @@ def set_strict_flags() -> None:
     if options.logical_deps:
         options.cache_fine_grained = True
 
+    if options.new_type_inference:
+        print(
+            "Warning: --new-type-inference flag is deprecated;"
+            " new type inference algorithm is already enabled by default"
+        )
+
     if options.strict_concatenate and not strict_option_set:
         print("Warning: --strict-concatenate is deprecated; use --extra-checks instead")
 
diff --git a/mypy/options.py b/mypy/options.py
index 3447b5dfb1f6..31d5d584f897 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -62,7 +62,7 @@ class BuildType:
     | {
         "platform",
         "bazel",
-        "new_type_inference",
+        "old_type_inference",
         "plugins",
         "disable_bytearray_promotion",
         "disable_memoryview_promotion",
@@ -360,7 +360,9 @@ def __init__(self) -> None:
         # skip most errors after this many messages have been reported.
         # -1 means unlimited.
         self.many_errors_threshold = defaults.MANY_ERRORS_THRESHOLD
-        # Enable new experimental type inference algorithm.
+        # Disable new experimental type inference algorithm.
+        self.old_type_inference = False
+        # Deprecated reverse version of the above, do not use.
         self.new_type_inference = False
         # Export line-level, limited, fine-grained dependency information in cache data
         # (undocumented feature).
diff --git a/mypy_self_check.ini b/mypy_self_check.ini
index 093926d4c415..7f1f9689a757 100644
--- a/mypy_self_check.ini
+++ b/mypy_self_check.ini
@@ -8,7 +8,6 @@ always_false = MYPYC
 plugins = mypy.plugins.proper_plugin
 python_version = 3.8
 exclude = mypy/typeshed/|mypyc/test-data/|mypyc/lib-rt/
-new_type_inference = True
 enable_error_code = ignore-without-code,redundant-expr
 enable_incomplete_feature = PreciseTupleTypes
 show_error_code_links = True
diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test
index 0781451e07ce..ef3f359e4989 100644
--- a/test-data/unit/check-generics.test
+++ b/test-data/unit/check-generics.test
@@ -2724,7 +2724,7 @@ def f(x: Callable[[G[T]], int]) -> T: ...
 class G(Generic[T]):
     def g(self, x: S) -> Union[S, T]: ...
 
-f(lambda x: x.g(0))  # E: Incompatible return value type (got "Union[int, T]", expected "int")
+reveal_type(f(lambda x: x.g(0)))  # N: Revealed type is "builtins.int"
 
 [case testDictStarInference]
 class B: ...
@@ -3059,6 +3059,10 @@ def dec5(f: Callable[[int], T]) -> Callable[[int], List[T]]:
         return [f(x)] * x
     return g
 
+I = TypeVar("I", bound=int)
+def dec4_bound(f: Callable[[I], List[T]]) -> Callable[[I], T]:
+    ...
+
 reveal_type(dec1(lambda x: x))  # N: Revealed type is "def [T] (T`3) -> builtins.list[T`3]"
 reveal_type(dec2(lambda x: x))  # N: Revealed type is "def [S] (S`4) -> builtins.list[S`4]"
 reveal_type(dec3(lambda x: x[0]))  # N: Revealed type is "def [S] (S`6) -> S`6"
@@ -3066,7 +3070,8 @@ reveal_type(dec4(lambda x: [x]))  # N: Revealed type is "def [S] (S`9) -> S`9"
 reveal_type(dec1(lambda x: 1))  # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]"
 reveal_type(dec5(lambda x: x))  # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]"
 reveal_type(dec3(lambda x: x))  # N: Revealed type is "def [S] (S`16) -> builtins.list[S`16]"
-dec4(lambda x: x)  # E: Incompatible return value type (got "S", expected "List[object]")
+reveal_type(dec4(lambda x: x))  # N: Revealed type is "def [T] (builtins.list[T`19]) -> T`19"
+dec4_bound(lambda x: x)  # E: Value of type variable "I" of "dec4_bound" cannot be "List[T]"
 [builtins fixtures/list.pyi]
 
 [case testInferenceAgainstGenericParamSpecBasicInList]
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
index 773a9ffd8274..a933acbf7f32 100644
--- a/test-data/unit/check-inference-context.test
+++ b/test-data/unit/check-inference-context.test
@@ -1305,6 +1305,25 @@ def g(l: List[C], x: str) -> Optional[C]:
     return f(l, lambda c: reveal_type(c).x)  # N: Revealed type is "__main__.C"
 [builtins fixtures/list.pyi]
 
+[case testPartialTypeContextWithTwoLambdas]
+from typing import Any, Generic, TypeVar, Callable
+
+def int_to_any(x: int) -> Any: ...
+def any_to_int(x: Any) -> int: ...
+def any_to_str(x: Any) -> str: ...
+
+T = TypeVar("T")
+class W(Generic[T]):
+    def __init__(
+        self, serialize: Callable[[T], Any], deserialize: Callable[[Any], T]
+    ) -> None:
+        ...
+reveal_type(W(lambda x: int_to_any(x), lambda x: any_to_int(x)))  # N: Revealed type is "__main__.W[builtins.int]"
+W(
+    lambda x: int_to_any(x),  # E: Argument 1 to "int_to_any" has incompatible type "str"; expected "int"
+    lambda x: any_to_str(x)
+)
+
 [case testWideOuterContextEmpty]
 from typing import List, TypeVar
 
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
index cf5e3c438fac..91242eb62fcf 100644
--- a/test-data/unit/cmdline.test
+++ b/test-data/unit/cmdline.test
@@ -1472,6 +1472,14 @@ note: A user-defined top-level module with name "typing" is not supported
 Failed to find builtin module mypy_extensions, perhaps typeshed is broken?
 == Return code: 2
 
+[case testNewTypeInferenceFlagDeprecated]
+# cmd: mypy --new-type-inference a.py
+[file a.py]
+pass
+[out]
+Warning: --new-type-inference flag is deprecated; new type inference algorithm is already enabled by default
+== Return code: 0
+
 [case testNotesOnlyResultInExitSuccess]
 # cmd: mypy a.py
 [file a.py]

From 6c7faf3af1c442c0802998cbf384f73b79d67478 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Sat, 28 Oct 2023 14:19:17 +0100
Subject: [PATCH 207/288] Skip expensive repr() in logging call when not needed
 (#16350)

We were spending quite a lot of time in this function when running
tests, based on profiling.
---
 mypy/build.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/mypy/build.py b/mypy/build.py
index 1385021aac48..605368a6dc51 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -255,7 +255,8 @@ def _build(
         stdout=stdout,
         stderr=stderr,
     )
-    manager.trace(repr(options))
+    if manager.verbosity() >= 2:
+        manager.trace(repr(options))
 
     reset_global_state()
     try:

From f33c9a3b97f8226eb0156d50be7885ad96815f7c Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 28 Oct 2023 14:26:05 +0100
Subject: [PATCH 208/288] Some final touches for variadic types support
 (#16334)

I decided to go again over various parts of variadic types
implementation to double-check nothing is missing, checked interaction
with various "advanced" features (dataclasses, protocols, self-types,
match statement, etc.), added some more tests (including incremental),
and `grep`ed for potentially unhandled cases (and did find a few
crashes). This mostly touches only variadic types but one thing goes
beyond, the fix for self-types upper bound, I think it is correct and
should be safe.

If there are no objections, next PR will flip the switch.

---------

Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
---
 mypy/applytype.py                       |   7 +
 mypy/checker.py                         |  39 +++---
 mypy/checkexpr.py                       |   7 +-
 mypy/checkpattern.py                    | 111 ++++++++++++---
 mypy/constraints.py                     |  19 ++-
 mypy/erasetype.py                       |   4 +-
 mypy/join.py                            |  43 +++++-
 mypy/maptype.py                         |   3 +
 mypy/meet.py                            |  21 ++-
 mypy/semanal_shared.py                  |  20 ++-
 mypy/subtypes.py                        |  22 ++-
 mypy/typeops.py                         |   3 +-
 mypy/types_utils.py                     |   4 +-
 mypy/typevars.py                        |  19 ++-
 test-data/unit/check-incremental.test   |  43 ++++++
 test-data/unit/check-python310.test     | 117 ++++++++++++++++
 test-data/unit/check-selftype.test      |  20 +++
 test-data/unit/check-typevar-tuple.test | 174 ++++++++++++++++++++++++
 test-data/unit/fine-grained.test        | 122 +++++++++++++++++
 19 files changed, 726 insertions(+), 72 deletions(-)

diff --git a/mypy/applytype.py b/mypy/applytype.py
index 884be287e33d..c7da67d6140b 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -3,6 +3,7 @@
 from typing import Callable, Sequence
 
 import mypy.subtypes
+from mypy.erasetype import erase_typevars
 from mypy.expandtype import expand_type
 from mypy.nodes import Context
 from mypy.types import (
@@ -62,6 +63,11 @@ def get_target_type(
         report_incompatible_typevar_value(callable, type, tvar.name, context)
     else:
         upper_bound = tvar.upper_bound
+        if tvar.name == "Self":
+            # Internally constructed Self-types contain class type variables in upper bound,
+            # so we need to erase them to avoid false positives. This is safe because we do
+            # not support type variables in upper bounds of user defined types.
+            upper_bound = erase_typevars(upper_bound)
         if not mypy.subtypes.is_subtype(type, upper_bound):
             if skip_unsatisfied:
                 return None
@@ -121,6 +127,7 @@ def apply_generic_arguments(
     # Apply arguments to argument types.
     var_arg = callable.var_arg()
     if var_arg is not None and isinstance(var_arg.typ, UnpackType):
+        # Same as for ParamSpec, callable with variadic types needs to be expanded as a whole.
         callable = expand_type(callable, id_to_type)
         assert isinstance(callable, CallableType)
         return callable.copy_modified(variables=[tv for tv in tvars if tv.id not in id_to_type])
diff --git a/mypy/checker.py b/mypy/checker.py
index fd633b209438..62ba642256bf 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -1852,7 +1852,6 @@ def expand_typevars(
         if defn.info:
             # Class type variables
             tvars += defn.info.defn.type_vars or []
-        # TODO(PEP612): audit for paramspec
         for tvar in tvars:
             if isinstance(tvar, TypeVarType) and tvar.values:
                 subst.append([(tvar.id, value) for value in tvar.values])
@@ -2538,6 +2537,9 @@ def check_protocol_variance(self, defn: ClassDef) -> None:
         object_type = Instance(info.mro[-1], [])
         tvars = info.defn.type_vars
         for i, tvar in enumerate(tvars):
+            if not isinstance(tvar, TypeVarType):
+                # Variance of TypeVarTuple and ParamSpec is underspecified by PEPs.
+                continue
             up_args: list[Type] = [
                 object_type if i == j else AnyType(TypeOfAny.special_form)
                 for j, _ in enumerate(tvars)
@@ -2554,7 +2556,7 @@ def check_protocol_variance(self, defn: ClassDef) -> None:
                 expected = CONTRAVARIANT
             else:
                 expected = INVARIANT
-            if isinstance(tvar, TypeVarType) and expected != tvar.variance:
+            if expected != tvar.variance:
                 self.msg.bad_proto_variance(tvar.variance, tvar.name, expected, defn)
 
     def check_multiple_inheritance(self, typ: TypeInfo) -> None:
@@ -6695,19 +6697,6 @@ def check_possible_missing_await(
                 return
         self.msg.possible_missing_await(context, code)
 
-    def contains_none(self, t: Type) -> bool:
-        t = get_proper_type(t)
-        return (
-            isinstance(t, NoneType)
-            or (isinstance(t, UnionType) and any(self.contains_none(ut) for ut in t.items))
-            or (isinstance(t, TupleType) and any(self.contains_none(tt) for tt in t.items))
-            or (
-                isinstance(t, Instance)
-                and bool(t.args)
-                and any(self.contains_none(it) for it in t.args)
-            )
-        )
-
     def named_type(self, name: str) -> Instance:
         """Return an instance type with given name and implicit Any type args.
 
@@ -7471,10 +7460,22 @@ def builtin_item_type(tp: Type) -> Type | None:
                 return None
             if not isinstance(get_proper_type(tp.args[0]), AnyType):
                 return tp.args[0]
-    elif isinstance(tp, TupleType) and all(
-        not isinstance(it, AnyType) for it in get_proper_types(tp.items)
-    ):
-        return make_simplified_union(tp.items)  # this type is not externally visible
+    elif isinstance(tp, TupleType):
+        normalized_items = []
+        for it in tp.items:
+            # This use case is probably rare, but not handling unpacks here can cause crashes.
+            if isinstance(it, UnpackType):
+                unpacked = get_proper_type(it.type)
+                if isinstance(unpacked, TypeVarTupleType):
+                    unpacked = get_proper_type(unpacked.upper_bound)
+                assert (
+                    isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple"
+                )
+                normalized_items.append(unpacked.args[0])
+            else:
+                normalized_items.append(it)
+        if all(not isinstance(it, AnyType) for it in get_proper_types(normalized_items)):
+            return make_simplified_union(normalized_items)  # this type is not externally visible
     elif isinstance(tp, TypedDictType):
         # TypedDict always has non-optional string keys. Find the key type from the Mapping
         # base class.
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 9ece4680f59e..df6000050986 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -410,7 +410,7 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type:
             result = self.alias_type_in_runtime_context(
                 node, ctx=e, alias_definition=e.is_alias_rvalue or lvalue
             )
-        elif isinstance(node, (TypeVarExpr, ParamSpecExpr)):
+        elif isinstance(node, (TypeVarExpr, ParamSpecExpr, TypeVarTupleExpr)):
             result = self.object_type()
         else:
             if isinstance(node, PlaceholderNode):
@@ -3316,6 +3316,7 @@ def infer_literal_expr_type(self, value: LiteralValue, fallback_name: str) -> Ty
 
     def concat_tuples(self, left: TupleType, right: TupleType) -> TupleType:
         """Concatenate two fixed length tuples."""
+        assert not (find_unpack_in_list(left.items) and find_unpack_in_list(right.items))
         return TupleType(
             items=left.items + right.items, fallback=self.named_type("builtins.tuple")
         )
@@ -6507,8 +6508,8 @@ def merge_typevars_in_callables_by_name(
             for tv in target.variables:
                 name = tv.fullname
                 if name not in unique_typevars:
-                    # TODO(PEP612): fix for ParamSpecType
-                    if isinstance(tv, ParamSpecType):
+                    # TODO: support ParamSpecType and TypeVarTuple.
+                    if isinstance(tv, (ParamSpecType, TypeVarTupleType)):
                         continue
                     assert isinstance(tv, TypeVarType)
                     unique_typevars[name] = tv
diff --git a/mypy/checkpattern.py b/mypy/checkpattern.py
index 3f9a99b21530..c0061f1c3e72 100644
--- a/mypy/checkpattern.py
+++ b/mypy/checkpattern.py
@@ -45,9 +45,13 @@
     Type,
     TypedDictType,
     TypeOfAny,
+    TypeVarTupleType,
     UninhabitedType,
     UnionType,
+    UnpackType,
+    find_unpack_in_list,
     get_proper_type,
+    split_with_prefix_and_suffix,
 )
 from mypy.typevars import fill_typevars
 from mypy.visitor import PatternVisitor
@@ -239,13 +243,29 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType:
         #
         # get inner types of original type
         #
+        unpack_index = None
         if isinstance(current_type, TupleType):
             inner_types = current_type.items
-            size_diff = len(inner_types) - required_patterns
-            if size_diff < 0:
-                return self.early_non_match()
-            elif size_diff > 0 and star_position is None:
-                return self.early_non_match()
+            unpack_index = find_unpack_in_list(inner_types)
+            if unpack_index is None:
+                size_diff = len(inner_types) - required_patterns
+                if size_diff < 0:
+                    return self.early_non_match()
+                elif size_diff > 0 and star_position is None:
+                    return self.early_non_match()
+            else:
+                normalized_inner_types = []
+                for it in inner_types:
+                    # Unfortunately, it is not possible to "split" the TypeVarTuple
+                    # into individual items, so we just use its upper bound for the whole
+                    # analysis instead.
+                    if isinstance(it, UnpackType) and isinstance(it.type, TypeVarTupleType):
+                        it = UnpackType(it.type.upper_bound)
+                    normalized_inner_types.append(it)
+                inner_types = normalized_inner_types
+                current_type = current_type.copy_modified(items=normalized_inner_types)
+                if len(inner_types) - 1 > required_patterns and star_position is None:
+                    return self.early_non_match()
         else:
             inner_type = self.get_sequence_type(current_type, o)
             if inner_type is None:
@@ -270,10 +290,10 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType:
             self.update_type_map(captures, type_map)
 
         new_inner_types = self.expand_starred_pattern_types(
-            contracted_new_inner_types, star_position, len(inner_types)
+            contracted_new_inner_types, star_position, len(inner_types), unpack_index is not None
         )
         rest_inner_types = self.expand_starred_pattern_types(
-            contracted_rest_inner_types, star_position, len(inner_types)
+            contracted_rest_inner_types, star_position, len(inner_types), unpack_index is not None
         )
 
         #
@@ -281,7 +301,7 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType:
         #
         new_type: Type
         rest_type: Type = current_type
-        if isinstance(current_type, TupleType):
+        if isinstance(current_type, TupleType) and unpack_index is None:
             narrowed_inner_types = []
             inner_rest_types = []
             for inner_type, new_inner_type in zip(inner_types, new_inner_types):
@@ -301,6 +321,14 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType:
             if all(is_uninhabited(typ) for typ in inner_rest_types):
                 # All subpatterns always match, so we can apply negative narrowing
                 rest_type = TupleType(rest_inner_types, current_type.partial_fallback)
+        elif isinstance(current_type, TupleType):
+            # For variadic tuples it is too tricky to match individual items like for fixed
+            # tuples, so we instead try to narrow the entire type.
+            # TODO: use more precise narrowing when possible (e.g. for identical shapes).
+            new_tuple_type = TupleType(new_inner_types, current_type.partial_fallback)
+            new_type, rest_type = self.chk.conditional_types_with_intersection(
+                new_tuple_type, [get_type_range(current_type)], o, default=new_tuple_type
+            )
         else:
             new_inner_type = UninhabitedType()
             for typ in new_inner_types:
@@ -345,17 +373,45 @@ def contract_starred_pattern_types(
 
         If star_pos in None the types are returned unchanged.
         """
-        if star_pos is None:
-            return types
-        new_types = types[:star_pos]
-        star_length = len(types) - num_patterns
-        new_types.append(make_simplified_union(types[star_pos : star_pos + star_length]))
-        new_types += types[star_pos + star_length :]
-
-        return new_types
+        unpack_index = find_unpack_in_list(types)
+        if unpack_index is not None:
+            # Variadic tuples require "re-shaping" to match the requested pattern.
+            unpack = types[unpack_index]
+            assert isinstance(unpack, UnpackType)
+            unpacked = get_proper_type(unpack.type)
+            # This should be guaranteed by the normalization in the caller.
+            assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple"
+            if star_pos is None:
+                missing = num_patterns - len(types) + 1
+                new_types = types[:unpack_index]
+                new_types += [unpacked.args[0]] * missing
+                new_types += types[unpack_index + 1 :]
+                return new_types
+            prefix, middle, suffix = split_with_prefix_and_suffix(
+                tuple([UnpackType(unpacked) if isinstance(t, UnpackType) else t for t in types]),
+                star_pos,
+                num_patterns - star_pos,
+            )
+            new_middle = []
+            for m in middle:
+                # The existing code expects the star item type, rather than the type of
+                # the whole tuple "slice".
+                if isinstance(m, UnpackType):
+                    new_middle.append(unpacked.args[0])
+                else:
+                    new_middle.append(m)
+            return list(prefix) + [make_simplified_union(new_middle)] + list(suffix)
+        else:
+            if star_pos is None:
+                return types
+            new_types = types[:star_pos]
+            star_length = len(types) - num_patterns
+            new_types.append(make_simplified_union(types[star_pos : star_pos + star_length]))
+            new_types += types[star_pos + star_length :]
+            return new_types
 
     def expand_starred_pattern_types(
-        self, types: list[Type], star_pos: int | None, num_types: int
+        self, types: list[Type], star_pos: int | None, num_types: int, original_unpack: bool
     ) -> list[Type]:
         """Undoes the contraction done by contract_starred_pattern_types.
 
@@ -364,6 +420,17 @@ def expand_starred_pattern_types(
         """
         if star_pos is None:
             return types
+        if original_unpack:
+            # In the case where original tuple type has an unpack item, it is not practical
+            # to coerce pattern type back to the original shape (and may not even be possible),
+            # so we only restore the type of the star item.
+            res = []
+            for i, t in enumerate(types):
+                if i != star_pos:
+                    res.append(t)
+                else:
+                    res.append(UnpackType(self.chk.named_generic_type("builtins.tuple", [t])))
+            return res
         new_types = types[:star_pos]
         star_length = num_types - len(types) + 1
         new_types += [types[star_pos]] * star_length
@@ -459,7 +526,15 @@ def visit_class_pattern(self, o: ClassPattern) -> PatternType:
             return self.early_non_match()
         if isinstance(type_info, TypeInfo):
             any_type = AnyType(TypeOfAny.implementation_artifact)
-            typ: Type = Instance(type_info, [any_type] * len(type_info.defn.type_vars))
+            args: list[Type] = []
+            for tv in type_info.defn.type_vars:
+                if isinstance(tv, TypeVarTupleType):
+                    args.append(
+                        UnpackType(self.chk.named_generic_type("builtins.tuple", [any_type]))
+                    )
+                else:
+                    args.append(any_type)
+            typ: Type = Instance(type_info, args)
         elif isinstance(type_info, TypeAlias):
             typ = type_info.target
         elif (
diff --git a/mypy/constraints.py b/mypy/constraints.py
index 6f611736a72a..49e542a49e56 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -28,6 +28,7 @@
     Instance,
     LiteralType,
     NoneType,
+    NormalizedCallableType,
     Overloaded,
     Parameters,
     ParamSpecType,
@@ -1388,7 +1389,7 @@ def find_matching_overload_items(
     return res
 
 
-def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo | None:
+def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo:
     """Get builtins.tuple type from available types to construct homogeneous tuples."""
     tp = get_proper_type(unpack.type)
     if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple":
@@ -1399,10 +1400,10 @@ def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo | None:
         for base in tp.partial_fallback.type.mro:
             if base.fullname == "builtins.tuple":
                 return base
-    return None
+    assert False, "Invalid unpack type"
 
 
-def repack_callable_args(callable: CallableType, tuple_type: TypeInfo | None) -> list[Type]:
+def repack_callable_args(callable: CallableType, tuple_type: TypeInfo) -> list[Type]:
     """Present callable with star unpack in a normalized form.
 
     Since positional arguments cannot follow star argument, they are packed in a suffix,
@@ -1417,12 +1418,8 @@ def repack_callable_args(callable: CallableType, tuple_type: TypeInfo | None) ->
     star_type = callable.arg_types[star_index]
     suffix_types = []
     if not isinstance(star_type, UnpackType):
-        if tuple_type is not None:
-            # Re-normalize *args: X -> *args: *tuple[X, ...]
-            star_type = UnpackType(Instance(tuple_type, [star_type]))
-        else:
-            # This is unfortunate, something like tuple[Any, ...] would be better.
-            star_type = UnpackType(AnyType(TypeOfAny.from_error))
+        # Re-normalize *args: X -> *args: *tuple[X, ...]
+        star_type = UnpackType(Instance(tuple_type, [star_type]))
     else:
         tp = get_proper_type(star_type.type)
         if isinstance(tp, TupleType):
@@ -1544,7 +1541,9 @@ def infer_directed_arg_constraints(left: Type, right: Type, direction: int) -> l
 
 
 def infer_callable_arguments_constraints(
-    template: CallableType | Parameters, actual: CallableType | Parameters, direction: int
+    template: NormalizedCallableType | Parameters,
+    actual: NormalizedCallableType | Parameters,
+    direction: int,
 ) -> list[Constraint]:
     """Infer constraints between argument types of two callables.
 
diff --git a/mypy/erasetype.py b/mypy/erasetype.py
index 7231ede66c65..b41eefcd4821 100644
--- a/mypy/erasetype.py
+++ b/mypy/erasetype.py
@@ -100,7 +100,9 @@ def visit_parameters(self, t: Parameters) -> ProperType:
         raise RuntimeError("Parameters should have been bound to a class")
 
     def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType:
-        return AnyType(TypeOfAny.special_form)
+        # Likely, we can never get here because of aggressive erasure of types that
+        # can contain this, but better still return a valid replacement.
+        return t.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)])
 
     def visit_unpack_type(self, t: UnpackType) -> ProperType:
         return AnyType(TypeOfAny.special_form)
diff --git a/mypy/join.py b/mypy/join.py
index 2e2939f9fbc8..d33cbd98726d 100644
--- a/mypy/join.py
+++ b/mypy/join.py
@@ -36,6 +36,7 @@
     TypedDictType,
     TypeOfAny,
     TypeType,
+    TypeVarLikeType,
     TypeVarTupleType,
     TypeVarType,
     TypeVisitor,
@@ -715,11 +716,9 @@ def is_similar_callables(t: CallableType, s: CallableType) -> bool:
 
 
 def join_similar_callables(t: CallableType, s: CallableType) -> CallableType:
-    from mypy.meet import meet_types
-
     arg_types: list[Type] = []
     for i in range(len(t.arg_types)):
-        arg_types.append(meet_types(t.arg_types[i], s.arg_types[i]))
+        arg_types.append(safe_meet(t.arg_types[i], s.arg_types[i]))
     # TODO in combine_similar_callables also applies here (names and kinds; user metaclasses)
     # The fallback type can be either 'function', 'type', or some user-provided metaclass.
     # The result should always use 'function' as a fallback if either operands are using it.
@@ -736,10 +735,42 @@ def join_similar_callables(t: CallableType, s: CallableType) -> CallableType:
     )
 
 
+def safe_join(t: Type, s: Type) -> Type:
+    # This is a temporary solution to prevent crashes in combine_similar_callables() etc.,
+    # until relevant TODOs on handling arg_kinds will be addressed there.
+    if not isinstance(t, UnpackType) and not isinstance(s, UnpackType):
+        return join_types(t, s)
+    if isinstance(t, UnpackType) and isinstance(s, UnpackType):
+        return UnpackType(join_types(t.type, s.type))
+    return object_or_any_from_type(get_proper_type(t))
+
+
+def safe_meet(t: Type, s: Type) -> Type:
+    # Similar to above but for meet_types().
+    from mypy.meet import meet_types
+
+    if not isinstance(t, UnpackType) and not isinstance(s, UnpackType):
+        return meet_types(t, s)
+    if isinstance(t, UnpackType) and isinstance(s, UnpackType):
+        unpacked = get_proper_type(t.type)
+        if isinstance(unpacked, TypeVarTupleType):
+            fallback_type = unpacked.tuple_fallback.type
+        elif isinstance(unpacked, TupleType):
+            fallback_type = unpacked.partial_fallback.type
+        else:
+            assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple"
+            fallback_type = unpacked.type
+        res = meet_types(t.type, s.type)
+        if isinstance(res, UninhabitedType):
+            res = Instance(fallback_type, [res])
+        return UnpackType(res)
+    return UninhabitedType()
+
+
 def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType:
     arg_types: list[Type] = []
     for i in range(len(t.arg_types)):
-        arg_types.append(join_types(t.arg_types[i], s.arg_types[i]))
+        arg_types.append(safe_join(t.arg_types[i], s.arg_types[i]))
     # TODO kinds and argument names
     # TODO what should happen if one fallback is 'type' and the other is a user-provided metaclass?
     # The fallback type can be either 'function', 'type', or some user-provided metaclass.
@@ -806,7 +837,7 @@ def object_or_any_from_type(typ: ProperType) -> ProperType:
         return object_from_instance(typ.partial_fallback)
     elif isinstance(typ, TypeType):
         return object_or_any_from_type(typ.item)
-    elif isinstance(typ, TypeVarType) and isinstance(typ.upper_bound, ProperType):
+    elif isinstance(typ, TypeVarLikeType) and isinstance(typ.upper_bound, ProperType):
         return object_or_any_from_type(typ.upper_bound)
     elif isinstance(typ, UnionType):
         for item in typ.items:
@@ -814,6 +845,8 @@ def object_or_any_from_type(typ: ProperType) -> ProperType:
                 candidate = object_or_any_from_type(item)
                 if isinstance(candidate, Instance):
                     return candidate
+    elif isinstance(typ, UnpackType):
+        return object_or_any_from_type(get_proper_type(typ.type))
     return AnyType(TypeOfAny.implementation_artifact)
 
 
diff --git a/mypy/maptype.py b/mypy/maptype.py
index 0d54a83127df..59ecb2bc9993 100644
--- a/mypy/maptype.py
+++ b/mypy/maptype.py
@@ -31,6 +31,9 @@ def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Insta
                     import mypy.typeops
 
                     return mypy.typeops.tuple_fallback(tuple_type)
+                elif isinstance(tuple_type, Instance):
+                    # This can happen after normalizing variadic tuples.
+                    return tuple_type
 
     if not superclass.type_vars:
         # Fast path: `superclass` has no type variables to map to.
diff --git a/mypy/meet.py b/mypy/meet.py
index fa9bd6a83743..d2fb16808425 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -869,16 +869,17 @@ def meet_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None:
             return None
         if s_unpack_index is not None and t_unpack_index is not None:
             # The only simple case we can handle if both tuples are variadic
-            # is when they are purely variadic. Other cases are tricky because
+            # is when their structure fully matches. Other cases are tricky because
             # a variadic item is effectively a union of tuples of all length, thus
             # potentially causing overlap between a suffix in `s` and a prefix
             # in `t` (see how this is handled in is_subtype() for details).
             # TODO: handle more cases (like when both prefix/suffix are shorter in s or t).
-            if s.length() == 1 and t.length() == 1:
-                s_unpack = s.items[0]
+            if s.length() == t.length() and s_unpack_index == t_unpack_index:
+                unpack_index = s_unpack_index
+                s_unpack = s.items[unpack_index]
                 assert isinstance(s_unpack, UnpackType)
                 s_unpacked = get_proper_type(s_unpack.type)
-                t_unpack = t.items[0]
+                t_unpack = t.items[unpack_index]
                 assert isinstance(t_unpack, UnpackType)
                 t_unpacked = get_proper_type(t_unpack.type)
                 if not (isinstance(s_unpacked, Instance) and isinstance(t_unpacked, Instance)):
@@ -886,7 +887,13 @@ def meet_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None:
                 meet = self.meet(s_unpacked, t_unpacked)
                 if not isinstance(meet, Instance):
                     return None
-                return [UnpackType(meet)]
+                m_prefix: list[Type] = []
+                for si, ti in zip(s.items[:unpack_index], t.items[:unpack_index]):
+                    m_prefix.append(meet_types(si, ti))
+                m_suffix: list[Type] = []
+                for si, ti in zip(s.items[unpack_index + 1 :], t.items[unpack_index + 1 :]):
+                    m_suffix.append(meet_types(si, ti))
+                return m_prefix + [UnpackType(meet)] + m_suffix
             return None
         if s_unpack_index is not None:
             variadic = s
@@ -1006,11 +1013,11 @@ def default(self, typ: Type) -> ProperType:
 
 
 def meet_similar_callables(t: CallableType, s: CallableType) -> CallableType:
-    from mypy.join import join_types
+    from mypy.join import safe_join
 
     arg_types: list[Type] = []
     for i in range(len(t.arg_types)):
-        arg_types.append(join_types(t.arg_types[i], s.arg_types[i]))
+        arg_types.append(safe_join(t.arg_types[i], s.arg_types[i]))
     # TODO in combine_similar_callables also applies here (names and kinds)
     # The fallback type can be either 'function' or 'type'. The result should have 'function' as
     # fallback only if both operands have it as 'function'.
diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py
index 425e5906926a..e8edfe65c8d4 100644
--- a/mypy/semanal_shared.py
+++ b/mypy/semanal_shared.py
@@ -45,6 +45,8 @@
     TypeOfAny,
     TypeVarId,
     TypeVarLikeType,
+    TypeVarTupleType,
+    UnpackType,
     get_proper_type,
 )
 
@@ -286,7 +288,23 @@ def calculate_tuple_fallback(typ: TupleType) -> None:
     """
     fallback = typ.partial_fallback
     assert fallback.type.fullname == "builtins.tuple"
-    fallback.args = (join.join_type_list(list(typ.items)),) + fallback.args[1:]
+    items = []
+    for item in typ.items:
+        # TODO: this duplicates some logic in typeops.tuple_fallback().
+        if isinstance(item, UnpackType):
+            unpacked_type = get_proper_type(item.type)
+            if isinstance(unpacked_type, TypeVarTupleType):
+                unpacked_type = get_proper_type(unpacked_type.upper_bound)
+            if (
+                isinstance(unpacked_type, Instance)
+                and unpacked_type.type.fullname == "builtins.tuple"
+            ):
+                items.append(unpacked_type.args[0])
+            else:
+                raise NotImplementedError
+        else:
+            items.append(item)
+    fallback.args = (join.join_type_list(items),)
 
 
 class _NamedTypeCallback(Protocol):
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 383e6eddd317..6d129683c3f5 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -734,9 +734,13 @@ def visit_tuple_type(self, left: TupleType) -> bool:
                 for li in left.items:
                     if isinstance(li, UnpackType):
                         unpack = get_proper_type(li.type)
-                        if isinstance(unpack, Instance):
-                            assert unpack.type.fullname == "builtins.tuple"
-                            li = unpack.args[0]
+                        if isinstance(unpack, TypeVarTupleType):
+                            unpack = get_proper_type(unpack.upper_bound)
+                        assert (
+                            isinstance(unpack, Instance)
+                            and unpack.type.fullname == "builtins.tuple"
+                        )
+                        li = unpack.args[0]
                     if not self._is_subtype(li, iter_type):
                         return False
                 return True
@@ -1578,6 +1582,18 @@ def are_parameters_compatible(
         return True
     trivial_suffix = is_trivial_suffix(right) and not is_proper_subtype
 
+    if (
+        right.arg_kinds == [ARG_STAR]
+        and isinstance(get_proper_type(right.arg_types[0]), AnyType)
+        and not is_proper_subtype
+    ):
+        # Similar to how (*Any, **Any) is considered a supertype of all callables, we consider
+        # (*Any) a supertype of all callables with positional arguments. This is needed in
+        # particular because we often refuse to try type inference if actual type is not
+        # a subtype of erased template type.
+        if all(k.is_positional() for k in left.arg_kinds) and ignore_pos_arg_names:
+            return True
+
     # Match up corresponding arguments and check them for compatibility. In
     # every pair (argL, argR) of corresponding arguments from L and R, argL must
     # be "more general" than argR if L is to be a subtype of R.
diff --git a/mypy/typeops.py b/mypy/typeops.py
index dff43775fe3d..2eb3b284e729 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -39,6 +39,7 @@
     Instance,
     LiteralType,
     NoneType,
+    NormalizedCallableType,
     Overloaded,
     Parameters,
     ParamSpecType,
@@ -364,7 +365,7 @@ def erase_to_bound(t: Type) -> Type:
 
 
 def callable_corresponding_argument(
-    typ: CallableType | Parameters, model: FormalArgument
+    typ: NormalizedCallableType | Parameters, model: FormalArgument
 ) -> FormalArgument | None:
     """Return the argument a function that corresponds to `model`"""
 
diff --git a/mypy/types_utils.py b/mypy/types_utils.py
index f289ac3e9ed1..1cd56eae5835 100644
--- a/mypy/types_utils.py
+++ b/mypy/types_utils.py
@@ -144,8 +144,7 @@ def store_argument_type(
         elif isinstance(arg_type, UnpackType):
             unpacked_type = get_proper_type(arg_type.type)
             if isinstance(unpacked_type, TupleType):
-                # Instead of using Tuple[Unpack[Tuple[...]]], just use
-                # Tuple[...]
+                # Instead of using Tuple[Unpack[Tuple[...]]], just use Tuple[...]
                 arg_type = unpacked_type
             elif (
                 isinstance(unpacked_type, Instance)
@@ -153,6 +152,7 @@ def store_argument_type(
             ):
                 arg_type = unpacked_type
             else:
+                # TODO: verify that we can only have a TypeVarTuple here.
                 arg_type = TupleType(
                     [arg_type],
                     fallback=named_type("builtins.tuple", [named_type("builtins.object", [])]),
diff --git a/mypy/typevars.py b/mypy/typevars.py
index 027a8e3f7fc5..3d74a40c303f 100644
--- a/mypy/typevars.py
+++ b/mypy/typevars.py
@@ -6,6 +6,7 @@
     AnyType,
     Instance,
     ParamSpecType,
+    ProperType,
     TupleType,
     Type,
     TypeOfAny,
@@ -55,6 +56,7 @@ def fill_typevars(typ: TypeInfo) -> Instance | TupleType:
             )
         tvs.append(tv)
     inst = Instance(typ, tvs)
+    # TODO: do we need to also handle typeddict_type here and below?
     if typ.tuple_type is None:
         return inst
     return typ.tuple_type.copy_modified(fallback=inst)
@@ -62,10 +64,23 @@ def fill_typevars(typ: TypeInfo) -> Instance | TupleType:
 
 def fill_typevars_with_any(typ: TypeInfo) -> Instance | TupleType:
     """Apply a correct number of Any's as type arguments to a type."""
-    inst = Instance(typ, [AnyType(TypeOfAny.special_form)] * len(typ.defn.type_vars))
+    args: list[Type] = []
+    for tv in typ.defn.type_vars:
+        # Valid erasure for *Ts is *tuple[Any, ...], not just Any.
+        if isinstance(tv, TypeVarTupleType):
+            args.append(
+                UnpackType(tv.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)]))
+            )
+        else:
+            args.append(AnyType(TypeOfAny.special_form))
+    inst = Instance(typ, args)
     if typ.tuple_type is None:
         return inst
-    return typ.tuple_type.copy_modified(fallback=inst)
+    erased_tuple_type = erase_typevars(typ.tuple_type, {tv.id for tv in typ.defn.type_vars})
+    assert isinstance(erased_tuple_type, ProperType)
+    if isinstance(erased_tuple_type, TupleType):
+        return typ.tuple_type.copy_modified(fallback=inst)
+    return inst
 
 
 def has_no_typevars(typ: Type) -> bool:
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index f2625b869c19..eb7a795f99c0 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -6454,6 +6454,49 @@ class C(Generic[P]):
     def __init__(self, fn: Callable[P, int]) -> None: ...
 [builtins fixtures/dict.pyi]
 
+[case testVariadicClassIncrementalUpdateRegularToVariadic]
+from typing import Any
+from lib import C
+
+x: C[int, str]
+
+[file lib.py]
+from typing import Generic, TypeVar
+
+T = TypeVar("T")
+S = TypeVar("S")
+class C(Generic[T, S]): ...
+
+[file lib.py.2]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Unpack[Ts]]): ...
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicClassIncrementalUpdateVariadicToRegular]
+from typing import Any
+from lib import C
+
+x: C[int, str, int]
+
+[file lib.py]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Unpack[Ts]]): ...
+[file lib.py.2]
+from typing import Generic, TypeVar
+
+T = TypeVar("T")
+S = TypeVar("S")
+class C(Generic[T, S]): ...
+[builtins fixtures/tuple.pyi]
+[out2]
+main:4: error: "C" expects 2 type arguments, but 3 given
+
 [case testVariadicTupleIncrementalUpdateNoCrash]
 import m
 [file m.py]
diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test
index 640e64c78d5f..d3cdf3af849d 100644
--- a/test-data/unit/check-python310.test
+++ b/test-data/unit/check-python310.test
@@ -700,6 +700,21 @@ match m:
         reveal_type(m)  # N: Revealed type is "__main__.A[Any]"
         reveal_type(i)  # N: Revealed type is "Any"
 
+[case testMatchClassPatternCaptureVariadicGeneric]
+from typing import Generic, Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple('Ts')
+class A(Generic[Unpack[Ts]]):
+    a: Tuple[Unpack[Ts]]
+
+m: object
+match m:
+    case A(a=i):
+        reveal_type(m)  # N: Revealed type is "__main__.A[Unpack[builtins.tuple[Any, ...]]]"
+        reveal_type(i)  # N: Revealed type is "builtins.tuple[Any, ...]"
+[builtins fixtures/tuple.pyi]
+
 [case testMatchClassPatternCaptureGenericAlreadyKnown]
 from typing import Generic, TypeVar
 
@@ -2026,3 +2041,105 @@ def f4(e: int | str | bytes) -> int:
     return 0
 
 [builtins fixtures/primitives.pyi]
+
+[case testMatchSequencePatternVariadicTupleNotTooShort]
+from typing import Tuple
+from typing_extensions import Unpack
+
+fm1: Tuple[int, int, Unpack[Tuple[str, ...]], int]
+match fm1:
+    case [fa1, fb1, fc1]:
+        reveal_type(fa1)  # N: Revealed type is "builtins.int"
+        reveal_type(fb1)  # N: Revealed type is "builtins.int"
+        reveal_type(fc1)  # N: Revealed type is "builtins.int"
+
+fm2: Tuple[int, int, Unpack[Tuple[str, ...]], int]
+match fm2:
+    case [fa2, fb2]:
+        reveal_type(fa2)
+        reveal_type(fb2)
+
+fm3: Tuple[int, int, Unpack[Tuple[str, ...]], int]
+match fm3:
+    case [fa3, fb3, fc3, fd3, fe3]:
+        reveal_type(fa3)  # N: Revealed type is "builtins.int"
+        reveal_type(fb3)  # N: Revealed type is "builtins.int"
+        reveal_type(fc3)  # N: Revealed type is "builtins.str"
+        reveal_type(fd3)  # N: Revealed type is "builtins.str"
+        reveal_type(fe3)  # N: Revealed type is "builtins.int"
+
+m1: Tuple[int, Unpack[Tuple[str, ...]], int]
+match m1:
+    case [a1, *b1, c1]:
+        reveal_type(a1)  # N: Revealed type is "builtins.int"
+        reveal_type(b1)  # N: Revealed type is "builtins.list[builtins.str]"
+        reveal_type(c1)  # N: Revealed type is "builtins.int"
+
+m2: Tuple[int, Unpack[Tuple[str, ...]], int]
+match m2:
+    case [a2, b2, *c2, d2, e2]:
+        reveal_type(a2)  # N: Revealed type is "builtins.int"
+        reveal_type(b2)  # N: Revealed type is "builtins.str"
+        reveal_type(c2)  # N: Revealed type is "builtins.list[builtins.str]"
+        reveal_type(d2)  # N: Revealed type is "builtins.str"
+        reveal_type(e2)  # N: Revealed type is "builtins.int"
+
+m3: Tuple[int, int, Unpack[Tuple[str, ...]], int, int]
+match m3:
+    case [a3, *b3, c3]:
+        reveal_type(a3)  # N: Revealed type is "builtins.int"
+        reveal_type(b3)  # N: Revealed type is "builtins.list[Union[builtins.int, builtins.str]]"
+        reveal_type(c3)  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]
+
+[case testMatchSequencePatternTypeVarTupleNotTooShort]
+from typing import Tuple
+from typing_extensions import Unpack, TypeVarTuple
+
+Ts = TypeVarTuple("Ts")
+def test(xs: Tuple[Unpack[Ts]]) -> None:
+    fm1: Tuple[int, int, Unpack[Ts], int]
+    match fm1:
+        case [fa1, fb1, fc1]:
+            reveal_type(fa1)  # N: Revealed type is "builtins.int"
+            reveal_type(fb1)  # N: Revealed type is "builtins.int"
+            reveal_type(fc1)  # N: Revealed type is "builtins.int"
+
+    fm2: Tuple[int, int, Unpack[Ts], int]
+    match fm2:
+        case [fa2, fb2]:
+            reveal_type(fa2)
+            reveal_type(fb2)
+
+    fm3: Tuple[int, int, Unpack[Ts], int]
+    match fm3:
+        case [fa3, fb3, fc3, fd3, fe3]:
+            reveal_type(fa3)  # N: Revealed type is "builtins.int"
+            reveal_type(fb3)  # N: Revealed type is "builtins.int"
+            reveal_type(fc3)  # N: Revealed type is "builtins.object"
+            reveal_type(fd3)  # N: Revealed type is "builtins.object"
+            reveal_type(fe3)  # N: Revealed type is "builtins.int"
+
+    m1: Tuple[int, Unpack[Ts], int]
+    match m1:
+        case [a1, *b1, c1]:
+            reveal_type(a1)  # N: Revealed type is "builtins.int"
+            reveal_type(b1)  # N: Revealed type is "builtins.list[builtins.object]"
+            reveal_type(c1)  # N: Revealed type is "builtins.int"
+
+    m2: Tuple[int, Unpack[Ts], int]
+    match m2:
+        case [a2, b2, *c2, d2, e2]:
+            reveal_type(a2)  # N: Revealed type is "builtins.int"
+            reveal_type(b2)  # N: Revealed type is "builtins.object"
+            reveal_type(c2)  # N: Revealed type is "builtins.list[builtins.object]"
+            reveal_type(d2)  # N: Revealed type is "builtins.object"
+            reveal_type(e2)  # N: Revealed type is "builtins.int"
+
+    m3: Tuple[int, int, Unpack[Ts], int, int]
+    match m3:
+        case [a3, *b3, c3]:
+            reveal_type(a3)  # N: Revealed type is "builtins.int"
+            reveal_type(b3)  # N: Revealed type is "builtins.list[builtins.object]"
+            reveal_type(c3)  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test
index bf7a928ff51d..29abe9cb025b 100644
--- a/test-data/unit/check-selftype.test
+++ b/test-data/unit/check-selftype.test
@@ -2036,3 +2036,23 @@ class Ben(Object):
         foo_method = cls.MY_MAP["foo"]
         return foo_method(Foo())
 [builtins fixtures/isinstancelist.pyi]
+
+[case testSelfTypeOnGenericClassObjectNewStyleBound]
+from typing import Generic, TypeVar, Self
+
+T = TypeVar("T")
+S = TypeVar("S")
+class B(Generic[T, S]):
+    def copy(self) -> Self: ...
+
+b: B[int, str]
+reveal_type(B.copy(b))  # N: Revealed type is "__main__.B[builtins.int, builtins.str]"
+
+class C(B[T, S]): ...
+
+c: C[int, str]
+reveal_type(C.copy(c))  # N: Revealed type is "__main__.C[builtins.int, builtins.str]"
+
+B.copy(42)  # E: Value of type variable "Self" of "copy" of "B" cannot be "int"
+C.copy(42)  # E: Value of type variable "Self" of "copy" of "B" cannot be "int"
+[builtins fixtures/tuple.pyi]
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 1a2573898170..7b8a22313b36 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -1911,6 +1911,180 @@ z = C[int]()  # E: Bad number of arguments, expected: at least 2, given: 1
 reveal_type(z)  # N: Revealed type is "__main__.C[Any, Unpack[builtins.tuple[Any, ...]], Any]"
 [builtins fixtures/tuple.pyi]
 
+[case testVariadicTupleTupleSubclassPrefixSuffix]
+from typing import Tuple
+from typing_extensions import Unpack
+
+i: int
+
+class A(Tuple[int, Unpack[Tuple[int, ...]]]): ...
+a: A
+reveal_type(a[i])  # N: Revealed type is "builtins.int"
+
+class B(Tuple[Unpack[Tuple[int, ...]], int]): ...
+b: B
+reveal_type(b[i])  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicClassSubclassInit]
+from typing import Tuple, Generic, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class B(Generic[Unpack[Ts]]):
+    def __init__(self, x: Tuple[Unpack[Ts]], *args: Unpack[Ts]) -> None: ...
+reveal_type(B)  # N: Revealed type is "def [Ts] (x: Tuple[Unpack[Ts`1]], *args: Unpack[Ts`1]) -> __main__.B[Unpack[Ts`1]]"
+
+T = TypeVar("T")
+S = TypeVar("S")
+class C(B[T, S]): ...
+reveal_type(C)  # N: Revealed type is "def [T, S] (x: Tuple[T`1, S`2], T`1, S`2) -> __main__.C[T`1, S`2]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicClassGenericSelf]
+from typing import Tuple, Generic, TypeVar
+from typing_extensions import TypeVarTuple, Unpack
+
+T = TypeVar("T")
+S = TypeVar("S")
+Ts = TypeVarTuple("Ts")
+class B(Generic[Unpack[Ts]]):
+    def copy(self: T) -> T: ...
+    def on_pair(self: B[T, S]) -> Tuple[T, S]: ...
+
+b1: B[int]
+reveal_type(b1.on_pair())  # E: Invalid self argument "B[int]" to attribute function "on_pair" with type "Callable[[B[T, S]], Tuple[T, S]]" \
+                           # N: Revealed type is "Tuple[Never, Never]"
+b2: B[int, str]
+reveal_type(b2.on_pair())  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
+b3: B[int, str, int]
+reveal_type(b3.on_pair())  # E: Invalid self argument "B[int, str, int]" to attribute function "on_pair" with type "Callable[[B[T, S]], Tuple[T, S]]" \
+                           # N: Revealed type is "Tuple[Never, Never]"
+
+class C(B[T, S]): ...
+c: C[int, str]
+reveal_type(c.copy())  # N: Revealed type is "__main__.C[builtins.int, builtins.str]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicClassNewStyleSelf]
+from typing import Generic, TypeVar, Self
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class B(Generic[Unpack[Ts]]):
+    next: Self
+    def copy(self) -> Self:
+        return self.next
+
+b: B[int, str, int]
+reveal_type(b.next)  # N: Revealed type is "__main__.B[builtins.int, builtins.str, builtins.int]"
+reveal_type(b.copy())  # N: Revealed type is "__main__.B[builtins.int, builtins.str, builtins.int]"
+reveal_type(B.copy(b))  # N: Revealed type is "__main__.B[builtins.int, builtins.str, builtins.int]"
+
+T = TypeVar("T")
+S = TypeVar("S")
+class C(B[T, S]): ...
+c: C[int, str]
+
+reveal_type(c.next)  # N: Revealed type is "__main__.C[builtins.int, builtins.str]"
+reveal_type(c.copy())  # N: Revealed type is "__main__.C[builtins.int, builtins.str]"
+reveal_type(C.copy(c))  # N: Revealed type is "__main__.C[builtins.int, builtins.str]"
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicTupleDataclass]
+from dataclasses import dataclass
+from typing import Generic, TypeVar, Tuple
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+
+@dataclass
+class B(Generic[Unpack[Ts]]):
+    items: Tuple[Unpack[Ts]]
+
+reveal_type(B)  # N: Revealed type is "def [Ts] (items: Tuple[Unpack[Ts`1]]) -> __main__.B[Unpack[Ts`1]]"
+b = B((1, "yes"))
+reveal_type(b.items)  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
+
+T = TypeVar("T")
+S = TypeVar("S")
+
+@dataclass
+class C(B[T, S]):
+    first: T
+    second: S
+
+reveal_type(C)  # N: Revealed type is "def [T, S] (items: Tuple[T`1, S`2], first: T`1, second: S`2) -> __main__.C[T`1, S`2]"
+c = C((1, "yes"), 2, "no")
+reveal_type(c.items)  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
+reveal_type(c.first)  # N: Revealed type is "builtins.int"
+reveal_type(c.second)  # N: Revealed type is "builtins.str"
+[builtins fixtures/dataclasses.pyi]
+[typing fixtures/typing-medium.pyi]
+
+[case testVariadicTupleInProtocol]
+from typing import Protocol, Tuple, List
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class P(Protocol[Unpack[Ts]]):
+    def items(self) -> Tuple[Unpack[Ts]]: ...
+
+class PC(Protocol[Unpack[Ts]]):
+    def meth(self, *args: Unpack[Ts]) -> None: ...
+
+def get_items(x: P[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: ...
+def match(x: PC[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: ...
+
+class Bad:
+    def items(self) -> List[int]: ...
+    def meth(self, *, named: int) -> None: ...
+
+class Good:
+    def items(self) -> Tuple[int, str]: ...
+    def meth(self, __x: int, y: str) -> None: ...
+
+g: Good
+reveal_type(get_items(g))  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
+reveal_type(match(g))  # N: Revealed type is "Tuple[builtins.int, builtins.str]"
+
+b: Bad
+get_items(b)  # E: Argument 1 to "get_items" has incompatible type "Bad"; expected "P[Unpack[Tuple[Never, ...]]]" \
+              # N: Following member(s) of "Bad" have conflicts: \
+              # N:     Expected: \
+              # N:         def items(self) -> Tuple[Never, ...] \
+              # N:     Got: \
+              # N:         def items(self) -> List[int]
+match(b)  # E: Argument 1 to "match" has incompatible type "Bad"; expected "PC[Unpack[Tuple[Never, ...]]]" \
+          # N: Following member(s) of "Bad" have conflicts: \
+          # N:     Expected: \
+          # N:         def meth(self, *args: Never) -> None \
+          # N:     Got: \
+          # N:         def meth(self, *, named: int) -> None
+[builtins fixtures/tuple.pyi]
+
+[case testVariadicTupleCollectionCheck]
+from typing import Tuple, Optional
+from typing_extensions import Unpack
+
+allowed: Tuple[int, Unpack[Tuple[int, ...]]]
+
+x: Optional[int]
+if x in allowed:
+    reveal_type(x)  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]
+
+[case testJoinOfVariadicTupleCallablesNoCrash]
+from typing import Callable, Tuple
+
+f: Callable[[int, *Tuple[str, ...], int], None]
+g: Callable[[int, *Tuple[str, ...], int], None]
+reveal_type([f, g])  # N: Revealed type is "builtins.list[def (builtins.int, *Unpack[Tuple[Unpack[builtins.tuple[builtins.str, ...]], builtins.int]])]"
+
+h: Callable[[int, *Tuple[str, ...], str], None]
+reveal_type([f, h])  # N: Revealed type is "builtins.list[def (builtins.int, *Unpack[Tuple[Unpack[builtins.tuple[builtins.str, ...]], Never]])]"
+[builtins fixtures/tuple.pyi]
+
 [case testTypeVarTupleBothUnpacksSimple]
 from typing import Tuple
 from typing_extensions import Unpack, TypeVarTuple, TypedDict
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
index cb24467cbf41..5dc42bd62d9b 100644
--- a/test-data/unit/fine-grained.test
+++ b/test-data/unit/fine-grained.test
@@ -9908,6 +9908,128 @@ x = 0  # Arbitrary change to trigger reprocessing
 ==
 a.py:3: note: Revealed type is "Tuple[Literal[1]?, Literal['x']?]"
 
+[case testVariadicClassFineUpdateRegularToVariadic]
+from typing import Any
+from lib import C
+
+x: C[int, str]
+
+[file lib.py]
+from typing import Generic, TypeVar
+
+T = TypeVar("T")
+S = TypeVar("S")
+class C(Generic[T, S]): ...
+
+[file lib.py.2]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Unpack[Ts]]): ...
+[builtins fixtures/tuple.pyi]
+[out]
+==
+
+[case testVariadicClassFineUpdateVariadicToRegular]
+from typing import Any
+from lib import C
+
+x: C[int, str, int]
+
+[file lib.py]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Unpack[Ts]]): ...
+[file lib.py.2]
+from typing import Generic, TypeVar
+
+T = TypeVar("T")
+S = TypeVar("S")
+class C(Generic[T, S]): ...
+[builtins fixtures/tuple.pyi]
+[out]
+==
+main:4: error: "C" expects 2 type arguments, but 3 given
+
+-- Order of error messages is different, so we repeat the test twice.
+[case testVariadicClassFineUpdateValidToInvalidCached-only_when_cache]
+from typing import Any
+from lib import C
+
+x: C[int, str]
+
+[file lib.py]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Unpack[Ts]]): ...
+
+[file lib.py.2]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Ts]): ...
+[builtins fixtures/tuple.pyi]
+[out]
+==
+main:4: error: "C" expects no type arguments, but 2 given
+lib.py:5: error: Free type variable expected in Generic[...]
+
+[case testVariadicClassFineUpdateValidToInvalid-only_when_nocache]
+from typing import Any
+from lib import C
+
+x: C[int, str]
+
+[file lib.py]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Unpack[Ts]]): ...
+
+[file lib.py.2]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Ts]): ...
+[builtins fixtures/tuple.pyi]
+[out]
+==
+lib.py:5: error: Free type variable expected in Generic[...]
+main:4: error: "C" expects no type arguments, but 2 given
+
+[case testVariadicClassFineUpdateInvalidToValid]
+from typing import Any
+from lib import C
+
+x: C[int, str]
+
+[file lib.py]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Ts]): ...
+
+[file lib.py.2]
+from typing import Generic
+from typing_extensions import TypeVarTuple, Unpack
+
+Ts = TypeVarTuple("Ts")
+class C(Generic[Unpack[Ts]]): ...
+[builtins fixtures/tuple.pyi]
+[out]
+lib.py:5: error: Free type variable expected in Generic[...]
+main:4: error: "C" expects no type arguments, but 2 given
+==
+
 [case testUnpackKwargsUpdateFine]
 import m
 [file shared.py]

From c4ab46e6acdeab8fd503322311e2b934c9622695 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Sat, 28 Oct 2023 15:00:13 +0100
Subject: [PATCH 209/288] Cache information about whether file is typeshed file
 (#16351)

We used to check if a file is in typeshed a lot. This seems to speed up
self-check by about 2%, and this should also speed up tests a bit.
---
 mypy/checker.py      |  4 ++--
 mypy/nodes.py        | 11 ++++++++++-
 mypy/semanal.py      | 13 ++-----------
 mypy/semanal_main.py |  2 +-
 4 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 62ba642256bf..b2804b25e35c 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -222,7 +222,7 @@
 from mypy.types_utils import is_overlapping_none, remove_optional, store_argument_type, strip_type
 from mypy.typetraverser import TypeTraverserVisitor
 from mypy.typevars import fill_typevars, fill_typevars_with_any, has_no_typevars
-from mypy.util import is_dunder, is_sunder, is_typeshed_file
+from mypy.util import is_dunder, is_sunder
 from mypy.visitor import NodeVisitor
 
 T = TypeVar("T")
@@ -400,7 +400,7 @@ def __init__(
         self.pass_num = 0
         self.current_node_deferred = False
         self.is_stub = tree.is_stub
-        self.is_typeshed_stub = is_typeshed_file(options.abs_custom_typeshed_dir, path)
+        self.is_typeshed_stub = tree.is_typeshed_file(options)
         self.inferred_attribute_types = None
 
         # If True, process function definitions. If False, don't. This is used
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 0e5c078d0227..1d7b3e3be84b 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -27,7 +27,7 @@
 
 import mypy.strconv
 from mypy.options import Options
-from mypy.util import short_type
+from mypy.util import is_typeshed_file, short_type
 from mypy.visitor import ExpressionVisitor, NodeVisitor, StatementVisitor
 
 if TYPE_CHECKING:
@@ -283,6 +283,7 @@ class MypyFile(SymbolNode):
         "is_partial_stub_package",
         "plugin_deps",
         "future_import_flags",
+        "_is_typeshed_file",
     )
 
     __match_args__ = ("name", "path", "defs")
@@ -319,6 +320,7 @@ class MypyFile(SymbolNode):
     plugin_deps: dict[str, set[str]]
     # Future imports defined in this file. Populated during semantic analysis.
     future_import_flags: set[str]
+    _is_typeshed_file: bool | None
 
     def __init__(
         self,
@@ -346,6 +348,7 @@ def __init__(
         self.is_cache_skeleton = False
         self.is_partial_stub_package = False
         self.future_import_flags = set()
+        self._is_typeshed_file = None
 
     def local_definitions(self) -> Iterator[Definition]:
         """Return all definitions within the module (including nested).
@@ -371,6 +374,12 @@ def is_package_init_file(self) -> bool:
     def is_future_flag_set(self, flag: str) -> bool:
         return flag in self.future_import_flags
 
+    def is_typeshed_file(self, options: Options) -> bool:
+        # Cache result since this is called a lot
+        if self._is_typeshed_file is None:
+            self._is_typeshed_file = is_typeshed_file(options.abs_custom_typeshed_dir, self.path)
+        return self._is_typeshed_file
+
     def serialize(self) -> JsonDict:
         return {
             ".class": "MypyFile",
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 27491ac695ae..41943e1db8b0 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -283,14 +283,7 @@
 )
 from mypy.types_utils import is_invalid_recursive_alias, store_argument_type
 from mypy.typevars import fill_typevars
-from mypy.util import (
-    correct_relative_import,
-    is_dunder,
-    is_typeshed_file,
-    module_prefix,
-    unmangle,
-    unnamed_function,
-)
+from mypy.util import correct_relative_import, is_dunder, module_prefix, unmangle, unnamed_function
 from mypy.visitor import NodeVisitor
 
 T = TypeVar("T")
@@ -777,9 +770,7 @@ def file_context(
         self.cur_mod_id = file_node.fullname
         with scope.module_scope(self.cur_mod_id):
             self._is_stub_file = file_node.path.lower().endswith(".pyi")
-            self._is_typeshed_stub_file = is_typeshed_file(
-                options.abs_custom_typeshed_dir, file_node.path
-            )
+            self._is_typeshed_stub_file = file_node.is_typeshed_file(options)
             self.globals = file_node.names
             self.tvar_scope = TypeVarLikeScope()
 
diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py
index ec09deb0952f..1185a3821553 100644
--- a/mypy/semanal_main.py
+++ b/mypy/semanal_main.py
@@ -380,7 +380,7 @@ def check_type_arguments(graph: Graph, scc: list[str], errors: Errors) -> None:
         analyzer = TypeArgumentAnalyzer(
             errors,
             state.options,
-            is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""),
+            state.tree.is_typeshed_file(state.options),
             state.manager.semantic_analyzer.named_type,
         )
         with state.wrap_context():

From c76132f63de5de4d3f9818d070c1cd26d2209d5a Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Sat, 28 Oct 2023 15:00:23 +0100
Subject: [PATCH 210/288] Add fast path for checking self types (#16352)

The check was pretty expensive, though usually it's not doing anything
non-trivial. Added a fast path for cases where we use the implicit self
type, which covers the vast majority of cases.

This makes self-check about 4% faster.
---
 mypy/checker.py  | 58 +++++++++++++++++++++++++-----------------------
 mypy/subtypes.py | 12 ++++++++++
 2 files changed, 42 insertions(+), 28 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index b2804b25e35c..f51ba746ea75 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -1199,13 +1199,14 @@ def check_func_def(
                 # Push return type.
                 self.return_types.append(typ.ret_type)
 
+                with self.scope.push_function(defn):
+                    # We temporary push the definition to get the self type as
+                    # visible from *inside* of this function/method.
+                    ref_type: Type | None = self.scope.active_self_type()
+
                 # Store argument types.
                 for i in range(len(typ.arg_types)):
                     arg_type = typ.arg_types[i]
-                    with self.scope.push_function(defn):
-                        # We temporary push the definition to get the self type as
-                        # visible from *inside* of this function/method.
-                        ref_type: Type | None = self.scope.active_self_type()
                     if (
                         isinstance(defn, FuncDef)
                         and ref_type is not None
@@ -1215,30 +1216,31 @@ def check_func_def(
                     ):
                         if defn.is_class or defn.name == "__new__":
                             ref_type = mypy.types.TypeType.make_normalized(ref_type)
-                        # This level of erasure matches the one in checkmember.check_self_arg(),
-                        # better keep these two checks consistent.
-                        erased = get_proper_type(erase_typevars(erase_to_bound(arg_type)))
-                        if not is_subtype(ref_type, erased, ignore_type_params=True):
-                            if (
-                                isinstance(erased, Instance)
-                                and erased.type.is_protocol
-                                or isinstance(erased, TypeType)
-                                and isinstance(erased.item, Instance)
-                                and erased.item.type.is_protocol
-                            ):
-                                # We allow the explicit self-type to be not a supertype of
-                                # the current class if it is a protocol. For such cases
-                                # the consistency check will be performed at call sites.
-                                msg = None
-                            elif typ.arg_names[i] in {"self", "cls"}:
-                                msg = message_registry.ERASED_SELF_TYPE_NOT_SUPERTYPE.format(
-                                    erased.str_with_options(self.options),
-                                    ref_type.str_with_options(self.options),
-                                )
-                            else:
-                                msg = message_registry.MISSING_OR_INVALID_SELF_TYPE
-                            if msg:
-                                self.fail(msg, defn)
+                        if not is_same_type(arg_type, ref_type):
+                            # This level of erasure matches the one in checkmember.check_self_arg(),
+                            # better keep these two checks consistent.
+                            erased = get_proper_type(erase_typevars(erase_to_bound(arg_type)))
+                            if not is_subtype(ref_type, erased, ignore_type_params=True):
+                                if (
+                                    isinstance(erased, Instance)
+                                    and erased.type.is_protocol
+                                    or isinstance(erased, TypeType)
+                                    and isinstance(erased.item, Instance)
+                                    and erased.item.type.is_protocol
+                                ):
+                                    # We allow the explicit self-type to be not a supertype of
+                                    # the current class if it is a protocol. For such cases
+                                    # the consistency check will be performed at call sites.
+                                    msg = None
+                                elif typ.arg_names[i] in {"self", "cls"}:
+                                    msg = message_registry.ERASED_SELF_TYPE_NOT_SUPERTYPE.format(
+                                        erased.str_with_options(self.options),
+                                        ref_type.str_with_options(self.options),
+                                    )
+                                else:
+                                    msg = message_registry.MISSING_OR_INVALID_SELF_TYPE
+                                if msg:
+                                    self.fail(msg, defn)
                     elif isinstance(arg_type, TypeVarType):
                         # Refuse covariant parameter type variables
                         # TODO: check recursively for inner type variables
diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 6d129683c3f5..7e37751b1c15 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -258,6 +258,18 @@ def is_same_type(
     This means types may have different representation (e.g. an alias, or
     a non-simplified union) but are semantically exchangeable in all contexts.
     """
+    # First, use fast path for some common types. This is performance-critical.
+    if (
+        type(a) is Instance
+        and type(b) is Instance
+        and a.type == b.type
+        and len(a.args) == len(b.args)
+        and a.last_known_value is b.last_known_value
+    ):
+        return all(is_same_type(x, y) for x, y in zip(a.args, b.args))
+    elif isinstance(a, TypeVarType) and isinstance(b, TypeVarType) and a.id == b.id:
+        return True
+
     # Note that using ignore_promotions=True (default) makes types like int and int64
     # considered not the same type (which is the case at runtime).
     # Also Union[bool, int] (if it wasn't simplified before) will be different

From 2aa2443107534715a650dbe78474e7d91cc9df20 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 28 Oct 2023 09:04:58 -0700
Subject: [PATCH 211/288] Avoid importing from setuptools._distutils (#16348)

Fixes #16318, as requested by setuptools maintainer
---
 mypyc/build.py | 31 ++++++++++++++-----------------
 1 file changed, 14 insertions(+), 17 deletions(-)

diff --git a/mypyc/build.py b/mypyc/build.py
index 9889577d4add..0af8908e14d0 100644
--- a/mypyc/build.py
+++ b/mypyc/build.py
@@ -40,8 +40,16 @@
 from mypyc.namegen import exported_name
 from mypyc.options import CompilerOptions
 
-if sys.version_info < (3, 12):
-    if TYPE_CHECKING:
+try:
+    # Import setuptools so that it monkey-patch overrides distutils
+    import setuptools
+except ImportError:
+    pass
+
+if TYPE_CHECKING:
+    if sys.version_info >= (3, 12):
+        from setuptools import Extension
+    else:
         from distutils.core import Extension as _distutils_Extension
         from typing_extensions import TypeAlias
 
@@ -49,22 +57,11 @@
 
         Extension: TypeAlias = Union[_setuptools_Extension, _distutils_Extension]
 
-    try:
-        # Import setuptools so that it monkey-patch overrides distutils
-        import setuptools
-    except ImportError:
-        pass
-    from distutils import ccompiler, sysconfig
+if sys.version_info >= (3, 12):
+    # From setuptools' monkeypatch
+    from distutils import ccompiler, sysconfig  # type: ignore[import-not-found]
 else:
-    import setuptools
-    from setuptools import Extension
-    from setuptools._distutils import (
-        ccompiler as _ccompiler,  # type: ignore[attr-defined]
-        sysconfig as _sysconfig,  # type: ignore[attr-defined]
-    )
-
-    ccompiler = _ccompiler
-    sysconfig = _sysconfig
+    from distutils import ccompiler, sysconfig
 
 
 def get_extension() -> type[Extension]:

From 65a068ed21c4563590062ad3fbd9e58fe0e7968d Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Sun, 29 Oct 2023 06:45:24 +0000
Subject: [PATCH 212/288] Speed up type argument checking (#16353)

The upper bound is usually `object`, so add a fast path and skip a
potentially slow subtype check if that's the case. Also make type
annotations more precise.

This seems to at least speed up type checker tests, by 1-2% or so. This
also potentially speeds up self-check a bit, though probably by less
than 1%.
---
 mypy/semanal_typeargs.py | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py
index a25bab8de054..15ea15d612c0 100644
--- a/mypy/semanal_typeargs.py
+++ b/mypy/semanal_typeargs.py
@@ -7,7 +7,7 @@
 
 from __future__ import annotations
 
-from typing import Callable, Sequence
+from typing import Callable
 
 from mypy import errorcodes as codes, message_registry
 from mypy.errorcodes import ErrorCode
@@ -88,7 +88,7 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None:
             return
         self.seen_aliases.add(t)
         assert t.alias is not None, f"Unfixed type alias {t.type_ref}"
-        is_error = self.validate_args(t.alias.name, t.args, t.alias.alias_tvars, t)
+        is_error = self.validate_args(t.alias.name, tuple(t.args), t.alias.alias_tvars, t)
         if not is_error:
             # If there was already an error for the alias itself, there is no point in checking
             # the expansion, most likely it will result in the same kind of error.
@@ -131,7 +131,7 @@ def visit_instance(self, t: Instance) -> None:
                     t.args = unpacked.args
 
     def validate_args(
-        self, name: str, args: Sequence[Type], type_vars: list[TypeVarLikeType], ctx: Context
+        self, name: str, args: tuple[Type, ...], type_vars: list[TypeVarLikeType], ctx: Context
     ) -> bool:
         if any(isinstance(v, TypeVarTupleType) for v in type_vars):
             prefix = next(i for (i, v) in enumerate(type_vars) if isinstance(v, TypeVarTupleType))
@@ -140,7 +140,7 @@ def validate_args(
             start, middle, end = split_with_prefix_and_suffix(
                 tuple(args), prefix, len(type_vars) - prefix - 1
             )
-            args = list(start) + [TupleType(list(middle), tvt.tuple_fallback)] + list(end)
+            args = start + (TupleType(list(middle), tvt.tuple_fallback),) + end
 
         is_error = False
         for (i, arg), tvar in zip(enumerate(args), type_vars):
@@ -174,7 +174,14 @@ def validate_args(
                         arg_values = [arg]
                     if self.check_type_var_values(name, arg_values, tvar.name, tvar.values, ctx):
                         is_error = True
-                if not is_subtype(arg, tvar.upper_bound):
+                # Check against upper bound. Since it's object the vast majority of the time,
+                # add fast path to avoid a potentially slow subtype check.
+                upper_bound = tvar.upper_bound
+                object_upper_bound = (
+                    type(upper_bound) is Instance
+                    and upper_bound.type.fullname == "builtins.object"
+                )
+                if not object_upper_bound and not is_subtype(arg, upper_bound):
                     if self.in_type_alias_expr and isinstance(arg, TypeVarType):
                         # Type aliases are allowed to use unconstrained type variables
                         # error will be checked at substitution point.
@@ -184,7 +191,7 @@ def validate_args(
                         message_registry.INVALID_TYPEVAR_ARG_BOUND.format(
                             format_type(arg, self.options),
                             name,
-                            format_type(tvar.upper_bound, self.options),
+                            format_type(upper_bound, self.options),
                         ),
                         ctx,
                         code=codes.TYPE_VAR,

From cf045d924d6688f5f4d0c3402f38d30bc81db299 Mon Sep 17 00:00:00 2001
From: dinaldoap <38653153+dinaldoap@users.noreply.github.com>
Date: Mon, 30 Oct 2023 02:06:19 -0300
Subject: [PATCH 213/288] doc: remove duplicate word (#16365)

This PR removes one of the duplicate **in** in the sentence "This option
is only useful in in the absence of `__init__.py`" in the file
`docs/source/command_line.rst`.
---
 docs/source/command_line.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
index 4e954c7c2ccb..5db118334519 100644
--- a/docs/source/command_line.rst
+++ b/docs/source/command_line.rst
@@ -133,7 +133,7 @@ imports.
 
     This flag tells mypy that top-level packages will be based in either the
     current directory, or a member of the ``MYPYPATH`` environment variable or
-    :confval:`mypy_path` config option. This option is only useful in
+    :confval:`mypy_path` config option. This option is only useful
     in the absence of `__init__.py`. See :ref:`Mapping file
     paths to modules <mapping-paths-to-modules>` for details.
 

From b8c748a77a27b27599b9c2b4097427e055f4c16c Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 30 Oct 2023 11:07:20 +0000
Subject: [PATCH 214/288] Fix incremental crash on TypedDict in method (#16364)

Fixes https://github.com/python/mypy/issues/16336

The whole story with `@`-names is a mess. FWIW I just copied the logic
from named tuples, where it works. So although it is a mess, it will
now be a consistent mess, with full parity between `NamedTuple` and
`TypedDict`.
---
 mypy/semanal.py                       |  7 ++++---
 mypy/semanal_typeddict.py             |  2 ++
 test-data/unit/check-incremental.test | 22 +++++++++++++++++++++-
 3 files changed, 27 insertions(+), 4 deletions(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index 41943e1db8b0..bd24c48ed24f 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -1745,7 +1745,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> bool:
             if info is None:
                 self.mark_incomplete(defn.name, defn)
             else:
-                self.prepare_class_def(defn, info)
+                self.prepare_class_def(defn, info, custom_names=True)
             return True
         return False
 
@@ -2099,8 +2099,9 @@ def prepare_class_def(
                 # Preserve name from previous fine-grained incremental run.
                 global_name = defn.info.name
             defn.fullname = defn.info._fullname
-            if defn.info.is_named_tuple:
-                # Named tuple nested within a class is stored in the class symbol table.
+            if defn.info.is_named_tuple or defn.info.typeddict_type:
+                # Named tuples and Typed dicts nested within a class are stored
+                # in the class symbol table.
                 self.add_symbol_skip_local(global_name, defn.info)
             else:
                 self.globals[global_name] = SymbolTableNode(GDEF, defn.info)
diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py
index 51424d8800d2..e9aaee55879a 100644
--- a/mypy/semanal_typeddict.py
+++ b/mypy/semanal_typeddict.py
@@ -101,6 +101,8 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N
             fields, types, statements, required_keys = self.analyze_typeddict_classdef_fields(defn)
             if fields is None:
                 return True, None  # Defer
+            if self.api.is_func_scope() and "@" not in defn.name:
+                defn.name += "@" + str(defn.line)
             info = self.build_typeddict_typeinfo(
                 defn.name, fields, types, required_keys, defn.line, existing_info
             )
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index eb7a795f99c0..806a585bff39 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -5135,7 +5135,6 @@ tmp/b.py:4: error: First argument to namedtuple() should be "NT", not "BadName"
 tmp/b.py:4: error: First argument to namedtuple() should be "NT", not "BadName"
 
 [case testNewAnalyzerIncrementalMethodNamedTuple]
-
 import a
 [file a.py]
 from b import C
@@ -6540,3 +6539,24 @@ from typing_extensions import TypedDict
 def test() -> None:
     Counts = TypedDict("Counts", {k: int for k in "abc"})  # type: ignore
 [builtins fixtures/dict.pyi]
+
+[case testNoIncrementalCrashOnTypedDictMethod]
+import a
+[file a.py]
+from b import C
+x: C
+[file a.py.2]
+from b import C
+x: C
+reveal_type(x.h)
+[file b.py]
+from typing_extensions import TypedDict
+class C:
+    def __init__(self) -> None:
+        self.h: Hidden
+        class Hidden(TypedDict):
+            x: int
+[builtins fixtures/dict.pyi]
+[out]
+[out2]
+tmp/a.py:3: note: Revealed type is "TypedDict('b.C.Hidden@5', {'x': builtins.int})"

From 4e30e896486b774cdecaef6d3521a585b8acf8bc Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 30 Oct 2023 11:55:21 +0000
Subject: [PATCH 215/288] Fix dmypy inspect for namespace packages (#16357)

Fixes https://github.com/python/mypy/issues/15781

The fix is to switch to already resolved paths instead of relying on
`crawl_up()`. This should be more robust w.r.t. various special cases. I
also tweak the tests slightly to show full file names, to have a more
consistent output.
---
 mypy/dmypy_server.py                     |  4 +-
 mypy/inspections.py                      | 16 ++---
 mypy/test/testfinegrained.py             |  2 +-
 test-data/unit/daemon.test               | 18 +++++-
 test-data/unit/fine-grained-inspect.test | 80 ++++++++++++------------
 5 files changed, 65 insertions(+), 55 deletions(-)

diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py
index 9cc0888fc208..0db349b5bf82 100644
--- a/mypy/dmypy_server.py
+++ b/mypy/dmypy_server.py
@@ -461,6 +461,7 @@ def initialize_fine_grained(
         messages = result.errors
         self.fine_grained_manager = FineGrainedBuildManager(result)
 
+        original_sources_len = len(sources)
         if self.following_imports():
             sources = find_all_sources_in_build(self.fine_grained_manager.graph, sources)
             self.update_sources(sources)
@@ -525,7 +526,8 @@ def initialize_fine_grained(
 
         __, n_notes, __ = count_stats(messages)
         status = 1 if messages and n_notes < len(messages) else 0
-        messages = self.pretty_messages(messages, len(sources), is_tty, terminal_width)
+        # We use explicit sources length to match the logic in non-incremental mode.
+        messages = self.pretty_messages(messages, original_sources_len, is_tty, terminal_width)
         return {"out": "".join(s + "\n" for s in messages), "err": "", "status": status}
 
     def fine_grained_increment(
diff --git a/mypy/inspections.py b/mypy/inspections.py
index cb695a80eef2..45e981a24af2 100644
--- a/mypy/inspections.py
+++ b/mypy/inspections.py
@@ -6,7 +6,6 @@
 from typing import Callable
 
 from mypy.build import State
-from mypy.find_sources import InvalidSourceList, SourceFinder
 from mypy.messages import format_type
 from mypy.modulefinder import PYTHON_EXTENSIONS
 from mypy.nodes import (
@@ -206,9 +205,6 @@ def __init__(
         force_reload: bool = False,
     ) -> None:
         self.fg_manager = fg_manager
-        self.finder = SourceFinder(
-            self.fg_manager.manager.fscache, self.fg_manager.manager.options
-        )
         self.verbosity = verbosity
         self.limit = limit
         self.include_span = include_span
@@ -561,16 +557,14 @@ def find_module(self, file: str) -> tuple[State | None, dict[str, object]]:
         if not any(file.endswith(ext) for ext in PYTHON_EXTENSIONS):
             return None, {"error": "Source file is not a Python file"}
 
-        try:
-            module, _ = self.finder.crawl_up(os.path.normpath(file))
-        except InvalidSourceList:
-            return None, {"error": "Invalid source file name: " + file}
-
-        state = self.fg_manager.graph.get(module)
+        # We are using a bit slower but robust way to find a module by path,
+        # to be sure that namespace packages are handled properly.
+        abs_path = os.path.abspath(file)
+        state = next((s for s in self.fg_manager.graph.values() if s.abspath == abs_path), None)
         self.module = state
         return (
             state,
-            {"out": f"Unknown module: {module}", "err": "", "status": 1} if state is None else {},
+            {"out": f"Unknown module: {file}", "err": "", "status": 1} if state is None else {},
         )
 
     def run_inspection(
diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py
index ba0526d32558..c517c54286d7 100644
--- a/mypy/test/testfinegrained.py
+++ b/mypy/test/testfinegrained.py
@@ -352,7 +352,7 @@ def maybe_inspect(self, step: int, server: Server, src: str) -> list[str]:
             )
             val = res["error"] if "error" in res else res["out"] + res["err"]
             output.extend(val.strip().split("\n"))
-        return normalize_messages(output)
+        return output
 
     def get_suggest(self, program_text: str, incremental_step: int) -> list[tuple[str, str]]:
         step_bit = "1?" if incremental_step == 1 else str(incremental_step)
diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test
index 18a03a92207d..ca0cd90911b9 100644
--- a/test-data/unit/daemon.test
+++ b/test-data/unit/daemon.test
@@ -379,7 +379,7 @@ $ dmypy inspect foo.pyc:1:1:2:2
 Source file is not a Python file
 == Return code: 2
 $ dmypy inspect bar/baz.py:1:1:2:2
-Unknown module: baz
+Unknown module: bar/baz.py
 == Return code: 1
 $ dmypy inspect foo.py:3:1:1:1
 "end_line" must not be before "line"
@@ -434,7 +434,7 @@ $ dmypy inspect foo.pyc:1:2
 Source file is not a Python file
 == Return code: 2
 $ dmypy inspect bar/baz.py:1:2
-Unknown module: baz
+Unknown module: bar/baz.py
 == Return code: 1
 $ dmypy inspect foo.py:7:5 --include-span
 7:5:7:5 -> "int"
@@ -571,3 +571,17 @@ class A:
     x: int
 class B:
     x: int
+
+[case testDaemonInspectSelectCorrectFile]
+$ dmypy run test.py --export-types
+Daemon started
+Success: no issues found in 1 source file
+$ dmypy inspect demo/test.py:1:1
+"int"
+$ dmypy inspect test.py:1:1
+"str"
+[file test.py]
+b: str
+from demo.test import a
+[file demo/test.py]
+a: int
diff --git a/test-data/unit/fine-grained-inspect.test b/test-data/unit/fine-grained-inspect.test
index 2c575ec365b1..f8ce35585c10 100644
--- a/test-data/unit/fine-grained-inspect.test
+++ b/test-data/unit/fine-grained-inspect.test
@@ -1,8 +1,8 @@
 [case testInspectTypeBasic]
-# inspect2: --include-kind foo.py:10:13
-# inspect2: --show=type --include-kind foo.py:10:13
-# inspect2: --include-span -vv foo.py:12:5
-# inspect2: --include-span --include-kind foo.py:12:5:12:9
+# inspect2: --include-kind tmp/foo.py:10:13
+# inspect2: --show=type --include-kind tmp/foo.py:10:13
+# inspect2: --include-span -vv tmp/foo.py:12:5
+# inspect2: --include-span --include-kind tmp/foo.py:12:5:12:9
 import foo
 [file foo.py]
 from typing import TypeVar, Generic
@@ -29,10 +29,10 @@ MemberExpr -> "T"
 CallExpr:12:5:12:9 -> "C[int]"
 
 [case testInspectAttrsBasic]
-# inspect2: --show=attrs foo.py:6:1
-# inspect2: --show=attrs foo.py:7:1
-# inspect2: --show=attrs foo.py:10:1
-# inspect2: --show=attrs --include-object-attrs  foo.py:10:1
+# inspect2: --show=attrs tmp/foo.py:6:1
+# inspect2: --show=attrs tmp/foo.py:7:1
+# inspect2: --show=attrs tmp/foo.py:10:1
+# inspect2: --show=attrs --include-object-attrs  tmp/foo.py:10:1
 import foo
 [file foo.py]
 from bar import Meta
@@ -56,12 +56,12 @@ class Meta(type):
 {"function": ["__name__"], "object": ["__init__"]}
 
 [case testInspectDefBasic]
-# inspect2: --show=definition foo.py:5:5
-# inspect2: --show=definition --include-kind foo.py:6:3
-# inspect2: --show=definition --include-span foo.py:7:5
-# inspect2: --show=definition foo.py:8:1:8:4
-# inspect2: --show=definition foo.py:8:6:8:8
-# inspect2: --show=definition foo.py:9:3
+# inspect2: --show=definition tmp/foo.py:5:5
+# inspect2: --show=definition --include-kind tmp/foo.py:6:3
+# inspect2: --show=definition --include-span tmp/foo.py:7:5
+# inspect2: --show=definition tmp/foo.py:8:1:8:4
+# inspect2: --show=definition tmp/foo.py:8:6:8:8
+# inspect2: --show=definition tmp/foo.py:9:3
 import foo
 [file foo.py]
 from bar import var, test, A
@@ -95,18 +95,18 @@ def foo(x: Union[int, str]) -> None:
 [builtins fixtures/classmethod.pyi]
 [out]
 ==
-bar.py:4:0:meth
+tmp/bar.py:4:0:meth
 MemberExpr -> tmp/bar.py:2:5:x
 7:1:7:5 -> tmp/bar.py:6:9:y
-bar.py:9:1:test
-bar.py:8:1:var
-baz.py:3:2:foo
+tmp/bar.py:9:1:test
+tmp/bar.py:8:1:var
+tmp/baz.py:3:2:foo
 
 [case testInspectFallbackAttributes]
-# inspect2: --show=attrs --include-object-attrs foo.py:5:1
-# inspect2: --show=attrs foo.py:8:1
-# inspect2: --show=attrs --include-kind foo.py:10:1
-# inspect2: --show=attrs --include-kind --include-object-attrs foo.py:10:1
+# inspect2: --show=attrs --include-object-attrs tmp/foo.py:5:1
+# inspect2: --show=attrs tmp/foo.py:8:1
+# inspect2: --show=attrs --include-kind tmp/foo.py:10:1
+# inspect2: --show=attrs --include-kind --include-object-attrs tmp/foo.py:10:1
 import foo
 [file foo.py]
 class B: ...
@@ -128,7 +128,7 @@ NameExpr -> {}
 NameExpr -> {"object": ["__eq__", "__init__", "__ne__"]}
 
 [case testInspectTypeVarBoundAttrs]
-# inspect2: --show=attrs foo.py:8:13
+# inspect2: --show=attrs tmp/foo.py:8:13
 import foo
 [file foo.py]
 from typing import TypeVar
@@ -144,10 +144,10 @@ def foo(arg: T) -> T:
 {"C": ["x"]}
 
 [case testInspectTypeVarValuesAttrs]
-# inspect2: --show=attrs --force-reload foo.py:13:13
-# inspect2: --show=attrs --force-reload --union-attrs foo.py:13:13
-# inspect2: --show=attrs foo.py:16:5
-# inspect2: --show=attrs --union-attrs foo.py:16:5
+# inspect2: --show=attrs --force-reload tmp/foo.py:13:13
+# inspect2: --show=attrs --force-reload --union-attrs tmp/foo.py:13:13
+# inspect2: --show=attrs tmp/foo.py:16:5
+# inspect2: --show=attrs --union-attrs tmp/foo.py:16:5
 import foo
 [file foo.py]
 from typing import TypeVar, Generic
@@ -174,8 +174,8 @@ class C(Generic[T]):
 {"A": ["x", "z"], "B": ["y", "z"]}
 
 [case testInspectTypeVarBoundDef]
-# inspect2: --show=definition foo.py:9:13
-# inspect2: --show=definition foo.py:8:9
+# inspect2: --show=definition tmp/foo.py:9:13
+# inspect2: --show=definition tmp/foo.py:8:9
 import foo
 [file foo.py]
 from typing import TypeVar
@@ -189,13 +189,13 @@ def foo(arg: T) -> T:
     return arg
 [out]
 ==
-foo.py:7:9:arg
-foo.py:4:5:x
+tmp/foo.py:7:9:arg
+tmp/foo.py:4:5:x
 
 [case testInspectTypeVarValuesDef]
-# inspect2: --show=definition --force-reload foo.py:13:9
-# inspect2: --show=definition --force-reload foo.py:14:13
-# inspect2: --show=definition foo.py:18:7
+# inspect2: --show=definition --force-reload tmp/foo.py:13:9
+# inspect2: --show=definition --force-reload tmp/foo.py:14:13
+# inspect2: --show=definition tmp/foo.py:18:7
 import foo
 [file foo.py]
 from typing import TypeVar, Generic
@@ -218,12 +218,12 @@ class C(Generic[T]):
     x.z
 [out]
 ==
-foo.py:5:5:z, tmp/foo.py:9:5:z
-foo.py:12:9:arg
-foo.py:5:5:z, tmp/foo.py:9:5:z
+tmp/foo.py:5:5:z, tmp/foo.py:9:5:z
+tmp/foo.py:12:9:arg
+tmp/foo.py:5:5:z, tmp/foo.py:9:5:z
 
 [case testInspectModuleAttrs]
-# inspect2: --show=attrs foo.py:2:1
+# inspect2: --show=attrs tmp/foo.py:2:1
 import foo
 [file foo.py]
 from pack import bar
@@ -239,7 +239,7 @@ class C: ...
 {"<pack.bar>": ["C", "__annotations__", "__doc__", "__file__", "__name__", "__package__", "bar", "x"], "ModuleType": ["__file__", "__getattr__"]}
 
 [case testInspectModuleDef]
-# inspect2: --show=definition --include-kind foo.py:2:1
+# inspect2: --show=definition --include-kind tmp/foo.py:2:1
 import foo
 [file foo.py]
 from pack import bar
@@ -255,7 +255,7 @@ NameExpr -> tmp/pack/bar.py:1:1:bar
 MemberExpr -> tmp/pack/bar.py:3:5:x
 
 [case testInspectFunctionArgDef]
-# inspect2: --show=definition --include-span foo.py:4:13
+# inspect2: --show=definition --include-span tmp/foo.py:4:13
 # TODO: for now all arguments have line/column set to function definition.
 import foo
 [file foo.py]

From b064a5c183b53a84d895bb8e3c36a3a74e24be9c Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 30 Oct 2023 11:57:42 +0000
Subject: [PATCH 216/288] Fix dmypy inspect on Windows (#16355)

Fixes https://github.com/python/mypy/issues/15780
---
 mypy/inspections.py        | 24 ++++++++++++++++--------
 mypy/test/testutil.py      |  5 +++++
 test-data/unit/daemon.test |  3 +++
 3 files changed, 24 insertions(+), 8 deletions(-)

diff --git a/mypy/inspections.py b/mypy/inspections.py
index 45e981a24af2..3e660a0bd7a6 100644
--- a/mypy/inspections.py
+++ b/mypy/inspections.py
@@ -215,13 +215,6 @@ def __init__(
         # Module for which inspection was requested.
         self.module: State | None = None
 
-    def parse_location(self, location: str) -> tuple[str, list[int]]:
-        if location.count(":") not in [2, 4]:
-            raise ValueError("Format should be file:line:column[:end_line:end_column]")
-        parts = location.split(":")
-        module, *rest = parts
-        return module, [int(p) for p in rest]
-
     def reload_module(self, state: State) -> None:
         """Reload given module while temporary exporting types."""
         old = self.fg_manager.manager.options.export_types
@@ -575,7 +568,7 @@ def run_inspection(
         This can be re-used by various simple inspections.
         """
         try:
-            file, pos = self.parse_location(location)
+            file, pos = parse_location(location)
         except ValueError as err:
             return {"error": str(err)}
 
@@ -617,3 +610,18 @@ def get_definition(self, location: str) -> dict[str, object]:
             result["out"] = f"No name or member expressions at {location}"
             result["status"] = 1
         return result
+
+
+def parse_location(location: str) -> tuple[str, list[int]]:
+    if location.count(":") < 2:
+        raise ValueError("Format should be file:line:column[:end_line:end_column]")
+    parts = location.rsplit(":", maxsplit=2)
+    start, *rest = parts
+    # Note: we must allow drive prefix like `C:` on Windows.
+    if start.count(":") < 2:
+        return start, [int(p) for p in rest]
+    parts = start.rsplit(":", maxsplit=2)
+    start, *start_rest = parts
+    if start.count(":") < 2:
+        return start, [int(p) for p in start_rest + rest]
+    raise ValueError("Format should be file:line:column[:end_line:end_column]")
diff --git a/mypy/test/testutil.py b/mypy/test/testutil.py
index 89184b11a826..571e4d0b11f2 100644
--- a/mypy/test/testutil.py
+++ b/mypy/test/testutil.py
@@ -3,6 +3,7 @@
 import os
 from unittest import TestCase, mock
 
+from mypy.inspections import parse_location
 from mypy.util import get_terminal_width
 
 
@@ -15,3 +16,7 @@ def test_get_terminal_size_in_pty_defaults_to_80(self) -> None:
         with mock.patch.object(os, "get_terminal_size", return_value=ret):
             with mock.patch.dict(os.environ, values=mock_environ, clear=True):
                 assert get_terminal_width() == 80
+
+    def test_parse_location_windows(self) -> None:
+        assert parse_location(r"C:\test.py:1:1") == (r"C:\test.py", [1, 1])
+        assert parse_location(r"C:\test.py:1:1:1:1") == (r"C:\test.py", [1, 1, 1, 1])
diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test
index ca0cd90911b9..77367eb02bfe 100644
--- a/test-data/unit/daemon.test
+++ b/test-data/unit/daemon.test
@@ -372,6 +372,9 @@ foo.py:3: error: Incompatible types in assignment (expression has type "str", va
 $ dmypy inspect foo:1
 Format should be file:line:column[:end_line:end_column]
 == Return code: 2
+$ dmypy inspect foo:1:2:3
+Source file is not a Python file
+== Return code: 2
 $ dmypy inspect foo.py:1:2:a:b
 invalid literal for int() with base 10: 'a'
 == Return code: 2

From ad0e183b0df7cc3dd94d9e1cd6f5710859beda96 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 30 Oct 2023 12:14:00 +0000
Subject: [PATCH 217/288] Enable Unpack/TypeVarTuple support (#16354)

Fixes https://github.com/python/mypy/issues/12280
Fixes https://github.com/python/mypy/issues/14697

In this PR:
* Enable `TypeVarTuple` and `Unpack` features.
* Delete the old blanket `--enable-incomplete-features` flag that was
deprecated a year ago.
* Switch a couple of corner cases to the `PreciseTupleTypes` feature.
* Add the draft docs about the new feature.
* Handle a previously unhandled case where variadic tuple appears in
string formatting (discovered on mypy self-check, where
`PreciseTupleTypes` is already enabled).

---------

Co-authored-by: Jelle Zijlstra <jelle.zijlstra@gmail.com>
---
 docs/source/command_line.rst            | 52 +++++++++++++++++++++++++
 mypy/checkexpr.py                       |  8 ++--
 mypy/checkstrformat.py                  | 19 +++++++++
 mypy/main.py                            | 17 +++-----
 mypy/options.py                         |  6 +--
 mypy/semanal.py                         |  5 +--
 mypy/test/testcheck.py                  |  3 --
 mypy/test/testfinegrained.py            |  3 +-
 mypy/test/testsemanal.py                |  3 +-
 mypy/test/testtransform.py              |  2 -
 mypy/typeanal.py                        |  4 +-
 test-data/unit/check-flags.test         | 12 ------
 test-data/unit/check-tuples.test        | 16 ++++++++
 test-data/unit/check-typevar-tuple.test |  3 ++
 test-data/unit/cmdline.test             | 18 +++++----
 15 files changed, 116 insertions(+), 55 deletions(-)

diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
index 5db118334519..a810c35cb77f 100644
--- a/docs/source/command_line.rst
+++ b/docs/source/command_line.rst
@@ -991,6 +991,58 @@ format into the specified directory.
     library or specify mypy installation with the setuptools extra
     ``mypy[reports]``.
 
+
+Enabling incomplete/experimental features
+*****************************************
+
+.. option:: --enable-incomplete-feature FEATURE
+
+    Some features may require several mypy releases to implement, for example
+    due to their complexity, potential for backwards incompatibility, or
+    ambiguous semantics that would benefit from feedback from the community.
+    You can enable such features for early preview using this flag. Note that
+    it is not guaranteed that all features will be ultimately enabled by
+    default. In *rare cases* we may decide to not go ahead with certain
+    features.
+
+List of currently incomplete/experimental features:
+
+* ``PreciseTupleTypes``: this feature will infer more precise tuple types in
+  various scenarios. Before variadic types were added to the Python type system
+  by :pep:`646`, it was impossible to express a type like "a tuple with
+  at least two integers". The best type available was ``tuple[int, ...]``.
+  Therefore, mypy applied very lenient checking for variable-length tuples.
+  Now this type can be expressed as ``tuple[int, int, *tuple[int, ...]]``.
+  For such more precise types (when explicitly *defined* by a user) mypy,
+  for example, warns about unsafe index access, and generally handles them
+  in a type-safe manner. However, to avoid problems in existing code, mypy
+  does not *infer* these precise types when it technically can. Here are
+  notable examples where ``PreciseTupleTypes`` infers more precise types:
+
+  .. code-block:: python
+
+     numbers: tuple[int, ...]
+
+     more_numbers = (1, *numbers, 1)
+     reveal_type(more_numbers)
+     # Without PreciseTupleTypes: tuple[int, ...]
+     # With PreciseTupleTypes: tuple[int, *tuple[int, ...], int]
+
+     other_numbers = (1, 1) + numbers
+     reveal_type(other_numbers)
+     # Without PreciseTupleTypes: tuple[int, ...]
+     # With PreciseTupleTypes: tuple[int, int, *tuple[int, ...]]
+
+     if len(numbers) > 2:
+         reveal_type(numbers)
+         # Without PreciseTupleTypes: tuple[int, ...]
+         # With PreciseTupleTypes: tuple[int, int, int, *tuple[int, ...]]
+     else:
+         reveal_type(numbers)
+         # Without PreciseTupleTypes: tuple[int, ...]
+         # With PreciseTupleTypes: tuple[()] | tuple[int] | tuple[int, int]
+
+
 Miscellaneous
 *************
 
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index df6000050986..0207c245b1f9 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -97,7 +97,7 @@
     YieldExpr,
     YieldFromExpr,
 )
-from mypy.options import TYPE_VAR_TUPLE
+from mypy.options import PRECISE_TUPLE_TYPES
 from mypy.plugin import (
     FunctionContext,
     FunctionSigContext,
@@ -3377,7 +3377,7 @@ def visit_op_expr(self, e: OpExpr) -> Type:
                         ):
                             return self.concat_tuples(proper_left_type, proper_right_type)
                 elif (
-                    TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature
+                    PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature
                     and isinstance(proper_right_type, Instance)
                     and self.chk.type_is_iterable(proper_right_type)
                 ):
@@ -3411,7 +3411,7 @@ def visit_op_expr(self, e: OpExpr) -> Type:
                 if is_named_instance(proper_right_type, "builtins.dict"):
                     use_reverse = USE_REVERSE_NEVER
 
-        if TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature:
+        if PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature:
             # Handle tuple[X, ...] + tuple[Y, Z] = tuple[*tuple[X, ...], Y, Z].
             if (
                 e.op == "+"
@@ -4988,7 +4988,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type:
                         j += len(tt.items)
                 else:
                     if (
-                        TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature
+                        PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature
                         and not seen_unpack_in_items
                     ):
                         # Handle (x, *y, z), where y is e.g. tuple[Y, ...].
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py
index eeb9e7633756..39d44e84a9c1 100644
--- a/mypy/checkstrformat.py
+++ b/mypy/checkstrformat.py
@@ -47,8 +47,11 @@
     TupleType,
     Type,
     TypeOfAny,
+    TypeVarTupleType,
     TypeVarType,
     UnionType,
+    UnpackType,
+    find_unpack_in_list,
     get_proper_type,
     get_proper_types,
 )
@@ -728,6 +731,22 @@ def check_simple_str_interpolation(
         rep_types: list[Type] = []
         if isinstance(rhs_type, TupleType):
             rep_types = rhs_type.items
+            unpack_index = find_unpack_in_list(rep_types)
+            if unpack_index is not None:
+                # TODO: we should probably warn about a potentially short tuple.
+                # However, without special-casing for tuple(f(i) for i in other_tuple)
+                # this causes false positive on mypy self-check in report.py.
+                extras = max(0, len(checkers) - len(rep_types) + 1)
+                unpacked = rep_types[unpack_index]
+                assert isinstance(unpacked, UnpackType)
+                unpacked = get_proper_type(unpacked.type)
+                if isinstance(unpacked, TypeVarTupleType):
+                    unpacked = get_proper_type(unpacked.upper_bound)
+                assert (
+                    isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple"
+                )
+                unpack_items = [unpacked.args[0]] * extras
+                rep_types = rep_types[:unpack_index] + unpack_items + rep_types[unpack_index + 1 :]
         elif isinstance(rhs_type, AnyType):
             return
         elif isinstance(rhs_type, Instance) and rhs_type.type.fullname == "builtins.tuple":
diff --git a/mypy/main.py b/mypy/main.py
index 43ab761072ca..1aede530c33e 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -22,7 +22,7 @@
 from mypy.find_sources import InvalidSourceList, create_source_list
 from mypy.fscache import FileSystemCache
 from mypy.modulefinder import BuildSource, FindModuleCache, SearchPaths, get_search_dirs, mypy_path
-from mypy.options import INCOMPLETE_FEATURES, BuildType, Options
+from mypy.options import COMPLETE_FEATURES, INCOMPLETE_FEATURES, BuildType, Options
 from mypy.split_namespace import SplitNamespace
 from mypy.version import __version__
 
@@ -1151,10 +1151,7 @@ def add_invertible_flag(
     # --debug-serialize will run tree.serialize() even if cache generation is disabled.
     # Useful for mypy_primer to detect serialize errors earlier.
     parser.add_argument("--debug-serialize", action="store_true", help=argparse.SUPPRESS)
-    # This one is deprecated, but we will keep it for few releases.
-    parser.add_argument(
-        "--enable-incomplete-features", action="store_true", help=argparse.SUPPRESS
-    )
+
     parser.add_argument(
         "--disable-bytearray-promotion", action="store_true", help=argparse.SUPPRESS
     )
@@ -1334,14 +1331,10 @@ def set_strict_flags() -> None:
 
     # Validate incomplete features.
     for feature in options.enable_incomplete_feature:
-        if feature not in INCOMPLETE_FEATURES:
+        if feature not in INCOMPLETE_FEATURES | COMPLETE_FEATURES:
             parser.error(f"Unknown incomplete feature: {feature}")
-    if options.enable_incomplete_features:
-        print(
-            "Warning: --enable-incomplete-features is deprecated, use"
-            " --enable-incomplete-feature=FEATURE instead"
-        )
-        options.enable_incomplete_feature = list(INCOMPLETE_FEATURES)
+        if feature in COMPLETE_FEATURES:
+            print(f"Warning: {feature} is already enabled by default")
 
     # Compute absolute path for custom typeshed (if present).
     if options.custom_typeshed_dir is not None:
diff --git a/mypy/options.py b/mypy/options.py
index 31d5d584f897..8bb20dbd4410 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -69,11 +69,12 @@ class BuildType:
     }
 ) - {"debug_cache"}
 
-# Features that are currently incomplete/experimental
+# Features that are currently (or were recently) incomplete/experimental
 TYPE_VAR_TUPLE: Final = "TypeVarTuple"
 UNPACK: Final = "Unpack"
 PRECISE_TUPLE_TYPES: Final = "PreciseTupleTypes"
-INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK, PRECISE_TUPLE_TYPES))
+INCOMPLETE_FEATURES: Final = frozenset((PRECISE_TUPLE_TYPES,))
+COMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK))
 
 
 class Options:
@@ -307,7 +308,6 @@ def __init__(self) -> None:
         self.dump_type_stats = False
         self.dump_inference_stats = False
         self.dump_build_stats = False
-        self.enable_incomplete_features = False  # deprecated
         self.enable_incomplete_feature: list[str] = []
         self.timing_stats: str | None = None
         self.line_checking_stats: str | None = None
diff --git a/mypy/semanal.py b/mypy/semanal.py
index bd24c48ed24f..6f322af816ea 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -179,7 +179,7 @@
     type_aliases_source_versions,
     typing_extensions_aliases,
 )
-from mypy.options import TYPE_VAR_TUPLE, Options
+from mypy.options import Options
 from mypy.patterns import (
     AsPattern,
     ClassPattern,
@@ -4417,9 +4417,6 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool:
             else:
                 self.fail(f'Unexpected keyword argument "{param_name}" for "TypeVarTuple"', s)
 
-        if not self.incomplete_feature_enabled(TYPE_VAR_TUPLE, s):
-            return False
-
         name = self.extract_typevarlike_name(s, call)
         if name is None:
             return False
diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py
index 591421465a97..3ad97ced61f2 100644
--- a/mypy/test/testcheck.py
+++ b/mypy/test/testcheck.py
@@ -10,7 +10,6 @@
 from mypy.build import Graph
 from mypy.errors import CompileError
 from mypy.modulefinder import BuildSource, FindModuleCache, SearchPaths
-from mypy.options import TYPE_VAR_TUPLE, UNPACK
 from mypy.test.config import test_data_prefix, test_temp_dir
 from mypy.test.data import DataDrivenTestCase, DataSuite, FileOperation, module_from_path
 from mypy.test.helpers import (
@@ -125,8 +124,6 @@ def run_case_once(
         # Parse options after moving files (in case mypy.ini is being moved).
         options = parse_options(original_program_text, testcase, incremental_step)
         options.use_builtins_fixtures = True
-        if not testcase.name.endswith("_no_incomplete"):
-            options.enable_incomplete_feature += [TYPE_VAR_TUPLE, UNPACK]
         options.show_traceback = True
 
         # Enable some options automatically based on test file name.
diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py
index c517c54286d7..953f91a60df7 100644
--- a/mypy/test/testfinegrained.py
+++ b/mypy/test/testfinegrained.py
@@ -28,7 +28,7 @@
 from mypy.errors import CompileError
 from mypy.find_sources import create_source_list
 from mypy.modulefinder import BuildSource
-from mypy.options import TYPE_VAR_TUPLE, UNPACK, Options
+from mypy.options import Options
 from mypy.server.mergecheck import check_consistency
 from mypy.server.update import sort_messages_preserving_file_order
 from mypy.test.config import test_temp_dir
@@ -149,7 +149,6 @@ def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bo
         options.use_fine_grained_cache = self.use_cache and not build_cache
         options.cache_fine_grained = self.use_cache
         options.local_partial_types = True
-        options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK]
         # Treat empty bodies safely for these test cases.
         options.allow_empty_bodies = not testcase.name.endswith("_no_empty")
         if re.search("flags:.*--follow-imports", source) is None:
diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py
index 3455f41aa20a..cdecc4739168 100644
--- a/mypy/test/testsemanal.py
+++ b/mypy/test/testsemanal.py
@@ -10,7 +10,7 @@
 from mypy.errors import CompileError
 from mypy.modulefinder import BuildSource
 from mypy.nodes import TypeInfo
-from mypy.options import TYPE_VAR_TUPLE, UNPACK, Options
+from mypy.options import Options
 from mypy.test.config import test_temp_dir
 from mypy.test.data import DataDrivenTestCase, DataSuite
 from mypy.test.helpers import (
@@ -45,7 +45,6 @@ def get_semanal_options(program_text: str, testcase: DataDrivenTestCase) -> Opti
     options.semantic_analysis_only = True
     options.show_traceback = True
     options.python_version = PYTHON3_VERSION
-    options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK]
     options.force_uppercase_builtins = True
     return options
 
diff --git a/mypy/test/testtransform.py b/mypy/test/testtransform.py
index ba9fe8668fb4..9388dca02c7a 100644
--- a/mypy/test/testtransform.py
+++ b/mypy/test/testtransform.py
@@ -5,7 +5,6 @@
 from mypy import build
 from mypy.errors import CompileError
 from mypy.modulefinder import BuildSource
-from mypy.options import TYPE_VAR_TUPLE, UNPACK
 from mypy.test.config import test_temp_dir
 from mypy.test.data import DataDrivenTestCase, DataSuite
 from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages, parse_options
@@ -38,7 +37,6 @@ def test_transform(testcase: DataDrivenTestCase) -> None:
         options = parse_options(src, testcase, 1)
         options.use_builtins_fixtures = True
         options.semantic_analysis_only = True
-        options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK]
         options.show_traceback = True
         options.force_uppercase_builtins = True
         result = build.build(
diff --git a/mypy/typeanal.py b/mypy/typeanal.py
index 03579404aac9..d238a452e7a9 100644
--- a/mypy/typeanal.py
+++ b/mypy/typeanal.py
@@ -35,7 +35,7 @@
     check_arg_names,
     get_nongen_builtins,
 )
-from mypy.options import UNPACK, Options
+from mypy.options import Options
 from mypy.plugin import AnalyzeTypeContext, Plugin, TypeAnalyzerPluginInterface
 from mypy.semanal_shared import SemanticAnalyzerCoreInterface, paramspec_args, paramspec_kwargs
 from mypy.tvar_scope import TypeVarLikeScope
@@ -664,8 +664,6 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ
             # In most contexts, TypeGuard[...] acts as an alias for bool (ignoring its args)
             return self.named_type("builtins.bool")
         elif fullname in ("typing.Unpack", "typing_extensions.Unpack"):
-            if not self.api.incomplete_feature_enabled(UNPACK, t):
-                return AnyType(TypeOfAny.from_error)
             if len(t.args) != 1:
                 self.fail("Unpack[...] requires exactly one type argument", t)
                 return AnyType(TypeOfAny.from_error)
diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test
index 546d02a07ad0..04adaca317c1 100644
--- a/test-data/unit/check-flags.test
+++ b/test-data/unit/check-flags.test
@@ -2190,18 +2190,6 @@ x: int = ""  # E: Incompatible types in assignment (expression has type "str", v
 # flags: --hide-error-codes
 x: int = ""  # E: Incompatible types in assignment (expression has type "str", variable has type "int")
 
-[case testTypeVarTupleDisabled_no_incomplete]
-from typing_extensions import TypeVarTuple
-Ts = TypeVarTuple("Ts")  # E: "TypeVarTuple" support is experimental, use --enable-incomplete-feature=TypeVarTuple to enable
-[builtins fixtures/tuple.pyi]
-
-[case testTypeVarTupleEnabled_no_incomplete]
-# flags: --enable-incomplete-feature=TypeVarTuple
-from typing_extensions import TypeVarTuple
-Ts = TypeVarTuple("Ts")  # OK
-[builtins fixtures/tuple.pyi]
-
-
 [case testDisableBytearrayPromotion]
 # flags: --disable-bytearray-promotion
 def f(x: bytes) -> None: ...
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 7070ead43746..4f468b59fc3f 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -1100,12 +1100,28 @@ reveal_type(b)  # N: Revealed type is "Tuple[builtins.int, builtins.int, builtin
 [case testTupleWithStarExpr2]
 a = [1]
 b = (0, *a)
+reveal_type(b)  # N: Revealed type is "builtins.tuple[builtins.int, ...]"
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithStarExpr2Precise]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
+a = [1]
+b = (0, *a)
 reveal_type(b)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]"
 [builtins fixtures/tuple.pyi]
 
 [case testTupleWithStarExpr3]
 a = ['']
 b = (0, *a)
+reveal_type(b)  # N: Revealed type is "builtins.tuple[builtins.object, ...]"
+c = (*a, '')
+reveal_type(c)  # N: Revealed type is "builtins.tuple[builtins.str, ...]"
+[builtins fixtures/tuple.pyi]
+
+[case testTupleWithStarExpr3Precise]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
+a = ['']
+b = (0, *a)
 reveal_type(b)  # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.str, ...]]]"
 c = (*a, '')
 reveal_type(c)  # N: Revealed type is "Tuple[Unpack[builtins.tuple[builtins.str, ...]], builtins.str]"
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 7b8a22313b36..a51b535a873c 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -1653,6 +1653,7 @@ def foo(arg: Tuple[int, Unpack[Ts], str]) -> None:
 [builtins fixtures/tuple.pyi]
 
 [case testPackingVariadicTuplesHomogeneous]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
 from typing import Tuple
 from typing_extensions import Unpack
 
@@ -1689,6 +1690,7 @@ def foo(arg: Tuple[int, Unpack[Ts], str]) -> None:
 [builtins fixtures/isinstancelist.pyi]
 
 [case testVariadicTupleInTupleContext]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
 from typing import Tuple, Optional
 from typing_extensions import TypeVarTuple, Unpack
 
@@ -1701,6 +1703,7 @@ vt2 = 1, *test(), 2  # E: Need type annotation for "vt2"
 [builtins fixtures/tuple.pyi]
 
 [case testVariadicTupleConcatenation]
+# flags: --enable-incomplete-feature=PreciseTupleTypes
 from typing import Tuple
 from typing_extensions import TypeVarTuple, Unpack
 
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test
index 91242eb62fcf..f286f4781ed5 100644
--- a/test-data/unit/cmdline.test
+++ b/test-data/unit/cmdline.test
@@ -1421,14 +1421,6 @@ b \d+
 b\.c \d+
 .*
 
-[case testCmdlineEnableIncompleteFeatures]
-# cmd: mypy --enable-incomplete-features a.py
-[file a.py]
-pass
-[out]
-Warning: --enable-incomplete-features is deprecated, use --enable-incomplete-feature=FEATURE instead
-== Return code: 0
-
 [case testShadowTypingModuleEarlyLoad]
 # cmd: mypy dir
 [file dir/__init__.py]
@@ -1585,3 +1577,13 @@ disable_error_code =
 always_true =
   MY_VAR,
 [out]
+
+[case testTypeVarTupleUnpackEnabled]
+# cmd: mypy --enable-incomplete-feature=TypeVarTuple --enable-incomplete-feature=Unpack a.py
+[file a.py]
+from typing_extensions import TypeVarTuple
+Ts = TypeVarTuple("Ts")
+[out]
+Warning: TypeVarTuple is already enabled by default
+Warning: Unpack is already enabled by default
+== Return code: 0

From 5624f401b3786ebdbe167c27297ed778cce3faa5 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Mon, 30 Oct 2023 14:27:43 +0000
Subject: [PATCH 218/288] Fix daemon crash caused by deleted submodule (#16370)

If a submodule has been deleted while using a fine-grained cache, the
daemon could crash during fixup, since there could be a symbol table
entry in a parent package that would appear to refer to itself. Handle
the case by adding a placeholder symbol table entry instead. Eventually
the parent package will be reprocessed and the symbol table will be
completed.
---
 mypy/fixup.py                    | 19 +++++++++++++++++--
 mypy/nodes.py                    |  2 ++
 test-data/unit/fine-grained.test | 19 +++++++++++++++++++
 3 files changed, 38 insertions(+), 2 deletions(-)

diff --git a/mypy/fixup.py b/mypy/fixup.py
index 5ffc47120734..02c6ab93f29e 100644
--- a/mypy/fixup.py
+++ b/mypy/fixup.py
@@ -128,8 +128,23 @@ def visit_symbol_table(self, symtab: SymbolTable, table_fullname: str) -> None:
                         cross_ref, self.modules, raise_on_missing=not self.allow_missing
                     )
                     if stnode is not None:
-                        assert stnode.node is not None, (table_fullname + "." + key, cross_ref)
-                        value.node = stnode.node
+                        if stnode is value:
+                            # The node seems to refer to itself, which can mean that
+                            # the target is a deleted submodule of the current module,
+                            # and thus lookup falls back to the symbol table of the parent
+                            # package. Here's how this may happen:
+                            #
+                            #   pkg/__init__.py:
+                            #     from pkg import sub
+                            #
+                            # Now if pkg.sub is deleted, the pkg.sub symbol table entry
+                            # appears to refer to itself. Replace the entry with a
+                            # placeholder to avoid a crash. We can't delete the entry,
+                            # as it would stop dependency propagation.
+                            value.node = Var(key + "@deleted")
+                        else:
+                            assert stnode.node is not None, (table_fullname + "." + key, cross_ref)
+                            value.node = stnode.node
                     elif not self.allow_missing:
                         assert False, f"Could not find cross-ref {cross_ref}"
                     else:
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 1d7b3e3be84b..d65a23a6b7fe 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -3824,6 +3824,8 @@ def __str__(self) -> str:
         # Include declared type of variables and functions.
         if self.type is not None:
             s += f" : {self.type}"
+        if self.cross_ref:
+            s += f" cross_ref:{self.cross_ref}"
         return s
 
     def serialize(self, prefix: str, name: str) -> JsonDict:
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
index 5dc42bd62d9b..165a2089b466 100644
--- a/test-data/unit/fine-grained.test
+++ b/test-data/unit/fine-grained.test
@@ -10486,3 +10486,22 @@ reveal_type(s)
 ==
 ==
 b.py:2: note: Revealed type is "builtins.str"
+
+[case testRenameSubModule]
+import a
+
+[file a.py]
+import pkg.sub
+
+[file pkg/__init__.py]
+[file pkg/sub/__init__.py]
+from pkg.sub import mod
+[file pkg/sub/mod.py]
+
+[file pkg/sub/__init__.py.2]
+from pkg.sub import modb
+[delete pkg/sub/mod.py.2]
+[file pkg/sub/modb.py.2]
+
+[out]
+==

From 128176ad1150cebd4b4e20ff7f1ec3f9857d1754 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Mon, 30 Oct 2023 17:24:35 +0000
Subject: [PATCH 219/288] Bump version to 1.8.0+dev (#16372)

---
 mypy/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypy/version.py b/mypy/version.py
index 7cfc68d6e553..2c2c2b052da2 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -8,7 +8,7 @@
 # - Release versions have the form "1.2.3".
 # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440).
 # - Before 1.0 we had the form "0.NNN".
-__version__ = "1.7.0+dev"
+__version__ = "1.8.0+dev"
 base_version = __version__
 
 mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))

From 0ff7a29d5336dad6400a9356bd4116b59c20a875 Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra <jelle.zijlstra@gmail.com>
Date: Mon, 30 Oct 2023 11:48:07 -0700
Subject: [PATCH 220/288] stubgen: include __all__ in output (#16356)

Fixes #10314
---
 CHANGELOG.md                |  2 +-
 mypy/stubutil.py            | 56 ++++++++++++++++++++++++++-----------
 test-data/unit/stubgen.test | 40 +++++++++++++++++++++++++-
 3 files changed, 79 insertions(+), 19 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d8237795112b..74f7c676c279 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,7 +2,7 @@
 
 ## Unreleased
 
-...
+Stubgen will now include `__all__` in its output if it is in the input file (PR [16356](https://github.com/python/mypy/pull/16356)).
 
 #### Other Notable Changes and Fixes
 ...
diff --git a/mypy/stubutil.py b/mypy/stubutil.py
index cc3b63098fd2..5ec240087145 100644
--- a/mypy/stubutil.py
+++ b/mypy/stubutil.py
@@ -614,10 +614,24 @@ def get_imports(self) -> str:
 
     def output(self) -> str:
         """Return the text for the stub."""
-        imports = self.get_imports()
-        if imports and self._output:
-            imports += "\n"
-        return imports + "".join(self._output)
+        pieces: list[str] = []
+        if imports := self.get_imports():
+            pieces.append(imports)
+        if dunder_all := self.get_dunder_all():
+            pieces.append(dunder_all)
+        if self._output:
+            pieces.append("".join(self._output))
+        return "\n".join(pieces)
+
+    def get_dunder_all(self) -> str:
+        """Return the __all__ list for the stub."""
+        if self._all_:
+            # Note we emit all names in the runtime __all__ here, even if they
+            # don't actually exist. If that happens, the runtime has a bug, and
+            # it's not obvious what the correct behavior should be. We choose
+            # to reflect the runtime __all__ as closely as possible.
+            return f"__all__ = {self._all_!r}\n"
+        return ""
 
     def add(self, string: str) -> None:
         """Add text to generated stub."""
@@ -651,8 +665,7 @@ def set_defined_names(self, defined_names: set[str]) -> None:
         self.defined_names = defined_names
         # Names in __all__ are required
         for name in self._all_ or ():
-            if name not in self.IGNORED_DUNDERS:
-                self.import_tracker.reexport(name)
+            self.import_tracker.reexport(name)
 
         # These are "soft" imports for objects which might appear in annotations but not have
         # a corresponding import statement.
@@ -751,7 +764,13 @@ def is_private_name(self, name: str, fullname: str | None = None) -> bool:
             return False
         if name == "_":
             return False
-        return name.startswith("_") and (not name.endswith("__") or name in self.IGNORED_DUNDERS)
+        if not name.startswith("_"):
+            return False
+        if self._all_ and name in self._all_:
+            return False
+        if name.startswith("__") and name.endswith("__"):
+            return name in self.IGNORED_DUNDERS
+        return True
 
     def should_reexport(self, name: str, full_module: str, name_is_alias: bool) -> bool:
         if (
@@ -761,18 +780,21 @@ def should_reexport(self, name: str, full_module: str, name_is_alias: bool) -> b
         ):
             # Special case certain names that should be exported, against our general rules.
             return True
+        if name_is_alias:
+            return False
+        if self.export_less:
+            return False
+        if not self.module_name:
+            return False
         is_private = self.is_private_name(name, full_module + "." + name)
+        if is_private:
+            return False
         top_level = full_module.split(".")[0]
         self_top_level = self.module_name.split(".", 1)[0]
-        if (
-            not name_is_alias
-            and not self.export_less
-            and (not self._all_ or name in self.IGNORED_DUNDERS)
-            and self.module_name
-            and not is_private
-            and top_level in (self_top_level, "_" + self_top_level)
-        ):
+        if top_level not in (self_top_level, "_" + self_top_level):
             # Export imports from the same package, since we can't reliably tell whether they
             # are part of the public API.
-            return True
-        return False
+            return False
+        if self._all_:
+            return name in self._all_
+        return True
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index 895500c1ba57..2a43ce16383d 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -587,6 +587,8 @@ __all__ = [] + ['f']
 def f(): ...
 def g(): ...
 [out]
+__all__ = ['f']
+
 def f() -> None: ...
 
 [case testOmitDefsNotInAll_semanal]
@@ -594,6 +596,8 @@ __all__ = ['f']
 def f(): ...
 def g(): ...
 [out]
+__all__ = ['f']
+
 def f() -> None: ...
 
 [case testOmitDefsNotInAll_inspect]
@@ -601,6 +605,8 @@ __all__ = [] + ['f']
 def f(): ...
 def g(): ...
 [out]
+__all__ = ['f']
+
 def f(): ...
 
 [case testVarDefsNotInAll_import]
@@ -610,6 +616,8 @@ x = 1
 y = 1
 def g(): ...
 [out]
+__all__ = ['f', 'g']
+
 def f() -> None: ...
 def g() -> None: ...
 
@@ -620,6 +628,8 @@ x = 1
 y = 1
 def g(): ...
 [out]
+__all__ = ['f', 'g']
+
 def f(): ...
 def g(): ...
 
@@ -628,6 +638,8 @@ __all__ = [] + ['f']
 def f(): ...
 class A: ...
 [out]
+__all__ = ['f']
+
 def f() -> None: ...
 
 class A: ...
@@ -637,6 +649,8 @@ __all__ = [] + ['f']
 def f(): ...
 class A: ...
 [out]
+__all__ = ['f']
+
 def f(): ...
 
 class A: ...
@@ -647,6 +661,8 @@ class A:
     x = 1
     def f(self): ...
 [out]
+__all__ = ['A']
+
 class A:
     x: int
     def f(self) -> None: ...
@@ -684,6 +700,8 @@ x = 1
 [out]
 from re import match as match, sub as sub
 
+__all__ = ['match', 'sub', 'x']
+
 x: int
 
 [case testExportModule_import]
@@ -694,6 +712,8 @@ y = 2
 [out]
 import re as re
 
+__all__ = ['re', 'x']
+
 x: int
 
 [case testExportModule2_import]
@@ -704,6 +724,8 @@ y = 2
 [out]
 import re as re
 
+__all__ = ['re', 'x']
+
 x: int
 
 [case testExportModuleAs_import]
@@ -714,6 +736,8 @@ y = 2
 [out]
 import re as rex
 
+__all__ = ['rex', 'x']
+
 x: int
 
 [case testExportModuleInPackage_import]
@@ -722,6 +746,8 @@ __all__ = ['p']
 [out]
 import urllib.parse as p
 
+__all__ = ['p']
+
 [case testExportPackageOfAModule_import]
 import urllib.parse
 __all__ = ['urllib']
@@ -729,6 +755,8 @@ __all__ = ['urllib']
 [out]
 import urllib as urllib
 
+__all__ = ['urllib']
+
 [case testRelativeImportAll]
 from .x import *
 [out]
@@ -741,6 +769,8 @@ x = 1
 class C:
     def g(self): ...
 [out]
+__all__ = ['f', 'x', 'C', 'g']
+
 def f() -> None: ...
 
 x: int
@@ -758,6 +788,8 @@ x = 1
 class C:
     def g(self): ...
 [out]
+__all__ = ['f', 'x', 'C', 'g']
+
 def f(): ...
 
 x: int
@@ -2343,6 +2375,8 @@ else:
 [out]
 import cookielib as cookielib
 
+__all__ = ['cookielib']
+
 [case testCannotCalculateMRO_semanal]
 class X: pass
 
@@ -2788,6 +2822,8 @@ class A: pass
 # p/__init__.pyi
 from p.a import A
 
+__all__ = ['a']
+
 a: A
 # p/a.pyi
 class A: ...
@@ -2961,7 +2997,9 @@ __uri__ = ''
 __version__ = ''
 
 [out]
-from m import __version__ as __version__
+from m import __about__ as __about__, __author__ as __author__, __version__ as __version__
+
+__all__ = ['__about__', '__author__', '__version__']
 
 [case testAttrsClass_semanal]
 import attrs

From 4291b2c07f8dd862c6656d51b8267e7fc84ad1dc Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
 <41898282+github-actions[bot]@users.noreply.github.com>
Date: Tue, 31 Oct 2023 19:24:30 -0700
Subject: [PATCH 221/288] Sync typeshed (#16382)

Source commit:

https://github.com/python/typeshed/commit/3c872ca8fd875f2dc5fe5f5d771e35c58390cd0e

Co-authored-by: mypybot <>
Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Co-authored-by: AlexWaygood <alex.waygood@gmail.com>
---
 mypy/typeshed/stdlib/VERSIONS                 |   6 +
 mypy/typeshed/stdlib/_collections_abc.pyi     |   2 +-
 mypy/typeshed/stdlib/_typeshed/__init__.pyi   |   2 +-
 mypy/typeshed/stdlib/_weakrefset.pyi          |   4 +-
 mypy/typeshed/stdlib/_winapi.pyi              |   2 +-
 mypy/typeshed/stdlib/array.pyi                |   4 +-
 mypy/typeshed/stdlib/asyncio/tasks.pyi        |   4 +-
 mypy/typeshed/stdlib/builtins.pyi             |  24 ++--
 mypy/typeshed/stdlib/collections/__init__.pyi |   8 +-
 mypy/typeshed/stdlib/contextlib.pyi           |   2 +-
 mypy/typeshed/stdlib/csv.pyi                  |   2 +-
 mypy/typeshed/stdlib/fileinput.pyi            |   4 +-
 mypy/typeshed/stdlib/hashlib.pyi              |   9 +-
 mypy/typeshed/stdlib/importlib/abc.pyi        |  88 +++-----------
 mypy/typeshed/stdlib/importlib/readers.pyi    |  68 +++++++++++
 .../stdlib/importlib/resources/readers.pyi    |  14 +++
 .../stdlib/importlib/resources/simple.pyi     |  49 ++++++++
 mypy/typeshed/stdlib/importlib/simple.pyi     |  11 ++
 mypy/typeshed/stdlib/inspect.pyi              |   8 ++
 mypy/typeshed/stdlib/io.pyi                   |  10 +-
 mypy/typeshed/stdlib/itertools.pyi            |  30 ++---
 mypy/typeshed/stdlib/nt.pyi                   | 111 ++++++++++++++++++
 .../stdlib/{sys.pyi => sys/__init__.pyi}      |   4 +
 mypy/typeshed/stdlib/sys/_monitoring.pyi      |  52 ++++++++
 mypy/typeshed/stdlib/tempfile.pyi             |   2 +-
 mypy/typeshed/stdlib/tkinter/__init__.pyi     |  84 +++++++++++--
 mypy/typeshed/stdlib/types.pyi                |   4 +-
 mypy/typeshed/stdlib/typing.pyi               |  10 +-
 mypy/typeshed/stdlib/weakref.pyi              |   2 +-
 29 files changed, 476 insertions(+), 144 deletions(-)
 create mode 100644 mypy/typeshed/stdlib/importlib/readers.pyi
 create mode 100644 mypy/typeshed/stdlib/importlib/resources/readers.pyi
 create mode 100644 mypy/typeshed/stdlib/importlib/resources/simple.pyi
 create mode 100644 mypy/typeshed/stdlib/importlib/simple.pyi
 create mode 100644 mypy/typeshed/stdlib/nt.pyi
 rename mypy/typeshed/stdlib/{sys.pyi => sys/__init__.pyi} (99%)
 create mode 100644 mypy/typeshed/stdlib/sys/_monitoring.pyi

diff --git a/mypy/typeshed/stdlib/VERSIONS b/mypy/typeshed/stdlib/VERSIONS
index 9d4636a29a1d..d24e85c8fe44 100644
--- a/mypy/typeshed/stdlib/VERSIONS
+++ b/mypy/typeshed/stdlib/VERSIONS
@@ -152,8 +152,12 @@ imp: 2.7-3.11
 importlib: 2.7-
 importlib.metadata: 3.8-
 importlib.metadata._meta: 3.10-
+importlib.readers: 3.10-
 importlib.resources: 3.7-
 importlib.resources.abc: 3.11-
+importlib.resources.readers: 3.11-
+importlib.resources.simple: 3.11-
+importlib.simple: 3.11-
 inspect: 2.7-
 io: 2.7-
 ipaddress: 3.3-
@@ -181,6 +185,7 @@ multiprocessing.shared_memory: 3.8-
 netrc: 2.7-
 nis: 2.7-
 nntplib: 2.7-
+nt: 2.7-
 ntpath: 2.7-
 nturl2path: 2.7-
 numbers: 2.7-
@@ -250,6 +255,7 @@ sunau: 2.7-
 symbol: 2.7-3.9
 symtable: 2.7-
 sys: 2.7-
+sys._monitoring: 3.12-  # Doesn't actually exist. See comments in the stub.
 sysconfig: 2.7-
 syslog: 2.7-
 tabnanny: 2.7-
diff --git a/mypy/typeshed/stdlib/_collections_abc.pyi b/mypy/typeshed/stdlib/_collections_abc.pyi
index 2b57f157a0e4..8520e9e4ed9b 100644
--- a/mypy/typeshed/stdlib/_collections_abc.pyi
+++ b/mypy/typeshed/stdlib/_collections_abc.pyi
@@ -81,7 +81,7 @@ class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]):  # undocumented
         def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
 
 @final
-class dict_items(ItemsView[_KT_co, _VT_co], Generic[_KT_co, _VT_co]):  # undocumented
+class dict_items(ItemsView[_KT_co, _VT_co]):  # undocumented
     def __eq__(self, __value: object) -> bool: ...
     if sys.version_info >= (3, 10):
         @property
diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi
index 8e92138c748a..ad214a2a5e0d 100644
--- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi
+++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi
@@ -47,7 +47,7 @@ Unused: TypeAlias = object
 # _SentinelType = NewType("_SentinelType", object)
 # sentinel: _SentinelType
 # def foo(x: int | None | _SentinelType = ...) -> None: ...
-sentinel = Any  # noqa: Y026
+sentinel: Any
 
 # stable
 class IdentityFunction(Protocol):
diff --git a/mypy/typeshed/stdlib/_weakrefset.pyi b/mypy/typeshed/stdlib/_weakrefset.pyi
index d73d79155329..6482ade1271e 100644
--- a/mypy/typeshed/stdlib/_weakrefset.pyi
+++ b/mypy/typeshed/stdlib/_weakrefset.pyi
@@ -1,6 +1,6 @@
 import sys
 from collections.abc import Iterable, Iterator, MutableSet
-from typing import Any, Generic, TypeVar, overload
+from typing import Any, TypeVar, overload
 from typing_extensions import Self
 
 if sys.version_info >= (3, 9):
@@ -11,7 +11,7 @@ __all__ = ["WeakSet"]
 _S = TypeVar("_S")
 _T = TypeVar("_T")
 
-class WeakSet(MutableSet[_T], Generic[_T]):
+class WeakSet(MutableSet[_T]):
     @overload
     def __init__(self, data: None = None) -> None: ...
     @overload
diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi
index e887fb38a7fa..1aec6ce50443 100644
--- a/mypy/typeshed/stdlib/_winapi.pyi
+++ b/mypy/typeshed/stdlib/_winapi.pyi
@@ -54,7 +54,7 @@ if sys.platform == "win32":
     HIGH_PRIORITY_CLASS: Literal[0x80]
     INFINITE: Literal[0xFFFFFFFF]
     if sys.version_info >= (3, 8):
-        # Ignore the flake8 error -- flake8-pyi assumes
+        # Ignore the Flake8 error -- flake8-pyi assumes
         # most numbers this long will be implementation details,
         # but here we can see that it's a power of 2
         INVALID_HANDLE_VALUE: Literal[0xFFFFFFFFFFFFFFFF]  # noqa: Y054
diff --git a/mypy/typeshed/stdlib/array.pyi b/mypy/typeshed/stdlib/array.pyi
index b533f9240073..2ef821fcf87a 100644
--- a/mypy/typeshed/stdlib/array.pyi
+++ b/mypy/typeshed/stdlib/array.pyi
@@ -3,7 +3,7 @@ from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite
 from collections.abc import Iterable
 
 # pytype crashes if array inherits from collections.abc.MutableSequence instead of typing.MutableSequence
-from typing import Any, Generic, MutableSequence, TypeVar, overload  # noqa: Y022
+from typing import Any, MutableSequence, TypeVar, overload  # noqa: Y022
 from typing_extensions import Literal, Self, SupportsIndex, TypeAlias
 
 if sys.version_info >= (3, 12):
@@ -18,7 +18,7 @@ _T = TypeVar("_T", int, float, str)
 
 typecodes: str
 
-class array(MutableSequence[_T], Generic[_T]):
+class array(MutableSequence[_T]):
     @property
     def typecode(self) -> _TypeCode: ...
     @property
diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi
index 366ac7fa35e3..cdac7d359781 100644
--- a/mypy/typeshed/stdlib/asyncio/tasks.pyi
+++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi
@@ -2,7 +2,7 @@ import concurrent.futures
 import sys
 from collections.abc import Awaitable, Coroutine, Generator, Iterable, Iterator
 from types import FrameType
-from typing import Any, Generic, Protocol, TextIO, TypeVar, overload
+from typing import Any, Protocol, TextIO, TypeVar, overload
 from typing_extensions import Literal, TypeAlias
 
 from . import _CoroutineLike
@@ -379,7 +379,7 @@ else:
 # While this is true in general, here it's sort-of okay to have a covariant subclass,
 # since the only reason why `asyncio.Future` is invariant is the `set_result()` method,
 # and `asyncio.Task.set_result()` always raises.
-class Task(Future[_T_co], Generic[_T_co]):  # type: ignore[type-var]  # pyright: ignore[reportGeneralTypeIssues]
+class Task(Future[_T_co]):  # type: ignore[type-var]  # pyright: ignore[reportGeneralTypeIssues]
     if sys.version_info >= (3, 12):
         def __init__(
             self,
diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index dedd72933028..96a1d1e31b17 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -287,7 +287,7 @@ class int:
     def __pow__(self, __value: _PositiveInteger, __mod: None = None) -> int: ...
     @overload
     def __pow__(self, __value: _NegativeInteger, __mod: None = None) -> float: ...
-    # positive x -> int; negative x -> float
+    # positive __value -> int; negative __value -> float
     # return type must be Any as `int | float` causes too many false-positive errors
     @overload
     def __pow__(self, __value: int, __mod: None = None) -> Any: ...
@@ -346,7 +346,7 @@ class float:
     def __divmod__(self, __value: float) -> tuple[float, float]: ...
     @overload
     def __pow__(self, __value: int, __mod: None = None) -> float: ...
-    # positive x -> float; negative x -> complex
+    # positive __value -> float; negative __value -> complex
     # return type must be Any as `float | complex` causes too many false-positive errors
     @overload
     def __pow__(self, __value: float, __mod: None = None) -> Any: ...
@@ -860,7 +860,7 @@ class slice:
     __hash__: ClassVar[None]  # type: ignore[assignment]
     def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ...
 
-class tuple(Sequence[_T_co], Generic[_T_co]):
+class tuple(Sequence[_T_co]):
     def __new__(cls, __iterable: Iterable[_T_co] = ...) -> Self: ...
     def __len__(self) -> int: ...
     def __contains__(self, __key: object) -> bool: ...
@@ -912,7 +912,7 @@ class function:
     # mypy uses `builtins.function.__get__` to represent methods, properties, and getset_descriptors so we type the return as Any.
     def __get__(self, __instance: object, __owner: type | None = None) -> Any: ...
 
-class list(MutableSequence[_T], Generic[_T]):
+class list(MutableSequence[_T]):
     @overload
     def __init__(self) -> None: ...
     @overload
@@ -967,7 +967,7 @@ class list(MutableSequence[_T], Generic[_T]):
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
 
-class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+class dict(MutableMapping[_KT, _VT]):
     # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics
     # Also multiprocessing.managers.SyncManager.dict()
     @overload
@@ -1040,7 +1040,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
         @overload
         def __ior__(self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ...
 
-class set(MutableSet[_T], Generic[_T]):
+class set(MutableSet[_T]):
     @overload
     def __init__(self) -> None: ...
     @overload
@@ -1080,7 +1080,7 @@ class set(MutableSet[_T], Generic[_T]):
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
 
-class frozenset(AbstractSet[_T_co], Generic[_T_co]):
+class frozenset(AbstractSet[_T_co]):
     @overload
     def __new__(cls) -> Self: ...
     @overload
@@ -1109,7 +1109,7 @@ class frozenset(AbstractSet[_T_co], Generic[_T_co]):
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
 
-class enumerate(Iterator[tuple[int, _T]], Generic[_T]):
+class enumerate(Iterator[tuple[int, _T]]):
     def __new__(cls, iterable: Iterable[_T], start: int = ...) -> Self: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> tuple[int, _T]: ...
@@ -1318,7 +1318,7 @@ else:
 
 def exit(code: sys._ExitCode = None) -> NoReturn: ...
 
-class filter(Iterator[_T], Generic[_T]):
+class filter(Iterator[_T]):
     @overload
     def __new__(cls, __function: None, __iterable: Iterable[_T | None]) -> Self: ...
     @overload
@@ -1377,7 +1377,7 @@ def len(__obj: Sized) -> int: ...
 def license() -> None: ...
 def locals() -> dict[str, Any]: ...
 
-class map(Iterator[_S], Generic[_S]):
+class map(Iterator[_S]):
     @overload
     def __new__(cls, __func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> Self: ...
     @overload
@@ -1649,7 +1649,7 @@ else:
 
 def quit(code: sys._ExitCode = None) -> NoReturn: ...
 
-class reversed(Iterator[_T], Generic[_T]):
+class reversed(Iterator[_T]):
     @overload
     def __init__(self, __sequence: Reversible[_T]) -> None: ...
     @overload
@@ -1723,7 +1723,7 @@ def vars(__object: type) -> types.MappingProxyType[str, Any]: ...  # type: ignor
 @overload
 def vars(__object: Any = ...) -> dict[str, Any]: ...
 
-class zip(Iterator[_T_co], Generic[_T_co]):
+class zip(Iterator[_T_co]):
     if sys.version_info >= (3, 10):
         @overload
         def __new__(cls, *, strict: bool = ...) -> zip[Any]: ...
diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi
index 1d560117a54f..bb214b5ea19b 100644
--- a/mypy/typeshed/stdlib/collections/__init__.pyi
+++ b/mypy/typeshed/stdlib/collections/__init__.pyi
@@ -45,7 +45,7 @@ def namedtuple(
     defaults: Iterable[Any] | None = None,
 ) -> type[tuple[Any, ...]]: ...
 
-class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+class UserDict(MutableMapping[_KT, _VT]):
     data: dict[_KT, _VT]
     # __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics
     @overload
@@ -228,7 +228,7 @@ class UserString(Sequence[UserString]):
     def upper(self) -> Self: ...
     def zfill(self, width: int) -> Self: ...
 
-class deque(MutableSequence[_T], Generic[_T]):
+class deque(MutableSequence[_T]):
     @property
     def maxlen(self) -> int | None: ...
     @overload
@@ -383,7 +383,7 @@ class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
         @overload
         def __ror__(self, __value: dict[_T1, _T2]) -> OrderedDict[_KT | _T1, _VT | _T2]: ...  # type: ignore[misc]
 
-class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]):
+class defaultdict(dict[_KT, _VT]):
     default_factory: Callable[[], _VT] | None
     @overload
     def __init__(self) -> None: ...
@@ -424,7 +424,7 @@ class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]):
         @overload
         def __ror__(self, __value: dict[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ...  # type: ignore[misc]
 
-class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]):
+class ChainMap(MutableMapping[_KT, _VT]):
     maps: list[MutableMapping[_KT, _VT]]
     def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ...
     def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ...
diff --git a/mypy/typeshed/stdlib/contextlib.pyi b/mypy/typeshed/stdlib/contextlib.pyi
index dc2101dc01f7..c1bfedd2d1da 100644
--- a/mypy/typeshed/stdlib/contextlib.pyi
+++ b/mypy/typeshed/stdlib/contextlib.pyi
@@ -94,7 +94,7 @@ if sys.version_info >= (3, 10):
         ) -> bool | None: ...
 
 else:
-    class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co], Generic[_T_co]):
+    class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co]):
         def __init__(self, func: Callable[..., AsyncIterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ...
         gen: AsyncGenerator[_T_co, Any]
         func: Callable[..., AsyncGenerator[_T_co, Any]]
diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi
index 53425fbcccb1..f48d9d2ff263 100644
--- a/mypy/typeshed/stdlib/csv.pyi
+++ b/mypy/typeshed/stdlib/csv.pyi
@@ -23,7 +23,7 @@ from _csv import (
 )
 
 if sys.version_info >= (3, 12):
-    from _csv import QUOTE_STRINGS as QUOTE_STRINGS, QUOTE_NOTNULL as QUOTE_NOTNULL
+    from _csv import QUOTE_NOTNULL as QUOTE_NOTNULL, QUOTE_STRINGS as QUOTE_STRINGS
 from _typeshed import SupportsWrite
 from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence
 from typing import Any, Generic, TypeVar, overload
diff --git a/mypy/typeshed/stdlib/fileinput.pyi b/mypy/typeshed/stdlib/fileinput.pyi
index e9f3713b4eaf..c2fd31d1ea77 100644
--- a/mypy/typeshed/stdlib/fileinput.pyi
+++ b/mypy/typeshed/stdlib/fileinput.pyi
@@ -2,7 +2,7 @@ import sys
 from _typeshed import AnyStr_co, StrOrBytesPath
 from collections.abc import Callable, Iterable, Iterator
 from types import TracebackType
-from typing import IO, Any, AnyStr, Generic, Protocol, overload
+from typing import IO, Any, AnyStr, Protocol, overload
 from typing_extensions import Literal, Self, TypeAlias
 
 if sys.version_info >= (3, 9):
@@ -158,7 +158,7 @@ def fileno() -> int: ...
 def isfirstline() -> bool: ...
 def isstdin() -> bool: ...
 
-class FileInput(Iterator[AnyStr], Generic[AnyStr]):
+class FileInput(Iterator[AnyStr]):
     if sys.version_info >= (3, 10):
         # encoding and errors are added
         @overload
diff --git a/mypy/typeshed/stdlib/hashlib.pyi b/mypy/typeshed/stdlib/hashlib.pyi
index 18b1ab549764..ed1321f23b9e 100644
--- a/mypy/typeshed/stdlib/hashlib.pyi
+++ b/mypy/typeshed/stdlib/hashlib.pyi
@@ -113,14 +113,7 @@ shake_128 = _VarLenHash
 shake_256 = _VarLenHash
 
 def scrypt(
-    password: ReadableBuffer,
-    *,
-    salt: ReadableBuffer | None = None,
-    n: int | None = None,
-    r: int | None = None,
-    p: int | None = None,
-    maxmem: int = 0,
-    dklen: int = 64,
+    password: ReadableBuffer, *, salt: ReadableBuffer, n: int, r: int, p: int, maxmem: int = 0, dklen: int = 64
 ) -> bytes: ...
 @final
 class _BlakeHash(_Hash):
diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi
index 28c33205a4df..438dbafb48c3 100644
--- a/mypy/typeshed/stdlib/importlib/abc.pyi
+++ b/mypy/typeshed/stdlib/importlib/abc.pyi
@@ -1,20 +1,12 @@
 import _ast
 import sys
 import types
-from _typeshed import (
-    OpenBinaryMode,
-    OpenBinaryModeReading,
-    OpenBinaryModeUpdating,
-    OpenBinaryModeWriting,
-    OpenTextMode,
-    ReadableBuffer,
-    StrPath,
-)
+from _typeshed import ReadableBuffer, StrPath
 from abc import ABCMeta, abstractmethod
 from collections.abc import Iterator, Mapping, Sequence
 from importlib.machinery import ModuleSpec
-from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper
-from typing import IO, Any, BinaryIO, NoReturn, Protocol, overload, runtime_checkable
+from io import BufferedReader
+from typing import IO, Any, Protocol, overload, runtime_checkable
 from typing_extensions import Literal
 
 if sys.version_info >= (3, 11):
@@ -139,72 +131,26 @@ if sys.version_info >= (3, 9):
             def joinpath(self, *descendants: str) -> Traversable: ...
         else:
             @abstractmethod
-            def joinpath(self, child: str) -> Traversable: ...
-        # The .open method comes from pathlib.pyi and should be kept in sync.
-        @overload
-        @abstractmethod
-        def open(
-            self,
-            mode: OpenTextMode = "r",
-            buffering: int = ...,
-            encoding: str | None = ...,
-            errors: str | None = ...,
-            newline: str | None = ...,
-        ) -> TextIOWrapper: ...
-        # Unbuffered binary mode: returns a FileIO
-        @overload
-        @abstractmethod
-        def open(
-            self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = None, errors: None = None, newline: None = None
-        ) -> FileIO: ...
-        # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter
-        @overload
-        @abstractmethod
-        def open(
-            self,
-            mode: OpenBinaryModeUpdating,
-            buffering: Literal[-1, 1] = ...,
-            encoding: None = None,
-            errors: None = None,
-            newline: None = None,
-        ) -> BufferedRandom: ...
-        @overload
-        @abstractmethod
-        def open(
-            self,
-            mode: OpenBinaryModeWriting,
-            buffering: Literal[-1, 1] = ...,
-            encoding: None = None,
-            errors: None = None,
-            newline: None = None,
-        ) -> BufferedWriter: ...
-        @overload
-        @abstractmethod
-        def open(
-            self,
-            mode: OpenBinaryModeReading,
-            buffering: Literal[-1, 1] = ...,
-            encoding: None = None,
-            errors: None = None,
-            newline: None = None,
-        ) -> BufferedReader: ...
-        # Buffering cannot be determined: fall back to BinaryIO
+            def joinpath(self, __child: str) -> Traversable: ...
+
+        # The documentation and runtime protocol allows *args, **kwargs arguments,
+        # but this would mean that all implementors would have to support them,
+        # which is not the case.
         @overload
         @abstractmethod
-        def open(
-            self, mode: OpenBinaryMode, buffering: int = ..., encoding: None = None, errors: None = None, newline: None = None
-        ) -> BinaryIO: ...
-        # Fallback if mode is not specified
+        def open(self, mode: Literal["r", "w"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ...
         @overload
         @abstractmethod
-        def open(
-            self, mode: str, buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., newline: str | None = ...
-        ) -> IO[Any]: ...
+        def open(self, mode: Literal["rb", "wb"]) -> IO[bytes]: ...
         @property
         @abstractmethod
         def name(self) -> str: ...
-        @abstractmethod
-        def __truediv__(self, child: str) -> Traversable: ...
+        if sys.version_info >= (3, 10):
+            def __truediv__(self, __child: str) -> Traversable: ...
+        else:
+            @abstractmethod
+            def __truediv__(self, __child: str) -> Traversable: ...
+
         @abstractmethod
         def read_bytes(self) -> bytes: ...
         @abstractmethod
@@ -214,6 +160,6 @@ if sys.version_info >= (3, 9):
         @abstractmethod
         def files(self) -> Traversable: ...
         def open_resource(self, resource: str) -> BufferedReader: ...
-        def resource_path(self, resource: Any) -> NoReturn: ...
+        def resource_path(self, resource: Any) -> str: ...
         def is_resource(self, path: str) -> bool: ...
         def contents(self) -> Iterator[str]: ...
diff --git a/mypy/typeshed/stdlib/importlib/readers.pyi b/mypy/typeshed/stdlib/importlib/readers.pyi
new file mode 100644
index 000000000000..f34794601b59
--- /dev/null
+++ b/mypy/typeshed/stdlib/importlib/readers.pyi
@@ -0,0 +1,68 @@
+# On py311+, things are actually defined in importlib.resources.readers,
+# and re-exported here,
+# but doing it this way leads to less code duplication for us
+
+import pathlib
+import sys
+import zipfile
+from _typeshed import Incomplete, StrPath
+from collections.abc import Iterable, Iterator
+from io import BufferedReader
+from typing import NoReturn, TypeVar
+from typing_extensions import Literal, Never
+
+if sys.version_info >= (3, 11):
+    import importlib.resources.abc as abc
+else:
+    import importlib.abc as abc
+
+if sys.version_info >= (3, 10):
+    if sys.version_info >= (3, 11):
+        __all__ = ["FileReader", "ZipReader", "MultiplexedPath", "NamespaceReader"]
+
+    if sys.version_info < (3, 11):
+        _T = TypeVar("_T")
+
+        def remove_duplicates(items: Iterable[_T]) -> Iterator[_T]: ...
+
+    class FileReader(abc.TraversableResources):
+        path: pathlib.Path
+        def __init__(self, loader) -> None: ...
+        def resource_path(self, resource: StrPath) -> str: ...
+        def files(self) -> pathlib.Path: ...
+
+    class ZipReader(abc.TraversableResources):
+        prefix: str
+        archive: Incomplete
+        def __init__(self, loader, module: str) -> None: ...
+        def open_resource(self, resource: str) -> BufferedReader: ...
+        def is_resource(self, path: StrPath) -> bool: ...
+        def files(self) -> zipfile.Path: ...
+
+    class MultiplexedPath(abc.Traversable):
+        def __init__(self, *paths: abc.Traversable) -> None: ...
+        def iterdir(self) -> Iterator[abc.Traversable]: ...
+        def read_bytes(self) -> NoReturn: ...
+        def read_text(self, *args: Never, **kwargs: Never) -> NoReturn: ...  # type: ignore[override]
+        def is_dir(self) -> Literal[True]: ...
+        def is_file(self) -> Literal[False]: ...
+
+        if sys.version_info >= (3, 12):
+            def joinpath(self, *descendants: str) -> abc.Traversable: ...
+        elif sys.version_info >= (3, 11):
+            def joinpath(self, child: str) -> abc.Traversable: ...  # type: ignore[override]
+        else:
+            def joinpath(self, child: str) -> abc.Traversable: ...
+
+        if sys.version_info < (3, 12):
+            __truediv__ = joinpath
+
+        def open(self, *args: Never, **kwargs: Never) -> NoReturn: ...  # type: ignore[override]
+        @property
+        def name(self) -> str: ...
+
+    class NamespaceReader(abc.TraversableResources):
+        path: MultiplexedPath
+        def __init__(self, namespace_path) -> None: ...
+        def resource_path(self, resource: str) -> str: ...
+        def files(self) -> MultiplexedPath: ...
diff --git a/mypy/typeshed/stdlib/importlib/resources/readers.pyi b/mypy/typeshed/stdlib/importlib/resources/readers.pyi
new file mode 100644
index 000000000000..0ab21fd29114
--- /dev/null
+++ b/mypy/typeshed/stdlib/importlib/resources/readers.pyi
@@ -0,0 +1,14 @@
+# On py311+, things are actually defined here
+# and re-exported from importlib.readers,
+# but doing it this way leads to less code duplication for us
+
+import sys
+from collections.abc import Iterable, Iterator
+from typing import TypeVar
+
+if sys.version_info >= (3, 11):
+    from importlib.readers import *
+
+    _T = TypeVar("_T")
+
+    def remove_duplicates(items: Iterable[_T]) -> Iterator[_T]: ...
diff --git a/mypy/typeshed/stdlib/importlib/resources/simple.pyi b/mypy/typeshed/stdlib/importlib/resources/simple.pyi
new file mode 100644
index 000000000000..9502375d00a2
--- /dev/null
+++ b/mypy/typeshed/stdlib/importlib/resources/simple.pyi
@@ -0,0 +1,49 @@
+import abc
+import sys
+from _typeshed import Incomplete, OpenBinaryMode, OpenTextMode, Unused
+from collections.abc import Iterator
+from io import TextIOWrapper
+from typing import IO, Any, BinaryIO, NoReturn, overload
+from typing_extensions import Literal, Never
+
+if sys.version_info >= (3, 11):
+    from .abc import Traversable, TraversableResources
+
+    class SimpleReader(abc.ABC):
+        @property
+        @abc.abstractmethod
+        def package(self) -> str: ...
+        @abc.abstractmethod
+        def children(self) -> list[SimpleReader]: ...
+        @abc.abstractmethod
+        def resources(self) -> list[str]: ...
+        @abc.abstractmethod
+        def open_binary(self, resource: str) -> BinaryIO: ...
+        @property
+        def name(self) -> str: ...
+
+    class ResourceHandle(Traversable, metaclass=abc.ABCMeta):
+        parent: ResourceContainer
+        def __init__(self, parent: ResourceContainer, name: str) -> None: ...
+        def is_file(self) -> Literal[True]: ...
+        def is_dir(self) -> Literal[False]: ...
+        @overload
+        def open(self, mode: OpenTextMode = "r", *args: Incomplete, **kwargs: Incomplete) -> TextIOWrapper: ...
+        @overload
+        def open(self, mode: OpenBinaryMode, *args: Unused, **kwargs: Unused) -> BinaryIO: ...
+        @overload
+        def open(self, mode: str, *args: Incomplete, **kwargs: Incomplete) -> IO[Any]: ...
+        def joinpath(self, name: Never) -> NoReturn: ...  # type: ignore[override]
+
+    class ResourceContainer(Traversable, metaclass=abc.ABCMeta):
+        reader: SimpleReader
+        def __init__(self, reader: SimpleReader) -> None: ...
+        def is_dir(self) -> Literal[True]: ...
+        def is_file(self) -> Literal[False]: ...
+        def iterdir(self) -> Iterator[ResourceHandle | ResourceContainer]: ...
+        def open(self, *args: Never, **kwargs: Never) -> NoReturn: ...  # type: ignore[override]
+        if sys.version_info < (3, 12):
+            def joinpath(self, *descendants: str) -> Traversable: ...
+
+    class TraversableReader(TraversableResources, SimpleReader, metaclass=abc.ABCMeta):
+        def files(self) -> ResourceContainer: ...
diff --git a/mypy/typeshed/stdlib/importlib/simple.pyi b/mypy/typeshed/stdlib/importlib/simple.pyi
new file mode 100644
index 000000000000..58d8c6617082
--- /dev/null
+++ b/mypy/typeshed/stdlib/importlib/simple.pyi
@@ -0,0 +1,11 @@
+import sys
+
+if sys.version_info >= (3, 11):
+    from .resources.simple import (
+        ResourceContainer as ResourceContainer,
+        ResourceHandle as ResourceHandle,
+        SimpleReader as SimpleReader,
+        TraversableReader as TraversableReader,
+    )
+
+    __all__ = ["SimpleReader", "ResourceHandle", "ResourceContainer", "TraversableReader"]
diff --git a/mypy/typeshed/stdlib/inspect.pyi b/mypy/typeshed/stdlib/inspect.pyi
index 601d23e786ac..6498719df887 100644
--- a/mypy/typeshed/stdlib/inspect.pyi
+++ b/mypy/typeshed/stdlib/inspect.pyi
@@ -294,6 +294,14 @@ _SourceObjectType: TypeAlias = (
 
 def findsource(object: _SourceObjectType) -> tuple[list[str], int]: ...
 def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: ...
+
+# Special-case the two most common input types here
+# to avoid the annoyingly vague `Sequence[str]` return type
+@overload
+def getblock(lines: list[str]) -> list[str]: ...
+@overload
+def getblock(lines: tuple[str, ...]) -> tuple[str, ...]: ...
+@overload
 def getblock(lines: Sequence[str]) -> Sequence[str]: ...
 def getdoc(object: object) -> str | None: ...
 def getcomments(object: object) -> str | None: ...
diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi
index c114f839594f..b54e0a9fd05b 100644
--- a/mypy/typeshed/stdlib/io.pyi
+++ b/mypy/typeshed/stdlib/io.pyi
@@ -6,7 +6,7 @@ from _typeshed import FileDescriptorOrPath, ReadableBuffer, WriteableBuffer
 from collections.abc import Callable, Iterable, Iterator
 from os import _Opener
 from types import TracebackType
-from typing import IO, Any, BinaryIO, TextIO
+from typing import IO, Any, BinaryIO, TextIO, TypeVar, overload
 from typing_extensions import Literal, Self
 
 __all__ = [
@@ -33,6 +33,8 @@ __all__ = [
 if sys.version_info >= (3, 8):
     __all__ += ["open_code"]
 
+_T = TypeVar("_T")
+
 DEFAULT_BUFFER_SIZE: Literal[8192]
 
 SEEK_SET: Literal[0]
@@ -194,3 +196,9 @@ class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
     @property
     def newlines(self) -> str | tuple[str, ...] | None: ...
     def setstate(self, __state: tuple[bytes, int]) -> None: ...
+
+if sys.version_info >= (3, 10):
+    @overload
+    def text_encoding(__encoding: None, __stacklevel: int = 2) -> Literal["locale", "utf-8"]: ...
+    @overload
+    def text_encoding(__encoding: _T, __stacklevel: int = 2) -> _T: ...
diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi
index 4b5d624c78d7..ffa8e19391dd 100644
--- a/mypy/typeshed/stdlib/itertools.pyi
+++ b/mypy/typeshed/stdlib/itertools.pyi
@@ -23,7 +23,7 @@ _Predicate: TypeAlias = Callable[[_T], object]
 
 # Technically count can take anything that implements a number protocol and has an add method
 # but we can't enforce the add method
-class count(Iterator[_N], Generic[_N]):
+class count(Iterator[_N]):
     @overload
     def __new__(cls) -> count[int]: ...
     @overload
@@ -33,12 +33,12 @@ class count(Iterator[_N], Generic[_N]):
     def __next__(self) -> _N: ...
     def __iter__(self) -> Self: ...
 
-class cycle(Iterator[_T], Generic[_T]):
+class cycle(Iterator[_T]):
     def __init__(self, __iterable: Iterable[_T]) -> None: ...
     def __next__(self) -> _T: ...
     def __iter__(self) -> Self: ...
 
-class repeat(Iterator[_T], Generic[_T]):
+class repeat(Iterator[_T]):
     @overload
     def __init__(self, object: _T) -> None: ...
     @overload
@@ -47,7 +47,7 @@ class repeat(Iterator[_T], Generic[_T]):
     def __iter__(self) -> Self: ...
     def __length_hint__(self) -> int: ...
 
-class accumulate(Iterator[_T], Generic[_T]):
+class accumulate(Iterator[_T]):
     if sys.version_info >= (3, 8):
         @overload
         def __init__(self, iterable: Iterable[_T], func: None = None, *, initial: _T | None = ...) -> None: ...
@@ -59,7 +59,7 @@ class accumulate(Iterator[_T], Generic[_T]):
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
 
-class chain(Iterator[_T], Generic[_T]):
+class chain(Iterator[_T]):
     def __init__(self, *iterables: Iterable[_T]) -> None: ...
     def __next__(self) -> _T: ...
     def __iter__(self) -> Self: ...
@@ -69,17 +69,17 @@ class chain(Iterator[_T], Generic[_T]):
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, __item: Any) -> GenericAlias: ...
 
-class compress(Iterator[_T], Generic[_T]):
+class compress(Iterator[_T]):
     def __init__(self, data: Iterable[_T], selectors: Iterable[Any]) -> None: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
 
-class dropwhile(Iterator[_T], Generic[_T]):
+class dropwhile(Iterator[_T]):
     def __init__(self, __predicate: _Predicate[_T], __iterable: Iterable[_T]) -> None: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
 
-class filterfalse(Iterator[_T], Generic[_T]):
+class filterfalse(Iterator[_T]):
     def __init__(self, __predicate: _Predicate[_T] | None, __iterable: Iterable[_T]) -> None: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
@@ -92,7 +92,7 @@ class groupby(Iterator[tuple[_T, Iterator[_S]]], Generic[_T, _S]):
     def __iter__(self) -> Self: ...
     def __next__(self) -> tuple[_T, Iterator[_S]]: ...
 
-class islice(Iterator[_T], Generic[_T]):
+class islice(Iterator[_T]):
     @overload
     def __init__(self, __iterable: Iterable[_T], __stop: int | None) -> None: ...
     @overload
@@ -100,19 +100,19 @@ class islice(Iterator[_T], Generic[_T]):
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
 
-class starmap(Iterator[_T], Generic[_T]):
+class starmap(Iterator[_T]):
     def __init__(self, __function: Callable[..., _T], __iterable: Iterable[Iterable[Any]]) -> None: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
 
-class takewhile(Iterator[_T], Generic[_T]):
+class takewhile(Iterator[_T]):
     def __init__(self, __predicate: _Predicate[_T], __iterable: Iterable[_T]) -> None: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
 
 def tee(__iterable: Iterable[_T], __n: int = 2) -> tuple[Iterator[_T], ...]: ...
 
-class zip_longest(Iterator[_T_co], Generic[_T_co]):
+class zip_longest(Iterator[_T_co]):
     # one iterable (fillvalue doesn't matter)
     @overload
     def __new__(cls, __iter1: Iterable[_T1], *, fillvalue: object = ...) -> zip_longest[tuple[_T1]]: ...
@@ -192,7 +192,7 @@ class zip_longest(Iterator[_T_co], Generic[_T_co]):
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T_co: ...
 
-class product(Iterator[_T_co], Generic[_T_co]):
+class product(Iterator[_T_co]):
     @overload
     def __new__(cls, __iter1: Iterable[_T1]) -> product[tuple[_T1]]: ...
     @overload
@@ -246,7 +246,7 @@ class permutations(Iterator[tuple[_T, ...]], Generic[_T]):
     def __iter__(self) -> Self: ...
     def __next__(self) -> tuple[_T, ...]: ...
 
-class combinations(Iterator[_T_co], Generic[_T_co]):
+class combinations(Iterator[_T_co]):
     @overload
     def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations[tuple[_T, _T]]: ...
     @overload
@@ -266,7 +266,7 @@ class combinations_with_replacement(Iterator[tuple[_T, ...]], Generic[_T]):
     def __next__(self) -> tuple[_T, ...]: ...
 
 if sys.version_info >= (3, 10):
-    class pairwise(Iterator[_T_co], Generic[_T_co]):
+    class pairwise(Iterator[_T_co]):
         def __new__(cls, __iterable: Iterable[_T]) -> pairwise[tuple[_T, _T]]: ...
         def __iter__(self) -> Self: ...
         def __next__(self) -> _T_co: ...
diff --git a/mypy/typeshed/stdlib/nt.pyi b/mypy/typeshed/stdlib/nt.pyi
new file mode 100644
index 000000000000..4066096f4c71
--- /dev/null
+++ b/mypy/typeshed/stdlib/nt.pyi
@@ -0,0 +1,111 @@
+import sys
+
+if sys.platform == "win32":
+    # Actually defined here and re-exported from os at runtime,
+    # but this leads to less code duplication
+    from os import (
+        F_OK as F_OK,
+        O_APPEND as O_APPEND,
+        O_BINARY as O_BINARY,
+        O_CREAT as O_CREAT,
+        O_EXCL as O_EXCL,
+        O_NOINHERIT as O_NOINHERIT,
+        O_RANDOM as O_RANDOM,
+        O_RDONLY as O_RDONLY,
+        O_RDWR as O_RDWR,
+        O_SEQUENTIAL as O_SEQUENTIAL,
+        O_SHORT_LIVED as O_SHORT_LIVED,
+        O_TEMPORARY as O_TEMPORARY,
+        O_TEXT as O_TEXT,
+        O_TRUNC as O_TRUNC,
+        O_WRONLY as O_WRONLY,
+        P_DETACH as P_DETACH,
+        P_NOWAIT as P_NOWAIT,
+        P_NOWAITO as P_NOWAITO,
+        P_OVERLAY as P_OVERLAY,
+        P_WAIT as P_WAIT,
+        R_OK as R_OK,
+        TMP_MAX as TMP_MAX,
+        W_OK as W_OK,
+        X_OK as X_OK,
+        DirEntry as DirEntry,
+        abort as abort,
+        access as access,
+        chdir as chdir,
+        chmod as chmod,
+        close as close,
+        closerange as closerange,
+        cpu_count as cpu_count,
+        device_encoding as device_encoding,
+        dup as dup,
+        dup2 as dup2,
+        error as error,
+        execv as execv,
+        execve as execve,
+        fspath as fspath,
+        fstat as fstat,
+        fsync as fsync,
+        ftruncate as ftruncate,
+        get_handle_inheritable as get_handle_inheritable,
+        get_inheritable as get_inheritable,
+        get_terminal_size as get_terminal_size,
+        getcwd as getcwd,
+        getcwdb as getcwdb,
+        getlogin as getlogin,
+        getpid as getpid,
+        getppid as getppid,
+        isatty as isatty,
+        kill as kill,
+        link as link,
+        listdir as listdir,
+        lseek as lseek,
+        lstat as lstat,
+        mkdir as mkdir,
+        open as open,
+        pipe as pipe,
+        putenv as putenv,
+        read as read,
+        readlink as readlink,
+        remove as remove,
+        rename as rename,
+        replace as replace,
+        rmdir as rmdir,
+        scandir as scandir,
+        set_handle_inheritable as set_handle_inheritable,
+        set_inheritable as set_inheritable,
+        spawnv as spawnv,
+        spawnve as spawnve,
+        startfile as startfile,
+        stat as stat,
+        stat_result as stat_result,
+        statvfs_result as statvfs_result,
+        strerror as strerror,
+        symlink as symlink,
+        system as system,
+        terminal_size as terminal_size,
+        times as times,
+        times_result as times_result,
+        truncate as truncate,
+        umask as umask,
+        uname_result as uname_result,
+        unlink as unlink,
+        urandom as urandom,
+        utime as utime,
+        waitpid as waitpid,
+        write as write,
+    )
+
+    if sys.version_info >= (3, 9):
+        from os import unsetenv as unsetenv, waitstatus_to_exitcode as waitstatus_to_exitcode
+    if sys.version_info >= (3, 11):
+        from os import EX_OK as EX_OK
+    if sys.version_info >= (3, 12):
+        from os import (
+            get_blocking as get_blocking,
+            listdrives as listdrives,
+            listmounts as listmounts,
+            listvolumes as listvolumes,
+            set_blocking as set_blocking,
+        )
+
+    environ: dict[str, str]
diff --git a/mypy/typeshed/stdlib/sys.pyi b/mypy/typeshed/stdlib/sys/__init__.pyi
similarity index 99%
rename from mypy/typeshed/stdlib/sys.pyi
rename to mypy/typeshed/stdlib/sys/__init__.pyi
index a5e819d53326..cf3b1bc47d75 100644
--- a/mypy/typeshed/stdlib/sys.pyi
+++ b/mypy/typeshed/stdlib/sys/__init__.pyi
@@ -370,3 +370,7 @@ if sys.version_info >= (3, 12):
         def activate_stack_trampoline(__backend: str) -> None: ...
     else:
         def activate_stack_trampoline(__backend: str) -> NoReturn: ...
+
+    from . import _monitoring
+
+    monitoring = _monitoring
diff --git a/mypy/typeshed/stdlib/sys/_monitoring.pyi b/mypy/typeshed/stdlib/sys/_monitoring.pyi
new file mode 100644
index 000000000000..40aeb9cb5bdb
--- /dev/null
+++ b/mypy/typeshed/stdlib/sys/_monitoring.pyi
@@ -0,0 +1,52 @@
+# This py312+ module provides annotations for `sys.monitoring`.
+# It's named `sys._monitoring` in typeshed,
+# because trying to import `sys.monitoring` will fail at runtime!
+# At runtime, `sys.monitoring` has the unique status
+# of being a `types.ModuleType` instance that cannot be directly imported,
+# and exists in the `sys`-module namespace despite `sys` not being a package.
+
+from collections.abc import Callable
+from types import CodeType
+from typing import Any
+
+DEBUGGER_ID: int
+COVERAGE_ID: int
+PROFILER_ID: int
+OPTIMIZER_ID: int
+
+def use_tool_id(__tool_id: int, __name: str) -> None: ...
+def free_tool_id(__tool_id: int) -> None: ...
+def get_tool(__tool_id: int) -> str | None: ...
+
+events: _events
+
+class _events:
+    BRANCH: int
+    CALL: int
+    C_RAISE: int
+    C_RETURN: int
+    EXCEPTION_HANDLED: int
+    INSTRUCTION: int
+    JUMP: int
+    LINE: int
+    NO_EVENTS: int
+    PY_RESUME: int
+    PY_RETURN: int
+    PY_START: int
+    PY_THROW: int
+    PY_UNWIND: int
+    PY_YIELD: int
+    RAISE: int
+    RERAISE: int
+    STOP_ITERATION: int
+
+def get_events(__tool_id: int) -> int: ...
+def set_events(__tool_id: int, __event_set: int) -> None: ...
+def get_local_events(__tool_id: int, __code: CodeType) -> int: ...
+def set_local_events(__tool_id: int, __code: CodeType, __event_set: int) -> int: ...
+def restart_events() -> None: ...
+
+DISABLE: object
+MISSING: object
+
+def register_callback(__tool_id: int, __event: int, __func: Callable[..., Any] | None) -> Callable[..., Any] | None: ...
diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi
index 61bcde24255b..f8dcb24c1daf 100644
--- a/mypy/typeshed/stdlib/tempfile.pyi
+++ b/mypy/typeshed/stdlib/tempfile.pyi
@@ -321,7 +321,7 @@ else:
             dir: GenericPath[AnyStr] | None = None,
         ) -> IO[Any]: ...
 
-class _TemporaryFileWrapper(IO[AnyStr], Generic[AnyStr]):
+class _TemporaryFileWrapper(IO[AnyStr]):
     file: IO[AnyStr]  # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter
     name: str
     delete: bool
diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi
index a0a88a8ac82e..c35db3ef7e34 100644
--- a/mypy/typeshed/stdlib/tkinter/__init__.pyi
+++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi
@@ -2888,7 +2888,7 @@ class Scrollbar(Widget):
     def fraction(self, x: int, y: int) -> float: ...
     def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: ...
     def get(self) -> tuple[float, float, float, float] | tuple[float, float]: ...
-    def set(self, first: float, last: float) -> None: ...
+    def set(self, first: float | str, last: float | str) -> None: ...
 
 _TextIndex: TypeAlias = _tkinter.Tcl_Obj | str | float | Misc
 
@@ -3064,11 +3064,40 @@ class Text(Widget, XView, YView):
     def edit_separator(self) -> None: ...  # actually returns empty string
     def edit_undo(self) -> None: ...  # actually returns empty string
     def get(self, index1: _TextIndex, index2: _TextIndex | None = None) -> str: ...
-    # TODO: image_* methods
-    def image_cget(self, index, option): ...
-    def image_configure(self, index, cnf: Incomplete | None = None, **kw): ...
-    def image_create(self, index, cnf={}, **kw): ...
-    def image_names(self): ...
+    @overload
+    def image_cget(self, index: _TextIndex, option: Literal["image", "name"]) -> str: ...
+    @overload
+    def image_cget(self, index: _TextIndex, option: Literal["padx", "pady"]) -> int: ...
+    @overload
+    def image_cget(self, index: _TextIndex, option: Literal["align"]) -> Literal["baseline", "bottom", "center", "top"]: ...
+    @overload
+    def image_cget(self, index: _TextIndex, option: str) -> Any: ...
+    @overload
+    def image_configure(self, index: _TextIndex, cnf: str) -> tuple[str, str, str, str, str | int]: ...
+    @overload
+    def image_configure(
+        self,
+        index: _TextIndex,
+        cnf: dict[str, Any] | None = {},
+        *,
+        align: Literal["baseline", "bottom", "center", "top"] = ...,
+        image: _ImageSpec = ...,
+        name: str = ...,
+        padx: _ScreenUnits = ...,
+        pady: _ScreenUnits = ...,
+    ) -> dict[str, tuple[str, str, str, str, str | int]] | None: ...
+    def image_create(
+        self,
+        index: _TextIndex,
+        cnf: dict[str, Any] | None = {},
+        *,
+        align: Literal["baseline", "bottom", "center", "top"] = ...,
+        image: _ImageSpec = ...,
+        name: str = ...,
+        padx: _ScreenUnits = ...,
+        pady: _ScreenUnits = ...,
+    ) -> str: ...
+    def image_names(self) -> tuple[str, ...]: ...
     def index(self, index: _TextIndex) -> str: ...
     def insert(self, index: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: ...
     @overload
@@ -3166,12 +3195,45 @@ class Text(Widget, XView, YView):
     def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: ...
     # tag_remove and tag_delete are different
     def tag_remove(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None) -> None: ...
-    # TODO: window_* methods
-    def window_cget(self, index, option): ...
-    def window_configure(self, index, cnf: Incomplete | None = None, **kw): ...
+    @overload
+    def window_cget(self, index: _TextIndex, option: Literal["padx", "pady"]) -> int: ...
+    @overload
+    def window_cget(self, index: _TextIndex, option: Literal["stretch"]) -> bool: ...  # actually returns Literal[0, 1]
+    @overload
+    def window_cget(self, index: _TextIndex, option: Literal["align"]) -> Literal["baseline", "bottom", "center", "top"]: ...
+    @overload  # window is set to a widget, but read as the string name.
+    def window_cget(self, index: _TextIndex, option: Literal["create", "window"]) -> str: ...
+    @overload
+    def window_cget(self, index: _TextIndex, option: str) -> Any: ...
+    @overload
+    def window_configure(self, index: _TextIndex, cnf: str) -> tuple[str, str, str, str, str | int]: ...
+    @overload
+    def window_configure(
+        self,
+        index: _TextIndex,
+        cnf: dict[str, Any] | None = None,
+        *,
+        align: Literal["baseline", "bottom", "center", "top"] = ...,
+        create: str = ...,
+        padx: _ScreenUnits = ...,
+        pady: _ScreenUnits = ...,
+        stretch: bool | Literal[0, 1] = ...,
+        window: Misc | str = ...,
+    ) -> dict[str, tuple[str, str, str, str, str | int]] | None: ...
     window_config = window_configure
-    def window_create(self, index, cnf={}, **kw) -> None: ...
-    def window_names(self): ...
+    def window_create(
+        self,
+        index: _TextIndex,
+        cnf: dict[str, Any] | None = {},
+        *,
+        align: Literal["baseline", "bottom", "center", "top"] = ...,
+        create: str = ...,
+        padx: _ScreenUnits = ...,
+        pady: _ScreenUnits = ...,
+        stretch: bool | Literal[0, 1] = ...,
+        window: Misc | str = ...,
+    ) -> None: ...
+    def window_names(self) -> tuple[str, ...]: ...
     def yview_pickplace(self, *what): ...  # deprecated
 
 class _setit:
diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi
index 8559063834c9..a50bbf145b9f 100644
--- a/mypy/typeshed/stdlib/types.pyi
+++ b/mypy/typeshed/stdlib/types.pyi
@@ -16,7 +16,7 @@ from collections.abc import (
 from importlib.machinery import ModuleSpec
 
 # pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping
-from typing import Any, ClassVar, Generic, Mapping, Protocol, TypeVar, overload  # noqa: Y022
+from typing import Any, ClassVar, Mapping, Protocol, TypeVar, overload  # noqa: Y022
 from typing_extensions import Literal, ParamSpec, Self, TypeVarTuple, final
 
 __all__ = [
@@ -309,7 +309,7 @@ class CodeType:
         ) -> CodeType: ...
 
 @final
-class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]):
+class MappingProxyType(Mapping[_KT, _VT_co]):
     __hash__: ClassVar[None]  # type: ignore[assignment]
     def __new__(cls, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> Self: ...
     def __getitem__(self, __key: _KT) -> _VT_co: ...
diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi
index 6deb0ffd02b3..ad5719ca9f56 100644
--- a/mypy/typeshed/stdlib/typing.pyi
+++ b/mypy/typeshed/stdlib/typing.pyi
@@ -527,7 +527,7 @@ class Sequence(Collection[_T_co], Reversible[_T_co], Generic[_T_co]):
     def __iter__(self) -> Iterator[_T_co]: ...
     def __reversed__(self) -> Iterator[_T_co]: ...
 
-class MutableSequence(Sequence[_T], Generic[_T]):
+class MutableSequence(Sequence[_T]):
     @abstractmethod
     def insert(self, index: int, value: _T) -> None: ...
     @overload
@@ -557,7 +557,7 @@ class MutableSequence(Sequence[_T], Generic[_T]):
     def remove(self, value: _T) -> None: ...
     def __iadd__(self, values: Iterable[_T]) -> typing_extensions.Self: ...
 
-class AbstractSet(Collection[_T_co], Generic[_T_co]):
+class AbstractSet(Collection[_T_co]):
     @abstractmethod
     def __contains__(self, x: object) -> bool: ...
     def _hash(self) -> int: ...
@@ -573,7 +573,7 @@ class AbstractSet(Collection[_T_co], Generic[_T_co]):
     def __eq__(self, other: object) -> bool: ...
     def isdisjoint(self, other: Iterable[Any]) -> bool: ...
 
-class MutableSet(AbstractSet[_T], Generic[_T]):
+class MutableSet(AbstractSet[_T]):
     @abstractmethod
     def add(self, value: _T) -> None: ...
     @abstractmethod
@@ -646,7 +646,7 @@ class Mapping(Collection[_KT], Generic[_KT, _VT_co]):
     def __contains__(self, __key: object) -> bool: ...
     def __eq__(self, __other: object) -> bool: ...
 
-class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]):
+class MutableMapping(Mapping[_KT, _VT]):
     @abstractmethod
     def __setitem__(self, __key: _KT, __value: _VT) -> None: ...
     @abstractmethod
@@ -703,7 +703,7 @@ TYPE_CHECKING: bool
 # In stubs, the arguments of the IO class are marked as positional-only.
 # This differs from runtime, but better reflects the fact that in reality
 # classes deriving from IO use different names for the arguments.
-class IO(Iterator[AnyStr], Generic[AnyStr]):
+class IO(Iterator[AnyStr]):
     # At runtime these are all abstract properties,
     # but making them abstract in the stub is hugely disruptive, for not much gain.
     # See #8726
diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi
index ca5366602ceb..ae88f3a317c1 100644
--- a/mypy/typeshed/stdlib/weakref.pyi
+++ b/mypy/typeshed/stdlib/weakref.pyi
@@ -40,7 +40,7 @@ _P = ParamSpec("_P")
 
 ProxyTypes: tuple[type[Any], ...]
 
-class WeakMethod(ref[_CallableT], Generic[_CallableT]):
+class WeakMethod(ref[_CallableT]):
     def __new__(cls, meth: _CallableT, callback: Callable[[Self], object] | None = None) -> Self: ...
     def __call__(self) -> _CallableT | None: ...
     def __eq__(self, other: object) -> bool: ...

From 14f79c1c6d499df94a0cff3ee627582c13a80a2b Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 1 Nov 2023 09:27:53 +0000
Subject: [PATCH 222/288] Don't show docs links for plugin error codes (#16383)

Fixes https://github.com/python/mypy/issues/16375
---
 mypy/errorcodes.py | 3 +++
 mypy/errors.py     | 3 ++-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py
index 98600679da53..c6e9de9f31c1 100644
--- a/mypy/errorcodes.py
+++ b/mypy/errorcodes.py
@@ -274,3 +274,6 @@ def __hash__(self) -> int:
     "General",
     sub_code_of=MISC,
 )
+
+# This copy will not include any error codes defined later in the plugins.
+mypy_error_codes = error_codes.copy()
diff --git a/mypy/errors.py b/mypy/errors.py
index 4e62a48aeb27..6e90c28d9c03 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -8,7 +8,7 @@
 from typing_extensions import Literal, TypeAlias as _TypeAlias
 
 from mypy import errorcodes as codes
-from mypy.errorcodes import IMPORT, IMPORT_NOT_FOUND, IMPORT_UNTYPED, ErrorCode
+from mypy.errorcodes import IMPORT, IMPORT_NOT_FOUND, IMPORT_UNTYPED, ErrorCode, mypy_error_codes
 from mypy.message_registry import ErrorMessage
 from mypy.options import Options
 from mypy.scope import Scope
@@ -560,6 +560,7 @@ def add_error_info(self, info: ErrorInfo) -> None:
             and not self.options.hide_error_codes
             and info.code is not None
             and info.code not in HIDE_LINK_CODES
+            and info.code.code in mypy_error_codes
         ):
             message = f"See {BASE_RTD_URL}-{info.code.code} for more info"
             if message in self.only_once_messages:

From 6a8365484e62b9f05817f04aec144d9b783442fe Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 1 Nov 2023 10:44:58 +0000
Subject: [PATCH 223/288] Fix crash on unpack call special-casing (#16381)

Fixes https://github.com/python/mypy/issues/16380

The fix is quite straightforward: what was an `assert` really needs to be
an `if`.

---------

Co-authored-by: Jelle Zijlstra <jelle.zijlstra@gmail.com>
---
 mypy/checkexpr.py                       | 38 +++++++++++--------------
 test-data/unit/check-typevar-tuple.test | 22 ++++++++++++++
 2 files changed, 38 insertions(+), 22 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 0207c245b1f9..95700a52af02 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -2440,34 +2440,28 @@ def check_argument_types(
             # the suffices to the tuple, e.g. a single actual like
             # Tuple[Unpack[Ts], int]
             expanded_tuple = False
+            actual_kinds = [arg_kinds[a] for a in actuals]
             if len(actuals) > 1:
-                first_actual_arg_type = get_proper_type(arg_types[actuals[0]])
+                p_actual_type = get_proper_type(arg_types[actuals[0]])
                 if (
-                    isinstance(first_actual_arg_type, TupleType)
-                    and len(first_actual_arg_type.items) == 1
-                    and isinstance(first_actual_arg_type.items[0], UnpackType)
+                    isinstance(p_actual_type, TupleType)
+                    and len(p_actual_type.items) == 1
+                    and isinstance(p_actual_type.items[0], UnpackType)
+                    and actual_kinds == [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1)
                 ):
-                    # TODO: use walrus operator
-                    actual_types = [first_actual_arg_type.items[0]] + [
-                        arg_types[a] for a in actuals[1:]
-                    ]
-                    actual_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1)
-
-                    # If we got here, the callee was previously inferred to have a suffix.
-                    assert isinstance(orig_callee_arg_type, UnpackType)
-                    assert isinstance(orig_callee_arg_type.type, ProperType) and isinstance(
-                        orig_callee_arg_type.type, TupleType
-                    )
-                    assert orig_callee_arg_type.type.items
-                    callee_arg_types = orig_callee_arg_type.type.items
-                    callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (
-                        len(orig_callee_arg_type.type.items) - 1
-                    )
-                    expanded_tuple = True
+                    actual_types = [p_actual_type.items[0]] + [arg_types[a] for a in actuals[1:]]
+                    if isinstance(orig_callee_arg_type, UnpackType):
+                        p_callee_type = get_proper_type(orig_callee_arg_type.type)
+                        if isinstance(p_callee_type, TupleType):
+                            assert p_callee_type.items
+                            callee_arg_types = p_callee_type.items
+                            callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (
+                                len(p_callee_type.items) - 1
+                            )
+                            expanded_tuple = True
 
             if not expanded_tuple:
                 actual_types = [arg_types[a] for a in actuals]
-                actual_kinds = [arg_kinds[a] for a in actuals]
                 if isinstance(orig_callee_arg_type, UnpackType):
                     unpacked_type = get_proper_type(orig_callee_arg_type.type)
                     if isinstance(unpacked_type, TupleType):
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index a51b535a873c..e85863f0ed04 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -2185,3 +2185,25 @@ def test2(
                      # E: Missing named argument "b"
     return func(*args, **kwargs)
 [builtins fixtures/tuple.pyi]
+
+[case testUnpackTupleSpecialCaseNoCrash]
+from typing import Tuple, TypeVar
+from typing_extensions import Unpack
+
+T = TypeVar("T")
+
+def foo(*x: object) -> None: ...
+def bar(*x: int) -> None: ...
+def baz(*x: T) -> T: ...
+
+keys: Tuple[Unpack[Tuple[int, ...]]]
+
+foo(keys, 1)
+foo(*keys, 1)
+
+bar(keys, 1)  # E: Argument 1 to "bar" has incompatible type "Tuple[Unpack[Tuple[int, ...]]]"; expected "int"
+bar(*keys, 1)  # OK
+
+reveal_type(baz(keys, 1))  # N: Revealed type is "builtins.object"
+reveal_type(baz(*keys, 1))  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]

From 371219347a6d17e16924bbabf3e693c6874e7138 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 1 Nov 2023 15:33:45 +0000
Subject: [PATCH 224/288] Fix file reloading in dmypy with --export-types
 (#16359)

Fixes https://github.com/python/mypy/issues/15794

Unfortunately, this requires passing `--export-types` to `dmypy run` if
one wants to inspect a file that was previously kicked out of the build.
---
 mypy/dmypy_server.py         | 52 +++++++++++++++++++++++++++++++-----
 mypy/test/testfinegrained.py |  3 ++-
 test-data/unit/daemon.test   | 27 +++++++++++++++++++
 3 files changed, 74 insertions(+), 8 deletions(-)

diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py
index 0db349b5bf82..42236497f275 100644
--- a/mypy/dmypy_server.py
+++ b/mypy/dmypy_server.py
@@ -393,15 +393,21 @@ def cmd_recheck(
         t1 = time.time()
         manager = self.fine_grained_manager.manager
         manager.log(f"fine-grained increment: cmd_recheck: {t1 - t0:.3f}s")
-        self.options.export_types = export_types
+        old_export_types = self.options.export_types
+        self.options.export_types = self.options.export_types or export_types
         if not self.following_imports():
-            messages = self.fine_grained_increment(sources, remove, update)
+            messages = self.fine_grained_increment(
+                sources, remove, update, explicit_export_types=export_types
+            )
         else:
             assert remove is None and update is None
-            messages = self.fine_grained_increment_follow_imports(sources)
+            messages = self.fine_grained_increment_follow_imports(
+                sources, explicit_export_types=export_types
+            )
         res = self.increment_output(messages, sources, is_tty, terminal_width)
         self.flush_caches()
         self.update_stats(res)
+        self.options.export_types = old_export_types
         return res
 
     def check(
@@ -412,17 +418,21 @@ def check(
         If is_tty is True format the output nicely with colors and summary line
         (unless disabled in self.options). Also pass the terminal_width to formatter.
         """
-        self.options.export_types = export_types
+        old_export_types = self.options.export_types
+        self.options.export_types = self.options.export_types or export_types
         if not self.fine_grained_manager:
             res = self.initialize_fine_grained(sources, is_tty, terminal_width)
         else:
             if not self.following_imports():
-                messages = self.fine_grained_increment(sources)
+                messages = self.fine_grained_increment(sources, explicit_export_types=export_types)
             else:
-                messages = self.fine_grained_increment_follow_imports(sources)
+                messages = self.fine_grained_increment_follow_imports(
+                    sources, explicit_export_types=export_types
+                )
             res = self.increment_output(messages, sources, is_tty, terminal_width)
         self.flush_caches()
         self.update_stats(res)
+        self.options.export_types = old_export_types
         return res
 
     def flush_caches(self) -> None:
@@ -535,6 +545,7 @@ def fine_grained_increment(
         sources: list[BuildSource],
         remove: list[str] | None = None,
         update: list[str] | None = None,
+        explicit_export_types: bool = False,
     ) -> list[str]:
         """Perform a fine-grained type checking increment.
 
@@ -545,6 +556,8 @@ def fine_grained_increment(
             sources: sources passed on the command line
             remove: paths of files that have been removed
             update: paths of files that have been changed or created
+            explicit_export_types: --export-type was passed in a check command
+              (as opposite to being set in dmypy start)
         """
         assert self.fine_grained_manager is not None
         manager = self.fine_grained_manager.manager
@@ -559,6 +572,10 @@ def fine_grained_increment(
             # Use the remove/update lists to update fswatcher.
             # This avoids calling stat() for unchanged files.
             changed, removed = self.update_changed(sources, remove or [], update or [])
+        if explicit_export_types:
+            # If --export-types is given, we need to force full re-checking of all
+            # explicitly passed files, since we need to visit each expression.
+            add_all_sources_to_changed(sources, changed)
         changed += self.find_added_suppressed(
             self.fine_grained_manager.graph, set(), manager.search_paths
         )
@@ -577,7 +594,9 @@ def fine_grained_increment(
         self.previous_sources = sources
         return messages
 
-    def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> list[str]:
+    def fine_grained_increment_follow_imports(
+        self, sources: list[BuildSource], explicit_export_types: bool = False
+    ) -> list[str]:
         """Like fine_grained_increment, but follow imports."""
         t0 = time.time()
 
@@ -603,6 +622,9 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l
         changed, new_files = self.find_reachable_changed_modules(
             sources, graph, seen, changed_paths
         )
+        if explicit_export_types:
+            # Same as in fine_grained_increment().
+            add_all_sources_to_changed(sources, changed)
         sources.extend(new_files)
 
         # Process changes directly reachable from roots.
@@ -1011,6 +1033,22 @@ def find_all_sources_in_build(
     return result
 
 
+def add_all_sources_to_changed(sources: list[BuildSource], changed: list[tuple[str, str]]) -> None:
+    """Add all (explicit) sources to the list changed files in place.
+
+    Use this when re-processing of unchanged files is needed (e.g. for
+    the purpose of exporting types for inspections).
+    """
+    changed_set = set(changed)
+    changed.extend(
+        [
+            (bs.module, bs.path)
+            for bs in sources
+            if bs.path and (bs.module, bs.path) not in changed_set
+        ]
+    )
+
+
 def fix_module_deps(graph: mypy.build.Graph) -> None:
     """After an incremental update, update module dependencies to reflect the new state.
 
diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py
index 953f91a60df7..f61a58c425fc 100644
--- a/mypy/test/testfinegrained.py
+++ b/mypy/test/testfinegrained.py
@@ -149,6 +149,7 @@ def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bo
         options.use_fine_grained_cache = self.use_cache and not build_cache
         options.cache_fine_grained = self.use_cache
         options.local_partial_types = True
+        options.export_types = "inspect" in testcase.file
         # Treat empty bodies safely for these test cases.
         options.allow_empty_bodies = not testcase.name.endswith("_no_empty")
         if re.search("flags:.*--follow-imports", source) is None:
@@ -163,7 +164,7 @@ def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bo
         return options
 
     def run_check(self, server: Server, sources: list[BuildSource]) -> list[str]:
-        response = server.check(sources, export_types=True, is_tty=False, terminal_width=-1)
+        response = server.check(sources, export_types=False, is_tty=False, terminal_width=-1)
         out = response["out"] or response["err"]
         assert isinstance(out, str)
         return out.splitlines()
diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test
index 77367eb02bfe..ca2c969d2f5e 100644
--- a/test-data/unit/daemon.test
+++ b/test-data/unit/daemon.test
@@ -360,6 +360,33 @@ def bar() -> None:
     x = foo('abc')  # type: str
     foo(arg='xyz')
 
+[case testDaemonInspectCheck]
+$ dmypy start
+Daemon started
+$ dmypy check foo.py
+Success: no issues found in 1 source file
+$ dmypy check foo.py --export-types
+Success: no issues found in 1 source file
+$ dmypy inspect foo.py:1:1
+"int"
+[file foo.py]
+x = 1
+
+[case testDaemonInspectRun]
+$ dmypy run test1.py
+Daemon started
+Success: no issues found in 1 source file
+$ dmypy run test2.py
+Success: no issues found in 1 source file
+$ dmypy run test1.py --export-types
+Success: no issues found in 1 source file
+$ dmypy inspect test1.py:1:1
+"int"
+[file test1.py]
+a: int
+[file test2.py]
+a: str
+
 [case testDaemonGetType]
 $ dmypy start --log-file log.txt -- --follow-imports=error --no-error-summary --python-version 3.8
 Daemon started

From 44e527a3ea2d4bc66565c64edf837c3560eacb3e Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 2 Nov 2023 23:45:04 +0000
Subject: [PATCH 225/288] Fix strict-optional in extending generic TypedDict
 (#16398)

Fixes https://github.com/python/mypy/issues/16395
---
 mypy/semanal_typeddict.py           |  4 +++-
 test-data/unit/check-typeddict.test | 17 +++++++++++++++++
 2 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py
index e9aaee55879a..5104d31f5c26 100644
--- a/mypy/semanal_typeddict.py
+++ b/mypy/semanal_typeddict.py
@@ -37,6 +37,7 @@
     has_placeholder,
     require_bool_literal_argument,
 )
+from mypy.state import state
 from mypy.typeanal import check_for_explicit_any, has_any_from_unimported_type
 from mypy.types import (
     TPDICT_NAMES,
@@ -203,7 +204,8 @@ def add_keys_and_types_from_base(
                 any_kind = TypeOfAny.from_error
             base_args = [AnyType(any_kind) for _ in tvars]
 
-        valid_items = self.map_items_to_base(valid_items, tvars, base_args)
+        with state.strict_optional_set(self.options.strict_optional):
+            valid_items = self.map_items_to_base(valid_items, tvars, base_args)
         for key in base_items:
             if key in keys:
                 self.fail(f'Overwriting TypedDict field "{key}" while merging', ctx)
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index 088b52db0473..c1c791304a15 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -3379,3 +3379,20 @@ bar |= d1  # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Di
 bar |= d2  # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Dict[int, str]"; expected "TypedDict({'key'?: int, 'value'?: str})"
 [builtins fixtures/dict.pyi]
 [typing fixtures/typing-typeddict-iror.pyi]
+
+[case testGenericTypedDictStrictOptionalExtending]
+from typing import Generic, TypeVar, TypedDict, Optional
+
+T = TypeVar("T")
+class Foo(TypedDict, Generic[T], total=False):
+    a: Optional[str]
+    g: Optional[T]
+
+class Bar(Foo[T], total=False):
+    other: str
+
+b: Bar[int]
+reveal_type(b["a"])  # N: Revealed type is "Union[builtins.str, None]"
+reveal_type(b["g"])  # N: Revealed type is "Union[builtins.int, None]"
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-typeddict.pyi]

From 93e6de4f1c3c1b1ed6c9a54e1c4f355443912a63 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Fri, 3 Nov 2023 12:01:54 +0300
Subject: [PATCH 226/288] Improve error messages for `super` checks and add
 more tests (#16393)

Now all messages use the same `"super"` formatting, it used to be a bit
different.
---
 mypy/message_registry.py        |  4 ++--
 test-data/unit/check-super.test | 21 +++++++++++++++++----
 2 files changed, 19 insertions(+), 6 deletions(-)

diff --git a/mypy/message_registry.py b/mypy/message_registry.py
index dc46eb503390..93581d5aca90 100644
--- a/mypy/message_registry.py
+++ b/mypy/message_registry.py
@@ -206,10 +206,10 @@ def with_additional_msg(self, info: str) -> ErrorMessage:
 )
 TARGET_CLASS_HAS_NO_BASE_CLASS: Final = ErrorMessage("Target class has no base class")
 SUPER_OUTSIDE_OF_METHOD_NOT_SUPPORTED: Final = ErrorMessage(
-    "super() outside of a method is not supported"
+    '"super()" outside of a method is not supported'
 )
 SUPER_ENCLOSING_POSITIONAL_ARGS_REQUIRED: Final = ErrorMessage(
-    "super() requires one or more positional arguments in enclosing function"
+    '"super()" requires one or two positional arguments in enclosing function'
 )
 
 # Self-type
diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test
index 48a0a0250ecf..8816322a270a 100644
--- a/test-data/unit/check-super.test
+++ b/test-data/unit/check-super.test
@@ -280,8 +280,12 @@ class B(A):
 
 
 [case testSuperOutsideMethodNoCrash]
-class C:
-    a = super().whatever  # E: super() outside of a method is not supported
+class A:
+    x = 1
+class B(A): pass
+class C(B):
+    b = super(B, B).x
+    a = super().whatever  # E: "super()" outside of a method is not supported
 
 [case testSuperWithObjectClassAsFirstArgument]
 class A:
@@ -366,13 +370,22 @@ class C(B):
 [case testSuperInMethodWithNoArguments]
 class A:
     def f(self) -> None: pass
+    @staticmethod
+    def st() -> int:
+        return 1
 
 class B(A):
     def g() -> None: # E: Method must have at least one argument. Did you forget the "self" argument?
-        super().f() # E: super() requires one or more positional arguments in enclosing function
+        super().f() # E: "super()" requires one or two positional arguments in enclosing function
     def h(self) -> None:
         def a() -> None:
-            super().f() # E: super() requires one or more positional arguments in enclosing function
+            super().f() # E: "super()" requires one or two positional arguments in enclosing function
+    @staticmethod
+    def st() -> int:
+        reveal_type(super(B, B).st())  # N: Revealed type is "builtins.int"
+        super().st() # E: "super()" requires one or two positional arguments in enclosing function
+        return 2
+[builtins fixtures/staticmethod.pyi]
 
 [case testSuperWithUnsupportedTypeObject]
 from typing import Type

From 8c57df01386f3e29d877ca190dc4c5e5af7b92a1 Mon Sep 17 00:00:00 2001
From: Matthew Wright <mrwright@dropbox.com>
Date: Fri, 3 Nov 2023 07:50:36 -0500
Subject: [PATCH 227/288] Allow mypy to output a junit file with per-file
 results (#16388)

Adds a new `--junit-format` flag to MyPy, which affects the structure of
the junit file written when `--junit-xml` is specified (it has no effect
when not writing a junit file). This flag can take `global` or
`per_file` as values:
* `--junit-format=global` (the default) preserves the existing junit
structure, creating a junit file specifying a single "test" for the
entire mypy run.
* `--junit-format=per_file` will cause the junit file to have one test
entry per file with failures (or a single entry, as in the existing
behavior, in the case when typechecking passes).

In some settings it can be useful to know which files had typechecking
failures (for example, a CI system might want to display failures by
file); while that information can be parsed out of the error messages in
the existing junit files, it's much more convenient to have that
represented in the junit structure.

Tests for the old and new junit structure have been added.
---
 mypy/build.py                | 18 ++++---
 mypy/config_parser.py        | 13 +++++
 mypy/main.py                 | 38 ++++++++++++---
 mypy/options.py              |  2 +
 mypy/test/testerrorstream.py |  2 +-
 mypy/test/testgraph.py       |  2 +-
 mypy/test/testutil.py        | 69 ++++++++++++++++++++++++++-
 mypy/util.py                 | 92 ++++++++++++++++++++++++++----------
 mypyc/build.py               |  4 +-
 9 files changed, 197 insertions(+), 43 deletions(-)

diff --git a/mypy/build.py b/mypy/build.py
index 605368a6dc51..961198fc2fa4 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -145,7 +145,7 @@ def build(
     sources: list[BuildSource],
     options: Options,
     alt_lib_path: str | None = None,
-    flush_errors: Callable[[list[str], bool], None] | None = None,
+    flush_errors: Callable[[str | None, list[str], bool], None] | None = None,
     fscache: FileSystemCache | None = None,
     stdout: TextIO | None = None,
     stderr: TextIO | None = None,
@@ -177,7 +177,9 @@ def build(
     # fields for callers that want the traditional API.
     messages = []
 
-    def default_flush_errors(new_messages: list[str], is_serious: bool) -> None:
+    def default_flush_errors(
+        filename: str | None, new_messages: list[str], is_serious: bool
+    ) -> None:
         messages.extend(new_messages)
 
     flush_errors = flush_errors or default_flush_errors
@@ -197,7 +199,7 @@ def default_flush_errors(new_messages: list[str], is_serious: bool) -> None:
         # Patch it up to contain either none or all none of the messages,
         # depending on whether we are flushing errors.
         serious = not e.use_stdout
-        flush_errors(e.messages, serious)
+        flush_errors(None, e.messages, serious)
         e.messages = messages
         raise
 
@@ -206,7 +208,7 @@ def _build(
     sources: list[BuildSource],
     options: Options,
     alt_lib_path: str | None,
-    flush_errors: Callable[[list[str], bool], None],
+    flush_errors: Callable[[str | None, list[str], bool], None],
     fscache: FileSystemCache | None,
     stdout: TextIO,
     stderr: TextIO,
@@ -600,7 +602,7 @@ def __init__(
         plugin: Plugin,
         plugins_snapshot: dict[str, str],
         errors: Errors,
-        flush_errors: Callable[[list[str], bool], None],
+        flush_errors: Callable[[str | None, list[str], bool], None],
         fscache: FileSystemCache,
         stdout: TextIO,
         stderr: TextIO,
@@ -3458,7 +3460,11 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No
         for id in stale:
             graph[id].transitive_error = True
     for id in stale:
-        manager.flush_errors(manager.errors.file_messages(graph[id].xpath), False)
+        manager.flush_errors(
+            manager.errors.simplify_path(graph[id].xpath),
+            manager.errors.file_messages(graph[id].xpath),
+            False,
+        )
         graph[id].write_cache()
         graph[id].mark_as_rechecked()
 
diff --git a/mypy/config_parser.py b/mypy/config_parser.py
index 4dbd6477c81e..a6bf021000c1 100644
--- a/mypy/config_parser.py
+++ b/mypy/config_parser.py
@@ -152,6 +152,17 @@ def check_follow_imports(choice: str) -> str:
     return choice
 
 
+def check_junit_format(choice: str) -> str:
+    choices = ["global", "per_file"]
+    if choice not in choices:
+        raise argparse.ArgumentTypeError(
+            "invalid choice '{}' (choose from {})".format(
+                choice, ", ".join(f"'{x}'" for x in choices)
+            )
+        )
+    return choice
+
+
 def split_commas(value: str) -> list[str]:
     # Uses a bit smarter technique to allow last trailing comma
     # and to remove last `""` item from the split.
@@ -173,6 +184,7 @@ def split_commas(value: str) -> list[str]:
     "files": split_and_match_files,
     "quickstart_file": expand_path,
     "junit_xml": expand_path,
+    "junit_format": check_junit_format,
     "follow_imports": check_follow_imports,
     "no_site_packages": bool,
     "plugins": lambda s: [p.strip() for p in split_commas(s)],
@@ -200,6 +212,7 @@ def split_commas(value: str) -> list[str]:
         "python_version": parse_version,
         "mypy_path": lambda s: [expand_path(p) for p in try_split(s, "[,:]")],
         "files": lambda s: split_and_match_files_list(try_split(s)),
+        "junit_format": lambda s: check_junit_format(str(s)),
         "follow_imports": lambda s: check_follow_imports(str(s)),
         "plugins": try_split,
         "always_true": try_split,
diff --git a/mypy/main.py b/mypy/main.py
index 1aede530c33e..5e0dc17c668a 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -7,6 +7,7 @@
 import subprocess
 import sys
 import time
+from collections import defaultdict
 from gettext import gettext
 from typing import IO, Any, Final, NoReturn, Sequence, TextIO
 
@@ -158,11 +159,14 @@ def run_build(
     formatter = util.FancyFormatter(stdout, stderr, options.hide_error_codes)
 
     messages = []
+    messages_by_file = defaultdict(list)
 
-    def flush_errors(new_messages: list[str], serious: bool) -> None:
+    def flush_errors(filename: str | None, new_messages: list[str], serious: bool) -> None:
         if options.pretty:
             new_messages = formatter.fit_in_terminal(new_messages)
         messages.extend(new_messages)
+        if new_messages:
+            messages_by_file[filename].extend(new_messages)
         if options.non_interactive:
             # Collect messages and possibly show them later.
             return
@@ -200,7 +204,7 @@ def flush_errors(new_messages: list[str], serious: bool) -> None:
             ),
             file=stderr,
         )
-    maybe_write_junit_xml(time.time() - t0, serious, messages, options)
+    maybe_write_junit_xml(time.time() - t0, serious, messages, messages_by_file, options)
     return res, messages, blockers
 
 
@@ -1054,6 +1058,12 @@ def add_invertible_flag(
     other_group = parser.add_argument_group(title="Miscellaneous")
     other_group.add_argument("--quickstart-file", help=argparse.SUPPRESS)
     other_group.add_argument("--junit-xml", help="Write junit.xml to the given file")
+    imports_group.add_argument(
+        "--junit-format",
+        choices=["global", "per_file"],
+        default="global",
+        help="If --junit-xml is set, specifies format. global: single test with all errors; per_file: one test entry per file with failures",
+    )
     other_group.add_argument(
         "--find-occurrences",
         metavar="CLASS.MEMBER",
@@ -1483,18 +1493,32 @@ def process_cache_map(
         options.cache_map[source] = (meta_file, data_file)
 
 
-def maybe_write_junit_xml(td: float, serious: bool, messages: list[str], options: Options) -> None:
+def maybe_write_junit_xml(
+    td: float,
+    serious: bool,
+    all_messages: list[str],
+    messages_by_file: dict[str | None, list[str]],
+    options: Options,
+) -> None:
     if options.junit_xml:
         py_version = f"{options.python_version[0]}_{options.python_version[1]}"
-        util.write_junit_xml(
-            td, serious, messages, options.junit_xml, py_version, options.platform
-        )
+        if options.junit_format == "global":
+            util.write_junit_xml(
+                td, serious, {None: all_messages}, options.junit_xml, py_version, options.platform
+            )
+        else:
+            # per_file
+            util.write_junit_xml(
+                td, serious, messages_by_file, options.junit_xml, py_version, options.platform
+            )
 
 
 def fail(msg: str, stderr: TextIO, options: Options) -> NoReturn:
     """Fail with a serious error."""
     stderr.write(f"{msg}\n")
-    maybe_write_junit_xml(0.0, serious=True, messages=[msg], options=options)
+    maybe_write_junit_xml(
+        0.0, serious=True, all_messages=[msg], messages_by_file={None: [msg]}, options=options
+    )
     sys.exit(2)
 
 
diff --git a/mypy/options.py b/mypy/options.py
index 8bb20dbd4410..38a87e423766 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -255,6 +255,8 @@ def __init__(self) -> None:
         # Write junit.xml to given file
         self.junit_xml: str | None = None
 
+        self.junit_format: str = "global"  # global|per_file
+
         # Caching and incremental checking options
         self.incremental = True
         self.cache_dir = defaults.CACHE_DIR
diff --git a/mypy/test/testerrorstream.py b/mypy/test/testerrorstream.py
index 4b98f10fc9ca..5ed112fd31e7 100644
--- a/mypy/test/testerrorstream.py
+++ b/mypy/test/testerrorstream.py
@@ -29,7 +29,7 @@ def test_error_stream(testcase: DataDrivenTestCase) -> None:
 
     logged_messages: list[str] = []
 
-    def flush_errors(msgs: list[str], serious: bool) -> None:
+    def flush_errors(filename: str | None, msgs: list[str], serious: bool) -> None:
         if msgs:
             logged_messages.append("==== Errors flushed ====")
             logged_messages.extend(msgs)
diff --git a/mypy/test/testgraph.py b/mypy/test/testgraph.py
index b0d148d5ae9c..0355e75e8c34 100644
--- a/mypy/test/testgraph.py
+++ b/mypy/test/testgraph.py
@@ -50,7 +50,7 @@ def _make_manager(self) -> BuildManager:
             plugin=Plugin(options),
             plugins_snapshot={},
             errors=errors,
-            flush_errors=lambda msgs, serious: None,
+            flush_errors=lambda filename, msgs, serious: None,
             fscache=fscache,
             stdout=sys.stdout,
             stderr=sys.stderr,
diff --git a/mypy/test/testutil.py b/mypy/test/testutil.py
index 571e4d0b11f2..d0d54ffec8c6 100644
--- a/mypy/test/testutil.py
+++ b/mypy/test/testutil.py
@@ -4,7 +4,7 @@
 from unittest import TestCase, mock
 
 from mypy.inspections import parse_location
-from mypy.util import get_terminal_width
+from mypy.util import _generate_junit_contents, get_terminal_width
 
 
 class TestGetTerminalSize(TestCase):
@@ -20,3 +20,70 @@ def test_get_terminal_size_in_pty_defaults_to_80(self) -> None:
     def test_parse_location_windows(self) -> None:
         assert parse_location(r"C:\test.py:1:1") == (r"C:\test.py", [1, 1])
         assert parse_location(r"C:\test.py:1:1:1:1") == (r"C:\test.py", [1, 1, 1, 1])
+
+
+class TestWriteJunitXml(TestCase):
+    def test_junit_pass(self) -> None:
+        serious = False
+        messages_by_file: dict[str | None, list[str]] = {}
+        expected = """<?xml version="1.0" encoding="utf-8"?>
+<testsuite errors="0" failures="0" name="mypy" skips="0" tests="1" time="1.230">
+  <testcase classname="mypy" file="mypy" line="1" name="mypy-py3.14-test-plat" time="1.230">
+  </testcase>
+</testsuite>
+"""
+        result = _generate_junit_contents(
+            dt=1.23,
+            serious=serious,
+            messages_by_file=messages_by_file,
+            version="3.14",
+            platform="test-plat",
+        )
+        assert result == expected
+
+    def test_junit_fail_two_files(self) -> None:
+        serious = False
+        messages_by_file: dict[str | None, list[str]] = {
+            "file1.py": ["Test failed", "another line"],
+            "file2.py": ["Another failure", "line 2"],
+        }
+        expected = """<?xml version="1.0" encoding="utf-8"?>
+<testsuite errors="0" failures="2" name="mypy" skips="0" tests="2" time="1.230">
+  <testcase classname="mypy" file="file1.py" line="1" name="mypy-py3.14-test-plat file1.py" time="1.230">
+    <failure message="mypy produced messages">Test failed
+another line</failure>
+  </testcase>
+  <testcase classname="mypy" file="file2.py" line="1" name="mypy-py3.14-test-plat file2.py" time="1.230">
+    <failure message="mypy produced messages">Another failure
+line 2</failure>
+  </testcase>
+</testsuite>
+"""
+        result = _generate_junit_contents(
+            dt=1.23,
+            serious=serious,
+            messages_by_file=messages_by_file,
+            version="3.14",
+            platform="test-plat",
+        )
+        assert result == expected
+
+    def test_serious_error(self) -> None:
+        serious = True
+        messages_by_file: dict[str | None, list[str]] = {None: ["Error line 1", "Error line 2"]}
+        expected = """<?xml version="1.0" encoding="utf-8"?>
+<testsuite errors="1" failures="0" name="mypy" skips="0" tests="1" time="1.230">
+  <testcase classname="mypy" file="mypy" line="1" name="mypy-py3.14-test-plat" time="1.230">
+    <failure message="mypy produced messages">Error line 1
+Error line 2</failure>
+  </testcase>
+</testsuite>
+"""
+        result = _generate_junit_contents(
+            dt=1.23,
+            serious=serious,
+            messages_by_file=messages_by_file,
+            version="3.14",
+            platform="test-plat",
+        )
+        assert result == expected
diff --git a/mypy/util.py b/mypy/util.py
index d0f2f8c6cc36..7a13de427e8e 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -234,45 +234,85 @@ def get_mypy_comments(source: str) -> list[tuple[int, str]]:
     return results
 
 
-PASS_TEMPLATE: Final = """<?xml version="1.0" encoding="utf-8"?>
-<testsuite errors="0" failures="0" name="mypy" skips="0" tests="1" time="{time:.3f}">
-  <testcase classname="mypy" file="mypy" line="1" name="mypy-py{ver}-{platform}" time="{time:.3f}">
-  </testcase>
-</testsuite>
+JUNIT_HEADER_TEMPLATE: Final = """<?xml version="1.0" encoding="utf-8"?>
+<testsuite errors="{errors}" failures="{failures}" name="mypy" skips="0" tests="{tests}" time="{time:.3f}">
 """
 
-FAIL_TEMPLATE: Final = """<?xml version="1.0" encoding="utf-8"?>
-<testsuite errors="0" failures="1" name="mypy" skips="0" tests="1" time="{time:.3f}">
-  <testcase classname="mypy" file="mypy" line="1" name="mypy-py{ver}-{platform}" time="{time:.3f}">
+JUNIT_TESTCASE_FAIL_TEMPLATE: Final = """  <testcase classname="mypy" file="{filename}" line="1" name="{name}" time="{time:.3f}">
     <failure message="mypy produced messages">{text}</failure>
   </testcase>
-</testsuite>
 """
 
-ERROR_TEMPLATE: Final = """<?xml version="1.0" encoding="utf-8"?>
-<testsuite errors="1" failures="0" name="mypy" skips="0" tests="1" time="{time:.3f}">
-  <testcase classname="mypy" file="mypy" line="1" name="mypy-py{ver}-{platform}" time="{time:.3f}">
+JUNIT_ERROR_TEMPLATE: Final = """  <testcase classname="mypy" file="mypy" line="1" name="mypy-py{ver}-{platform}" time="{time:.3f}">
     <error message="mypy produced errors">{text}</error>
   </testcase>
-</testsuite>
 """
 
+JUNIT_TESTCASE_PASS_TEMPLATE: Final = """  <testcase classname="mypy" file="mypy" line="1" name="mypy-py{ver}-{platform}" time="{time:.3f}">
+  </testcase>
+"""
 
-def write_junit_xml(
-    dt: float, serious: bool, messages: list[str], path: str, version: str, platform: str
-) -> None:
-    from xml.sax.saxutils import escape
+JUNIT_FOOTER: Final = """</testsuite>
+"""
 
-    if not messages and not serious:
-        xml = PASS_TEMPLATE.format(time=dt, ver=version, platform=platform)
-    elif not serious:
-        xml = FAIL_TEMPLATE.format(
-            text=escape("\n".join(messages)), time=dt, ver=version, platform=platform
-        )
+
+def _generate_junit_contents(
+    dt: float,
+    serious: bool,
+    messages_by_file: dict[str | None, list[str]],
+    version: str,
+    platform: str,
+) -> str:
+    if serious:
+        failures = 0
+        errors = len(messages_by_file)
     else:
-        xml = ERROR_TEMPLATE.format(
-            text=escape("\n".join(messages)), time=dt, ver=version, platform=platform
-        )
+        failures = len(messages_by_file)
+        errors = 0
+
+    xml = JUNIT_HEADER_TEMPLATE.format(
+        errors=errors,
+        failures=failures,
+        time=dt,
+        # If there are no messages, we still write one "test" indicating success.
+        tests=len(messages_by_file) or 1,
+    )
+
+    if not messages_by_file:
+        xml += JUNIT_TESTCASE_PASS_TEMPLATE.format(time=dt, ver=version, platform=platform)
+    else:
+        for filename, messages in messages_by_file.items():
+            if filename is not None:
+                xml += JUNIT_TESTCASE_FAIL_TEMPLATE.format(
+                    text="\n".join(messages),
+                    filename=filename,
+                    time=dt,
+                    name="mypy-py{ver}-{platform} {filename}".format(
+                        ver=version, platform=platform, filename=filename
+                    ),
+                )
+            else:
+                xml += JUNIT_TESTCASE_FAIL_TEMPLATE.format(
+                    text="\n".join(messages),
+                    filename="mypy",
+                    time=dt,
+                    name="mypy-py{ver}-{platform}".format(ver=version, platform=platform),
+                )
+
+    xml += JUNIT_FOOTER
+
+    return xml
+
+
+def write_junit_xml(
+    dt: float,
+    serious: bool,
+    messages_by_file: dict[str | None, list[str]],
+    path: str,
+    version: str,
+    platform: str,
+) -> None:
+    xml = _generate_junit_contents(dt, serious, messages_by_file, version, platform)
 
     # checks for a directory structure in path and creates folders if needed
     xml_dirs = os.path.dirname(os.path.abspath(path))
diff --git a/mypyc/build.py b/mypyc/build.py
index 0af8908e14d0..a9082df81945 100644
--- a/mypyc/build.py
+++ b/mypyc/build.py
@@ -105,7 +105,9 @@ def emit_messages(options: Options, messages: list[str], dt: float, serious: boo
     # ... you know, just in case.
     if options.junit_xml:
         py_version = f"{options.python_version[0]}_{options.python_version[1]}"
-        write_junit_xml(dt, serious, messages, options.junit_xml, py_version, options.platform)
+        write_junit_xml(
+            dt, serious, {None: messages}, options.junit_xml, py_version, options.platform
+        )
     if messages:
         print("\n".join(messages))
 

From 544e6ce119ec6bdd5eabab53a433264c98dc7d9c Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 4 Nov 2023 23:42:22 +0000
Subject: [PATCH 228/288] Fix type narrowing in lambda expressions (#16407)

Fixes https://github.com/python/mypy/issues/4297

Fix is straightforward: without properly pushing lambda expression on
the stack, the previous fix @JukkaL added for nested functions doesn't
work for lambdas (it thinks that we are at global scope).
---
 mypy/checkexpr.py                           |  3 ++-
 test-data/unit/check-inference-context.test | 13 +++++++++++++
 2 files changed, 15 insertions(+), 1 deletion(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 95700a52af02..056b2f7bd2c6 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -5195,7 +5195,8 @@ def visit_lambda_expr(self, e: LambdaExpr) -> Type:
         else:
             # Type context available.
             self.chk.return_types.append(inferred_type.ret_type)
-            self.chk.check_func_item(e, type_override=type_override)
+            with self.chk.tscope.function_scope(e):
+                self.chk.check_func_item(e, type_override=type_override)
             if not self.chk.has_type(e.expr()):
                 # TODO: return expression must be accepted before exiting function scope.
                 self.accept(e.expr(), allow_none_return=True)
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test
index a933acbf7f32..afe6548df2d4 100644
--- a/test-data/unit/check-inference-context.test
+++ b/test-data/unit/check-inference-context.test
@@ -1482,3 +1482,16 @@ b: Any
 i = i if isinstance(i, int) else b
 reveal_type(i)  # N: Revealed type is "Union[Any, builtins.int]"
 [builtins fixtures/isinstance.pyi]
+
+[case testLambdaInferenceUsesNarrowedTypes]
+from typing import Optional, Callable
+
+def f1(key: Callable[[], str]) -> None: ...
+def f2(key: object) -> None: ...
+
+def g(b: Optional[str]) -> None:
+    if b:
+        f1(lambda: reveal_type(b))  # N: Revealed type is "builtins.str"
+        z: Callable[[], str] = lambda: reveal_type(b)  # N: Revealed type is "builtins.str"
+        f2(lambda: reveal_type(b))  # N: Revealed type is "builtins.str"
+        lambda: reveal_type(b)  # N: Revealed type is "builtins.str"

From 285519cca8c64f5fc35bde6bfa52c48d1fb1cea1 Mon Sep 17 00:00:00 2001
From: Matthew Wright <wrightm@gmail.com>
Date: Mon, 6 Nov 2023 20:59:57 -0600
Subject: [PATCH 229/288] Fix junit writing bug introduced in #16388 (#16417)

#16388 introduced a bug where, with `--junit-format=global`, the junit
file would indicate an error (with no message) even if everything
passed. That was because `_generate_junit_contents` would check if
`messages_by_file` was empty or not to determine if there were failures,
but with `--junit-format=global` we'd pass in a dictionary of the form
`{None: all_messages}`; `all_messages` would be empty, but the resulting
dictionary wouldn't be.

The fix is to pass in an empty dictionary if there are no messages.

I've tested manually with `--junit-format=global` and
`--junit-format=per_file` in the successful case to make sure the files
are written correctly now.
---
 mypy/main.py   | 7 ++++++-
 mypyc/build.py | 7 ++++++-
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/mypy/main.py b/mypy/main.py
index 5e0dc17c668a..8a35c2056963 100644
--- a/mypy/main.py
+++ b/mypy/main.py
@@ -1504,7 +1504,12 @@ def maybe_write_junit_xml(
         py_version = f"{options.python_version[0]}_{options.python_version[1]}"
         if options.junit_format == "global":
             util.write_junit_xml(
-                td, serious, {None: all_messages}, options.junit_xml, py_version, options.platform
+                td,
+                serious,
+                {None: all_messages} if all_messages else {},
+                options.junit_xml,
+                py_version,
+                options.platform,
             )
         else:
             # per_file
diff --git a/mypyc/build.py b/mypyc/build.py
index a9082df81945..485803acba46 100644
--- a/mypyc/build.py
+++ b/mypyc/build.py
@@ -106,7 +106,12 @@ def emit_messages(options: Options, messages: list[str], dt: float, serious: boo
     if options.junit_xml:
         py_version = f"{options.python_version[0]}_{options.python_version[1]}"
         write_junit_xml(
-            dt, serious, {None: messages}, options.junit_xml, py_version, options.platform
+            dt,
+            serious,
+            {None: messages} if messages else {},
+            options.junit_xml,
+            py_version,
+            options.platform,
         )
     if messages:
         print("\n".join(messages))

From bc591c756a453bb6a78a31e734b1f0aa475e90e0 Mon Sep 17 00:00:00 2001
From: Valentin Stanciu <250871+svalentin@users.noreply.github.com>
Date: Wed, 8 Nov 2023 13:41:43 +0000
Subject: [PATCH 230/288] fix dmypy after junit_xml change (#16421)

https://github.com/python/mypy/pull/16388/ changed the definition of
`write_junit_xml` but missed a call site in dmypy. This fixes it.
---
 mypy/dmypy/client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypy/dmypy/client.py b/mypy/dmypy/client.py
index 229740e44db0..9f0751e93609 100644
--- a/mypy/dmypy/client.py
+++ b/mypy/dmypy/client.py
@@ -573,7 +573,7 @@ def check_output(
         write_junit_xml(
             response["roundtrip_time"],
             bool(err),
-            messages,
+            {None: messages} if messages else {},
             junit_xml,
             response["python_version"],
             response["platform"],

From a1864d4fa498ccd8773c2247eb62282644174d26 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 9 Nov 2023 00:04:36 +0000
Subject: [PATCH 231/288] Add error code for mutable covariant override
 (#16399)

Fixes https://github.com/python/mypy/issues/3208

Interestingly, we already prohibit this when the override is a mutable
property (goes through `FuncDef`-related code), and in multiple
inheritance. The logic there is not very principled, but I just left a
TODO instead of extending the scope of this PR.
---
 docs/source/error_code_list2.rst     | 31 +++++++++++++++++++++++++++
 mypy/checker.py                      | 21 +++++++++++++++---
 mypy/errorcodes.py                   |  6 ++++++
 mypy/message_registry.py             |  3 +++
 test-data/unit/check-errorcodes.test | 32 ++++++++++++++++++++++++++++
 5 files changed, 90 insertions(+), 3 deletions(-)

diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst
index cc5c9b0a1bc6..9e24f21909d5 100644
--- a/docs/source/error_code_list2.rst
+++ b/docs/source/error_code_list2.rst
@@ -482,6 +482,37 @@ Example:
         def g(self, y: int) -> None:
             pass
 
+.. _code-mutable-override:
+
+Check that overrides of mutable attributes are safe
+---------------------------------------------------
+
+This will enable the check for unsafe overrides of mutable attributes. For
+historical reasons, and because this is a relatively common pattern in Python,
+this check is not enabled by default. The example below is unsafe, and will be
+flagged when this error code is enabled:
+
+.. code-block:: python
+
+    from typing import Any
+
+    class C:
+        x: float
+        y: float
+        z: float
+
+    class D(C):
+        x: int  # Error: Covariant override of a mutable attribute
+                # (base class "C" defined the type as "float",
+                # expression has type "int")  [mutable-override]
+        y: float  # OK
+        z: Any  # OK
+
+    def f(c: C) -> None:
+        c.x = 1.1
+    d = D()
+    f(d)
+    d.x >> 1  # This will crash at runtime, because d.x is now a float, not an int
 
 .. _code-unimported-reveal:
 
diff --git a/mypy/checker.py b/mypy/checker.py
index f51ba746ea75..e4eb58d40715 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -2041,7 +2041,6 @@ def check_method_override_for_base_with_name(
                 pass
             elif isinstance(original_type, FunctionLike) and isinstance(typ, FunctionLike):
                 # Check that the types are compatible.
-                # TODO overloaded signatures
                 self.check_override(
                     typ,
                     original_type,
@@ -2056,7 +2055,6 @@ def check_method_override_for_base_with_name(
                 # Assume invariance for a non-callable attribute here. Note
                 # that this doesn't affect read-only properties which can have
                 # covariant overrides.
-                #
                 pass
             elif (
                 original_node
@@ -2636,6 +2634,9 @@ class C(B, A[int]): ...  # this is unsafe because...
         first_type = get_proper_type(self.determine_type_of_member(first))
         second_type = get_proper_type(self.determine_type_of_member(second))
 
+        # TODO: use more principled logic to decide is_subtype() vs is_equivalent().
+        # We should rely on mutability of superclass node, not on types being Callable.
+
         # start with the special case that Instance can be a subtype of FunctionLike
         call = None
         if isinstance(first_type, Instance):
@@ -3211,7 +3212,7 @@ def check_compatibility_super(
                 if base_static and compare_static:
                     lvalue_node.is_staticmethod = True
 
-            return self.check_subtype(
+            ok = self.check_subtype(
                 compare_type,
                 base_type,
                 rvalue,
@@ -3219,6 +3220,20 @@ def check_compatibility_super(
                 "expression has type",
                 f'base class "{base.name}" defined the type as',
             )
+            if (
+                ok
+                and codes.MUTABLE_OVERRIDE in self.options.enabled_error_codes
+                and self.is_writable_attribute(base_node)
+            ):
+                ok = self.check_subtype(
+                    base_type,
+                    compare_type,
+                    rvalue,
+                    message_registry.COVARIANT_OVERRIDE_OF_MUTABLE_ATTRIBUTE,
+                    f'base class "{base.name}" defined the type as',
+                    "expression has type",
+                )
+            return ok
         return True
 
     def lvalue_type_from_base(
diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py
index c6e9de9f31c1..72ee63a6a897 100644
--- a/mypy/errorcodes.py
+++ b/mypy/errorcodes.py
@@ -255,6 +255,12 @@ def __hash__(self) -> int:
     "General",
     default_enabled=False,
 )
+MUTABLE_OVERRIDE: Final[ErrorCode] = ErrorCode(
+    "mutable-override",
+    "Reject covariant overrides for mutable attributes",
+    "General",
+    default_enabled=False,
+)
 
 
 # Syntax errors are often blocking.
diff --git a/mypy/message_registry.py b/mypy/message_registry.py
index 93581d5aca90..8dc14e158d90 100644
--- a/mypy/message_registry.py
+++ b/mypy/message_registry.py
@@ -63,6 +63,9 @@ def with_additional_msg(self, info: str) -> ErrorMessage:
 INCOMPATIBLE_TYPES_IN_ASSIGNMENT: Final = ErrorMessage(
     "Incompatible types in assignment", code=codes.ASSIGNMENT
 )
+COVARIANT_OVERRIDE_OF_MUTABLE_ATTRIBUTE: Final = ErrorMessage(
+    "Covariant override of a mutable attribute", code=codes.MUTABLE_OVERRIDE
+)
 INCOMPATIBLE_TYPES_IN_AWAIT: Final = ErrorMessage('Incompatible types in "await"')
 INCOMPATIBLE_REDEFINITION: Final = ErrorMessage("Incompatible redefinition")
 INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER: Final = (
diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test
index 2282f21bcfa6..28487a456156 100644
--- a/test-data/unit/check-errorcodes.test
+++ b/test-data/unit/check-errorcodes.test
@@ -1148,3 +1148,35 @@ main:3: note: Revealed local types are:
 main:3: note:     x: builtins.int
 main:3: error: Name "reveal_locals" is not defined  [unimported-reveal]
 [builtins fixtures/isinstancelist.pyi]
+
+[case testCovariantMutableOverride]
+# flags: --enable-error-code=mutable-override
+from typing import Any
+
+class C:
+    x: float
+    y: float
+    z: float
+    w: Any
+    @property
+    def foo(self) -> float: ...
+    @property
+    def bar(self) -> float: ...
+    @bar.setter
+    def bar(self, val: float) -> None: ...
+    baz: float
+    bad1: float
+    bad2: float
+class D(C):
+    x: int  # E: Covariant override of a mutable attribute (base class "C" defined the type as "float", expression has type "int")  [mutable-override]
+    y: float
+    z: Any
+    w: float
+    foo: int
+    bar: int  # E: Covariant override of a mutable attribute (base class "C" defined the type as "float", expression has type "int")  [mutable-override]
+    def one(self) -> None:
+        self.baz = 5
+    bad1 = 5  # E: Covariant override of a mutable attribute (base class "C" defined the type as "float", expression has type "int")  [mutable-override]
+    def other(self) -> None:
+        self.bad2: int = 5  # E: Covariant override of a mutable attribute (base class "C" defined the type as "float", expression has type "int")  [mutable-override]
+[builtins fixtures/property.pyi]

From 583813284c64719d016117096907b94eb1b82e74 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Thu, 9 Nov 2023 12:27:46 +0000
Subject: [PATCH 232/288] Add script to generate draft changelog entries
 (#16430)

The script formats changelog entries based on commit history and has some
rules to filter out certain changes, such as typeshed syncs and changes
cherry-picked to the previous release branch.

Example of how to run it:
```
$ python misc/generate_changelog.py 1.7
Generating changelog for 1.7
Previous release was     1.6
Merge base: d7b24514d7301f86031b7d1e2215cf8c2476bec0
NOTE: Drop "Fix crash on ParamSpec unification (for real)", since it was in previous release branch
NOTE: Drop "Fix crash on ParamSpec unification", since it was in previous release branch
NOTE: Drop "Fix mypyc regression with pretty", since it was in previous release branch
NOTE: Drop "Clear cache when adding --new-type-inference", since it was in previous release branch
NOTE: Drop "Match note error codes to import error codes", since it was in previous release branch
NOTE: Drop "Make PEP 695 constructs give a reasonable error message", since it was in previous release branch
NOTE: Drop "Fix ParamSpec inference for callback protocols", since it was in previous release branch
NOTE: Drop "Try upgrading tox", since it was in previous release branch
NOTE: Drop "Optimize Unpack for failures", since it was in previous release branch

 * Fix crash on unpack call special-casing (Ivan Levkivskyi, PR [16381](https://github.com/python/mypy/pull/16381))
 * Fix file reloading in dmypy with --export-types (Ivan Levkivskyi, PR [16359](https://github.com/python/mypy/pull/16359))
 * Fix daemon crash caused by deleted submodule (Jukka Lehtosalo, PR [16370](https://github.com/python/mypy/pull/16370))
...
```
---
 misc/generate_changelog.py | 201 +++++++++++++++++++++++++++++++++++++
 1 file changed, 201 insertions(+)
 create mode 100644 misc/generate_changelog.py

diff --git a/misc/generate_changelog.py b/misc/generate_changelog.py
new file mode 100644
index 000000000000..7c7f28b6eeb7
--- /dev/null
+++ b/misc/generate_changelog.py
@@ -0,0 +1,201 @@
+"""Generate the changelog for a mypy release."""
+
+from __future__ import annotations
+
+import argparse
+import re
+import subprocess
+import sys
+from dataclasses import dataclass
+
+
def find_all_release_branches() -> list[tuple[int, int]]:
    """Return (major, minor) pairs for every remote release-X.Y branch."""
    proc = subprocess.run(["git", "branch", "-r"], text=True, capture_output=True, check=True)
    branch_re = re.compile(r"origin/release-([0-9]+)\.([0-9]+)$")
    found: list[tuple[int, int]] = []
    for raw_line in proc.stdout.splitlines():
        match = branch_re.match(raw_line.strip())
        if match:
            found.append((int(match.group(1)), int(match.group(2))))
    return found
+
+
def git_merge_base(rev1: str, rev2: str) -> str:
    """Return the hash of the merge base of *rev1* and *rev2*."""
    completed = subprocess.run(
        ["git", "merge-base", rev1, rev2], text=True, capture_output=True, check=True
    )
    output = completed.stdout
    return output.strip()
+
+
@dataclass
class CommitInfo:
    # Parsed metadata for a single commit, built by git_commit_log().
    commit: str  # full commit hash
    author: str  # author name, passed through normalize_author()
    title: str  # commit subject line with any trailing "(#NNNN)" stripped
    pr_number: int | None  # PR number parsed from the "(#NNNN)" suffix, if any
+
+
def normalize_author(author: str) -> str:
    """Map known ad-hoc author spellings to consistent full names."""
    aliases = {
        "AlexWaygood": "Alex Waygood",
        "jhance": "Jared Hance",
    }
    return aliases.get(author, author)
+
+
def git_commit_log(rev1: str, rev2: str) -> list[CommitInfo]:
    """Return CommitInfo entries for the commits in the range rev1..rev2."""
    proc = subprocess.run(
        ["git", "log", "--pretty=%H\t%an\t%s", f"{rev1}..{rev2}"],
        text=True,
        capture_output=True,
        check=True,
    )
    entries: list[CommitInfo] = []
    for raw_line in proc.stdout.splitlines():
        sha, who, subject = raw_line.strip().split("\t", 2)
        # Pull a trailing "(#NNNN)" PR reference out of the subject, if present.
        number: int | None = None
        if m := re.match(r".*\(#([0-9]+)\) *$", subject):
            number = int(m.group(1))
            subject = re.sub(r" *\(#[0-9]+\) *$", "", subject)
        entries.append(CommitInfo(sha, normalize_author(who), subject, number))
    return entries
+
+
def filter_omitted_commits(commits: list[CommitInfo]) -> list[CommitInfo]:
    """Drop commits that never get a release-notes entry."""
    typeshed_reverts = (
        "Revert sum literal integer change",
        "Remove use of LiteralString in builtins",
        "Revert typeshed ctypes change",
        "Revert use of `ParamSpec` for `functools.wraps`",
    )

    def omitted(title: str) -> bool:
        if title.startswith("Sync typeshed"):
            # Typeshed syncs aren't mentioned in release notes
            return True
        if title.startswith(typeshed_reverts):
            # These are generated by a typeshed sync.
            return True
        if re.search(r"(bump|update).*version.*\+dev", title.lower()):
            # Version number updates aren't mentioned
            return True
        if "pre-commit autoupdate" in title:
            return True
        if title.startswith(("Update commit hashes", "Update hashes")):
            # Internal tool change
            return True
        return False

    return [c for c in commits if not omitted(c.title)]
+
+
def normalize_title(title: str) -> str:
    """Strip a cherry-pick prefix like "[release 1.7] " from a commit title.

    We sometimes add such a prefix when cherry-picking commits to a
    release branch. Removing it lets the commit be matched against the
    corresponding commit on the master branch.
    """
    if m := re.match(r"\[release [0-9.]+\] *", title, flags=re.I):
        # Slice off the matched prefix only. The previous implementation used
        # str.replace, which removes *every* occurrence of the prefix text in
        # the title, not just the leading one.
        title = title[m.end() :]
    return title
+
+
def filter_out_commits_from_old_release_branch(
    new_commits: list[CommitInfo], old_commits: list[CommitInfo]
) -> list[CommitInfo]:
    """Remove commits that were already cherry-picked to the old release branch.

    A commit is considered a duplicate if its normalized title matches an old
    title either as-is or with its "(#NNNN)" PR suffix re-attached.
    """
    seen_titles = {normalize_title(c.title) for c in old_commits}
    kept: list[CommitInfo] = []
    for commit in new_commits:
        plain = normalize_title(commit.title)
        with_pr = normalize_title(f"{commit.title} (#{commit.pr_number})")
        if plain in seen_titles or with_pr in seen_titles:
            print(f'NOTE: Drop "{commit.title}", since it was in previous release branch')
        else:
            kept.append(commit)
    return kept
+
+
def find_changes_between_releases(old_branch: str, new_branch: str) -> list[CommitInfo]:
    """Return the commits on new_branch that should appear in its changelog."""
    base = git_merge_base(old_branch, new_branch)
    print(f"Merge base: {base}")
    candidates = git_commit_log(base, new_branch)
    already_released = git_commit_log(base, old_branch)

    # First drop commits that never get a release-notes entry, then drop
    # anything that was cherry-picked to the previous release branch.
    candidates = filter_omitted_commits(candidates)
    return filter_out_commits_from_old_release_branch(candidates, already_released)
+
+
def format_changelog_entry(c: CommitInfo) -> str:
    """Format one commit as a Markdown changelog bullet.

    Example:
     * Fix crash (Jane Doe, PR [123](https://github.com/python/mypy/pull/123))

    The PR link is omitted when the commit has no associated PR number.
    """
    # The original kept a dead alternative implementation parked in a stray
    # triple-quoted string here; it has been removed.
    s = f" * {c.title} ({c.author}"
    if c.pr_number:
        s += f", PR [{c.pr_number}](https://github.com/python/mypy/pull/{c.pr_number})"
    s += ")"

    return s
+
+
def main() -> None:
    """Entry point: print a draft changelog for the given X.Y release.

    Compares origin/release-X.Y against the previous release branch and
    prints one formatted bullet per changelog-worthy commit.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("version", help="target mypy version (form X.Y)")
    # --local skips the 'git fetch', using whatever release branches are
    # already known locally.
    parser.add_argument("--local", action="store_true")
    args = parser.parse_args()
    version: str = args.version
    local: bool = args.local

    if not re.match(r"[0-9]+\.[0-9]+$", version):
        sys.exit(f"error: Release must be of form X.Y (not {version!r})")
    major, minor = (int(component) for component in version.split("."))

    if not local:
        print("Running 'git fetch' to fetch all release branches...")
        subprocess.run(["git", "fetch"], check=True)

    if minor > 0:
        # Within a major series the previous release is simply X.(Y-1).
        prev_major = major
        prev_minor = minor - 1
    else:
        # For a x.0 release, the previous release is the most recent (x-1).y release.
        all_releases = sorted(find_all_release_branches())
        if (major, minor) not in all_releases:
            sys.exit(f"error: Can't find release branch for {major}.{minor} at origin")
        # Scan from the newest branch downwards for the latest (x-1).y release.
        for i in reversed(range(len(all_releases))):
            if all_releases[i][0] == major - 1:
                prev_major, prev_minor = all_releases[i]
                break
        else:
            # No (x-1).y branch exists at all.
            sys.exit("error: Could not determine previous release")
    print(f"Generating changelog for {major}.{minor}")
    print(f"Previous release was     {prev_major}.{prev_minor}")

    new_branch = f"origin/release-{major}.{minor}"
    old_branch = f"origin/release-{prev_major}.{prev_minor}"

    changes = find_changes_between_releases(old_branch, new_branch)

    print()
    for c in changes:
        print(format_changelog_entry(c))


if __name__ == "__main__":
    main()

From f154b756097962325e231f6999c4bfd5d3f4d226 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Thu, 9 Nov 2023 12:33:18 +0000
Subject: [PATCH 233/288] Add draft changelog for 1.7 (#16431)

Generated using `misc/generate_changelog.py`.
---
 CHANGELOG.md | 135 ++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 133 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 74f7c676c279..ccd161520e61 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,11 +1,142 @@
 # Mypy Release Notes
 
-## Unreleased
+## Next release
+
+## Mypy 1.7 [unreleased]
+
+We’ve just uploaded mypy 1.7 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:
+
+```
+    python3 -m pip install -U mypy
+```
+
+You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
 
 Stubgen will now include `__all__` in its output if it is in the input file (PR [16356](https://github.com/python/mypy/pull/16356)).
 
 #### Other Notable Changes and Fixes
-...
+
+ * Fix crash on unpack call special-casing (Ivan Levkivskyi, PR [16381](https://github.com/python/mypy/pull/16381))
+ * Fix file reloading in dmypy with --export-types (Ivan Levkivskyi, PR [16359](https://github.com/python/mypy/pull/16359))
+ * Fix daemon crash caused by deleted submodule (Jukka Lehtosalo, PR [16370](https://github.com/python/mypy/pull/16370))
+ * Enable Unpack/TypeVarTuple support (Ivan Levkivskyi, PR [16354](https://github.com/python/mypy/pull/16354))
+ * Fix dmypy inspect on Windows (Ivan Levkivskyi, PR [16355](https://github.com/python/mypy/pull/16355))
+ * Fix dmypy inspect for namespace packages (Ivan Levkivskyi, PR [16357](https://github.com/python/mypy/pull/16357))
+ * Fix incremental crash on TypedDict in method (Ivan Levkivskyi, PR [16364](https://github.com/python/mypy/pull/16364))
+ * doc: remove duplicate word (dinaldoap, PR [16365](https://github.com/python/mypy/pull/16365))
+ * Speed up type argument checking (Jukka Lehtosalo, PR [16353](https://github.com/python/mypy/pull/16353))
+ * Avoid importing from setuptools._distutils (Shantanu, PR [16348](https://github.com/python/mypy/pull/16348))
+ * Add fast path for checking self types (Jukka Lehtosalo, PR [16352](https://github.com/python/mypy/pull/16352))
+ * Cache information about whether file is typeshed file (Jukka Lehtosalo, PR [16351](https://github.com/python/mypy/pull/16351))
+ * Some final touches for variadic types support (Ivan Levkivskyi, PR [16334](https://github.com/python/mypy/pull/16334))
+ * Skip expensive repr() in logging call when not needed (Jukka Lehtosalo, PR [16350](https://github.com/python/mypy/pull/16350))
+ * Enable new type inference by default (Ivan Levkivskyi, PR [16345](https://github.com/python/mypy/pull/16345))
+ * Delete recursive aliases flags (Ivan Levkivskyi, PR [16346](https://github.com/python/mypy/pull/16346))
+ * Update starred expr error message to match Python's (Cibin Mathew, PR [16304](https://github.com/python/mypy/pull/16304))
+ * Properly use proper subtyping for callables (Ivan Levkivskyi, PR [16343](https://github.com/python/mypy/pull/16343))
+ * Write stubs with utf-8 encoding (Jørgen Lind, PR [16329](https://github.com/python/mypy/pull/16329))
+ * Support PEP-646 and PEP-692 in the same callable (Ivan Levkivskyi, PR [16294](https://github.com/python/mypy/pull/16294))
+ * Use upper bound as inference fallback more consistently (Ivan Levkivskyi, PR [16344](https://github.com/python/mypy/pull/16344))
+ * [daemon] Fix return type change to optional in generic function (Jukka Lehtosalo, PR [16342](https://github.com/python/mypy/pull/16342))
+ * [mypyc] Generate error on duplicate function definitions (Jukka Lehtosalo, PR [16309](https://github.com/python/mypy/pull/16309))
+ * Run macOS mypyc tests with Python 3.9 (Shantanu, PR [16326](https://github.com/python/mypy/pull/16326))
+ * Fix sdist build by not including CHANGELOG.md (Jukka Lehtosalo, PR [16323](https://github.com/python/mypy/pull/16323))
+ * Add `|=` and `|` operators support for `TypedDict` (Nikita Sobolev, PR [16249](https://github.com/python/mypy/pull/16249))
+ * Clarify variance convention for Parameters (Ivan Levkivskyi, PR [16302](https://github.com/python/mypy/pull/16302))
+ * refactor: `__str__` in `CFG` class (#16307) (Ihor, PR [16308](https://github.com/python/mypy/pull/16308))
+ * [mypyc] Don't crash on unreachable statements (Jukka Lehtosalo, PR [16311](https://github.com/python/mypy/pull/16311))
+ * stubgen: fix missing property setter in semantic analysis mode (Ali Hamdan, PR [16303](https://github.com/python/mypy/pull/16303))
+ * Narrow tuple types using len() (Ivan Levkivskyi, PR [16237](https://github.com/python/mypy/pull/16237))
+ * Lock test dependencies (Shantanu, PR [16283](https://github.com/python/mypy/pull/16283))
+ * Correctly recognize `typing_extensions.NewType` (Ganden Schaffner, PR [16298](https://github.com/python/mypy/pull/16298))
+ * fix: remove redundant `.format()` (Ihor, PR [16288](https://github.com/python/mypy/pull/16288))
+ * Fix daemon false positives related to module-level __getattr__ (Jukka Lehtosalo, PR [16292](https://github.com/python/mypy/pull/16292))
+ * [mypyc] Avoid cyclic reference in nested functions (Jukka Lehtosalo, PR [16268](https://github.com/python/mypy/pull/16268))
+ * Add `unimported-reveal` error code (Nikita Sobolev, PR [16271](https://github.com/python/mypy/pull/16271))
+ * Add a changelog (Shantanu, PR [16280](https://github.com/python/mypy/pull/16280))
+ * Support fancy new syntax for variadic types (Ivan Levkivskyi, PR [16242](https://github.com/python/mypy/pull/16242))
+ * Attempt to fix daemon crash related to ABCs (Jukka Lehtosalo, PR [16275](https://github.com/python/mypy/pull/16275))
+ * Bump test deps: `ruff` and `pre-commit-hooks` (Nikita Sobolev, PR [16273](https://github.com/python/mypy/pull/16273))
+ * Correctly handle variadic instances with empty arguments (Ivan Levkivskyi, PR [16238](https://github.com/python/mypy/pull/16238))
+ * Stream dmypy output instead of dumping everything at the end (Valentin Stanciu, PR [16252](https://github.com/python/mypy/pull/16252))
+ * stubgen: unify C extension and pure python stub generators with object oriented design (Chad Dombrova, PR [15770](https://github.com/python/mypy/pull/15770))
+ * Correctly handle runtime type applications of variadic types (Ivan Levkivskyi, PR [16240](https://github.com/python/mypy/pull/16240))
+ * [mypyc] Fix direct __dict__ access on inner functions in new Python (Shantanu, PR [16084](https://github.com/python/mypy/pull/16084))
+ * Fix `coverage` config (Alex Waygood, PR [16258](https://github.com/python/mypy/pull/16258))
+ * show dmypy errors post serving (Valentin Stanciu, PR [16250](https://github.com/python/mypy/pull/16250))
+ * Fix partially defined in the case of missing type maps (Shantanu, PR [15995](https://github.com/python/mypy/pull/15995))
+ * (🎁) drop 'dev' from 3.12 in the CI (KotlinIsland, PR [16239](https://github.com/python/mypy/pull/16239))
+ * Add an extra for mypyc dependencies (Shantanu, PR [16229](https://github.com/python/mypy/pull/16229))
+ * Use SPDX license identifier (Nikita Sobolev, PR [16230](https://github.com/python/mypy/pull/16230))
+ * Support variadic tuple packing/unpacking (Ivan Levkivskyi, PR [16205](https://github.com/python/mypy/pull/16205))
+ * Remove stubs packages from `stubinfo.py` where the runtime package has added a `py.typed` file (Alex Waygood, PR [16226](https://github.com/python/mypy/pull/16226))
+ * Bump ruff and black to their latest versions (Alex Waygood, PR [16221](https://github.com/python/mypy/pull/16221))
+ * __qualname__ and __module__ are available in class bodies (Anthony Sottile, PR [16215](https://github.com/python/mypy/pull/16215))
+ * tests: avoid leaving artifacts in the source tree (Eli Schwartz, PR [16201](https://github.com/python/mypy/pull/16201))
+ * Add meta test for new diff logic (Shantanu, PR [16211](https://github.com/python/mypy/pull/16211))
+ * stubtest: hint when args in stub need to be keyword-only (Alex Waygood, PR [16210](https://github.com/python/mypy/pull/16210))
+ * tuple slice should not propagate fallback (Thomas Grainger, PR [16154](https://github.com/python/mypy/pull/16154))
+ * Fix cases of type object handling for overloads (Shantanu, PR [16168](https://github.com/python/mypy/pull/16168))
+ * Fix error code on "Maybe you forgot to use await" note (Jelle Zijlstra, PR [16203](https://github.com/python/mypy/pull/16203))
+ * Add more tests for variadic Callables (Ivan Levkivskyi, PR [16198](https://github.com/python/mypy/pull/16198))
+ * Fix walrus interaction with empty collections (Ivan Levkivskyi, PR [16197](https://github.com/python/mypy/pull/16197))
+ * Better support for variadic calls and indexing (Ivan Levkivskyi, PR [16131](https://github.com/python/mypy/pull/16131))
+ * Use type variable bound when it appears as actual during inference (Ivan Levkivskyi, PR [16178](https://github.com/python/mypy/pull/16178))
+ * Use upper bounds as fallback solutions for inference (Ivan Levkivskyi, PR [16184](https://github.com/python/mypy/pull/16184))
+ * Special-case type inference of empty collections (Ivan Levkivskyi, PR [16122](https://github.com/python/mypy/pull/16122))
+ * stubgen: multiple fixes to the generated imports (Ali Hamdan, PR [15624](https://github.com/python/mypy/pull/15624))
+ * Fix typo in dataclasses.py (Ikko Eltociear Ashimine, PR [16173](https://github.com/python/mypy/pull/16173))
+ * Remove `is_classmethod_class` slot from `CallableType` (Nikita Sobolev, PR [16151](https://github.com/python/mypy/pull/16151))
+ * Do not consider `import a.b as b` an explicit reexport (Anders Kaseorg, PR [14086](https://github.com/python/mypy/pull/14086))
+ * Fix crash on dataclass field / property collision (Nikita Sobolev, PR [16147](https://github.com/python/mypy/pull/16147))
+ * Fix inference for overloaded __call__ with generic self (Shantanu, PR [16053](https://github.com/python/mypy/pull/16053))
+ * Call dynamic class hook on generic classes (Petter Friberg, PR [16052](https://github.com/python/mypy/pull/16052))
+ * Preserve implicitly exported types via attribute access (Shantanu, PR [16129](https://github.com/python/mypy/pull/16129))
+ * Make it easier to copy commands from docs README (Hamir Mahal, PR [16133](https://github.com/python/mypy/pull/16133))
+ * Use comments in issue template (Hamir Mahal, PR [15742](https://github.com/python/mypy/pull/15742))
+ * dataclass.replace: allow transformed classes (Ilya Priven, PR [15915](https://github.com/python/mypy/pull/15915))
+ * Fix the newly-uncovered stubtest bug (Alex Waygood)
+ * Fix crash on star unpack in TypedDict (Ivan Levkivskyi, PR [16116](https://github.com/python/mypy/pull/16116))
+ * stubgen: generate valid dataclass stubs (Ali Hamdan, PR [15625](https://github.com/python/mypy/pull/15625))
+ * Fix crash on malformed TypedDict in incremental mode (Ivan Levkivskyi, PR [16115](https://github.com/python/mypy/pull/16115))
+ * Better diffs in tests (Shantanu, PR [16112](https://github.com/python/mypy/pull/16112))
+ * Fix tuple[Any, ...] subtyping (Shantanu, PR [16108](https://github.com/python/mypy/pull/16108))
+ * Lenient handling of trivial Callable suffixes (Ivan Levkivskyi, PR [15913](https://github.com/python/mypy/pull/15913))
+ * Subtyping and inference of user defined variadic types (Ivan Levkivskyi, PR [16076](https://github.com/python/mypy/pull/16076))
+ * [mypyc] Make tuple packing and unpacking more efficient (Jukka Lehtosalo, PR [16022](https://github.com/python/mypy/pull/16022))
+ * Allow TypedDict unpacking in Callable types (Ivan Levkivskyi, PR [16083](https://github.com/python/mypy/pull/16083))
+ * Add `add_overloaded_method_to_class` helper to `plugins/common.py` (Nikita Sobolev, PR [16038](https://github.com/python/mypy/pull/16038))
+ * Document and rename overload-overlap error code (Shantanu, PR [16074](https://github.com/python/mypy/pull/16074))
+ * Fix __post_init__() internal error (Ilya Priven, PR [16080](https://github.com/python/mypy/pull/16080))
+ * Differentiate between venv and tox setups in CONTRIBUTING.md (Matt Bogosian, PR [16067](https://github.com/python/mypy/pull/16067))
+ * Complete type analysis of variadic types (Ivan Levkivskyi, PR [15991](https://github.com/python/mypy/pull/15991))
+ * Build the docs in CI for all PRs touching the `mypy/` directory (Alex Waygood, PR [16068](https://github.com/python/mypy/pull/16068))
+ * Introduce error category [unsafe-overload] (Randolf Scholz, PR [16061](https://github.com/python/mypy/pull/16061))
+ * Add docs about `--force-uppercase-builtins` and `--force-union-syntax` (Nikita Sobolev, PR [16049](https://github.com/python/mypy/pull/16049))
+ * Document `force_union_syntax` and `force_uppercase_builtins` (Nikita Sobolev, PR [16048](https://github.com/python/mypy/pull/16048))
+ * Use latest `actions/checkout@v4` (Nikita Sobolev, PR [16042](https://github.com/python/mypy/pull/16042))
+ * Do not use deprecated `add_method` in `attrs` plugin (Nikita Sobolev, PR [16037](https://github.com/python/mypy/pull/16037))
+ * Remove type aliases that are long supported (Nikita Sobolev, PR [16039](https://github.com/python/mypy/pull/16039))
+ * Add type annotations to `test-data/unit/plugins` (Nikita Sobolev, PR [16028](https://github.com/python/mypy/pull/16028))
+ * Bundle `misc/proper_plugin.py` as a part of `mypy` (Nikita Sobolev, PR [16036](https://github.com/python/mypy/pull/16036))
+ * Do not set `is_final` twice for `FuncBase` subclasses (Nikita Sobolev, PR [16030](https://github.com/python/mypy/pull/16030))
+ * Exclude `assert False` from coverage (Nikita Sobolev, PR [16026](https://github.com/python/mypy/pull/16026))
+ * ruff: add pyupgrade (Ilya Priven, PR [16023](https://github.com/python/mypy/pull/16023))
+ * Document we're not tracking relationships between symbols (Ilya Priven, PR [16018](https://github.com/python/mypy/pull/16018))
+ * meta tests: refactor run_pytest (Ilya Priven, PR [15481](https://github.com/python/mypy/pull/15481))
+ * docs: document dataclass_transform behavior (Ilya Priven, PR [16017](https://github.com/python/mypy/pull/16017))
+ * Fix crash with report generation on namespace packages (again) (Shantanu, PR [16019](https://github.com/python/mypy/pull/16019))
+ * Reword the error message related to void functions (Albert Tugushev, PR [15876](https://github.com/python/mypy/pull/15876))
+ * Fix case Any() in match statement (DS/Charlie, PR [14479](https://github.com/python/mypy/pull/14479))
+ * Fix crash when parsing error code config with typo (Shantanu, PR [16005](https://github.com/python/mypy/pull/16005))
+ * Deduplicate iterable logic (Shantanu, PR [16006](https://github.com/python/mypy/pull/16006))
+ * Represent bottom type as Never in messages (Shantanu, PR [15996](https://github.com/python/mypy/pull/15996))
+ * Add hint for AsyncIterator incompatible return type (Ilya Priven, PR [15883](https://github.com/python/mypy/pull/15883))
+ * Fix inference for properties with __call__ (Shantanu, PR [15926](https://github.com/python/mypy/pull/15926))
+ * attrs: remove fields type check (Ilya Priven, PR [15983](https://github.com/python/mypy/pull/15983))
+ * dataclasses.replace: fall through to typeshed sig (Ilya Priven, PR [15962](https://github.com/python/mypy/pull/15962))
+ * Improve GitHub Actions specs (Nikita Sobolev, PR [15965](https://github.com/python/mypy/pull/15965))
+ * attrs, dataclasses: don't enforce slots when base doesn't (Ilya Priven, PR [15976](https://github.com/python/mypy/pull/15976))
 
 #### Acknowledgements
 ...

From ed7cc08db1ac78d4ff7191eddaa907e2f96b6ffd Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Thu, 9 Nov 2023 14:49:34 +0000
Subject: [PATCH 234/288] Updates to 1.7 changelog (#16435)

Add sections for related changes. Remove some internal changes and make
other small tweaks.
---
 CHANGELOG.md | 198 +++++++++++++++++++++++++++------------------------
 1 file changed, 104 insertions(+), 94 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ccd161520e61..68efa2cbfc46 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,129 +14,139 @@ You can read the full documentation for this release on [Read the Docs](http://m
 
 Stubgen will now include `__all__` in its output if it is in the input file (PR [16356](https://github.com/python/mypy/pull/16356)).
 
-#### Other Notable Changes and Fixes
+#### TypeVarTuple and Unpack Support Enabled
+
+TODO: Explain
+
+TypeVarTuple was implemented by Jared Hance and Ivan Levkivskyi over several mypy releases, with help from Jukka Lehtosalo.
 
+Changes included in this release:
+
+ * Enable Unpack/TypeVarTuple support (Ivan Levkivskyi, PR [16354](https://github.com/python/mypy/pull/16354))
  * Fix crash on unpack call special-casing (Ivan Levkivskyi, PR [16381](https://github.com/python/mypy/pull/16381))
- * Fix file reloading in dmypy with --export-types (Ivan Levkivskyi, PR [16359](https://github.com/python/mypy/pull/16359))
+ * Some final touches for variadic types support (Ivan Levkivskyi, PR [16334](https://github.com/python/mypy/pull/16334))
+ * Support PEP-646 and PEP-692 in the same callable (Ivan Levkivskyi, PR [16294](https://github.com/python/mypy/pull/16294))
+ * Support fancy new syntax for variadic types (Ivan Levkivskyi, PR [16242](https://github.com/python/mypy/pull/16242))
+ * Correctly handle variadic instances with empty arguments (Ivan Levkivskyi, PR [16238](https://github.com/python/mypy/pull/16238))
+ * Correctly handle runtime type applications of variadic types (Ivan Levkivskyi, PR [16240](https://github.com/python/mypy/pull/16240))
+ * Support variadic tuple packing/unpacking (Ivan Levkivskyi, PR [16205](https://github.com/python/mypy/pull/16205))
+ * Better support for variadic calls and indexing (Ivan Levkivskyi, PR [16131](https://github.com/python/mypy/pull/16131))
+ * Subtyping and inference of user defined variadic types (Ivan Levkivskyi, PR [16076](https://github.com/python/mypy/pull/16076))
+ * Complete type analysis of variadic types (Ivan Levkivskyi, PR [15991](https://github.com/python/mypy/pull/15991))
+ * Allow TypedDict unpacking in Callable types (Ivan Levkivskyi, PR [16083](https://github.com/python/mypy/pull/16083))
+
+#### Major Changes
+
+TODO: Write sections about these?
+
+ * Enable new type inference by default (Ivan Levkivskyi, PR [16345](https://github.com/python/mypy/pull/16345))
+ * Narrow tuple types using len() (Ivan Levkivskyi, PR [16237](https://github.com/python/mypy/pull/16237))
+ * Do not consider `import a.b as b` an explicit reexport (Anders Kaseorg, PR [14086](https://github.com/python/mypy/pull/14086))
+ * Add an extra for mypyc dependencies (Shantanu, PR [16229](https://github.com/python/mypy/pull/16229))
+ * Add a changelog (Shantanu, PR [16280](https://github.com/python/mypy/pull/16280))
+
+#### Mypy Daemon Improvements
+
  * Fix daemon crash caused by deleted submodule (Jukka Lehtosalo, PR [16370](https://github.com/python/mypy/pull/16370))
- * Enable Unpack/TypeVarTuple support (Ivan Levkivskyi, PR [16354](https://github.com/python/mypy/pull/16354))
+ * Fix file reloading in dmypy with --export-types (Ivan Levkivskyi, PR [16359](https://github.com/python/mypy/pull/16359))
  * Fix dmypy inspect on Windows (Ivan Levkivskyi, PR [16355](https://github.com/python/mypy/pull/16355))
  * Fix dmypy inspect for namespace packages (Ivan Levkivskyi, PR [16357](https://github.com/python/mypy/pull/16357))
- * Fix incremental crash on TypedDict in method (Ivan Levkivskyi, PR [16364](https://github.com/python/mypy/pull/16364))
- * doc: remove duplicate word (dinaldoap, PR [16365](https://github.com/python/mypy/pull/16365))
+ * Fix return type change to optional in generic function (Jukka Lehtosalo, PR [16342](https://github.com/python/mypy/pull/16342))
+ * Fix daemon false positives related to module-level __getattr__ (Jukka Lehtosalo, PR [16292](https://github.com/python/mypy/pull/16292))
+ * Attempt to fix daemon crash related to ABCs (Jukka Lehtosalo, PR [16275](https://github.com/python/mypy/pull/16275))
+ * Stream dmypy output instead of dumping everything at the end (Valentin Stanciu, PR [16252](https://github.com/python/mypy/pull/16252))
+ * Show dmypy errors post serving (Valentin Stanciu, PR [16250](https://github.com/python/mypy/pull/16250))
+
+#### Mypyc Improvements
+
+ * Generate error on duplicate function definitions (Jukka Lehtosalo, PR [16309](https://github.com/python/mypy/pull/16309))
+ * Don't crash on unreachable statements (Jukka Lehtosalo, PR [16311](https://github.com/python/mypy/pull/16311))
+ * Avoid cyclic reference in nested functions (Jukka Lehtosalo, PR [16268](https://github.com/python/mypy/pull/16268))
+ * Fix direct __dict__ access on inner functions in new Python (Shantanu, PR [16084](https://github.com/python/mypy/pull/16084))
+ * Make tuple packing and unpacking more efficient (Jukka Lehtosalo, PR [16022](https://github.com/python/mypy/pull/16022))
+
+#### Improvements to Error Reporting
+
+ * Update starred expr error message to match Python's (Cibin Mathew, PR [16304](https://github.com/python/mypy/pull/16304))
+ * Add `unimported-reveal` error code (Nikita Sobolev, PR [16271](https://github.com/python/mypy/pull/16271))
+ * Fix error code on "Maybe you forgot to use await" note (Jelle Zijlstra, PR [16203](https://github.com/python/mypy/pull/16203))
+ * Introduce error category [unsafe-overload] (Randolf Scholz, PR [16061](https://github.com/python/mypy/pull/16061))
+ * Reword the error message related to void functions (Albert Tugushev, PR [15876](https://github.com/python/mypy/pull/15876))
+ * Represent bottom type as Never in messages (Shantanu, PR [15996](https://github.com/python/mypy/pull/15996))
+ * Add hint for AsyncIterator incompatible return type (Ilya Priven, PR [15883](https://github.com/python/mypy/pull/15883))
+ * Remove stubs packages from `stubinfo.py` where the runtime package has added a `py.typed` file (Alex Waygood, PR [16226](https://github.com/python/mypy/pull/16226))
+
+#### Performance Improvements
+
  * Speed up type argument checking (Jukka Lehtosalo, PR [16353](https://github.com/python/mypy/pull/16353))
- * Avoid importing from setuptools._distutils (Shantanu, PR [16348](https://github.com/python/mypy/pull/16348))
  * Add fast path for checking self types (Jukka Lehtosalo, PR [16352](https://github.com/python/mypy/pull/16352))
  * Cache information about whether file is typeshed file (Jukka Lehtosalo, PR [16351](https://github.com/python/mypy/pull/16351))
- * Some final touches for variadic types support (Ivan Levkivskyi, PR [16334](https://github.com/python/mypy/pull/16334))
  * Skip expensive repr() in logging call when not needed (Jukka Lehtosalo, PR [16350](https://github.com/python/mypy/pull/16350))
- * Enable new type inference by default (Ivan Levkivskyi, PR [16345](https://github.com/python/mypy/pull/16345))
+
+#### Attrs and Dataclass Improvements
+
+ * dataclass.replace: allow transformed classes (Ilya Priven, PR [15915](https://github.com/python/mypy/pull/15915))
+ * docs: document dataclass_transform behavior (Ilya Priven, PR [16017](https://github.com/python/mypy/pull/16017))
+ * dataclasses.replace: fall through to typeshed sig (Ilya Priven, PR [15962](https://github.com/python/mypy/pull/15962))
+ * attrs: remove fields type check (Ilya Priven, PR [15983](https://github.com/python/mypy/pull/15983))
+ * attrs, dataclasses: don't enforce slots when base doesn't (Ilya Priven, PR [15976](https://github.com/python/mypy/pull/15976))
+
+#### Stubgen Improvements
+
+ * Write stubs with utf-8 encoding (Jørgen Lind, PR [16329](https://github.com/python/mypy/pull/16329))
+ * stubgen: fix missing property setter in semantic analysis mode (Ali Hamdan, PR [16303](https://github.com/python/mypy/pull/16303))
+ * stubgen: unify C extension and pure python stub generators with object oriented design (Chad Dombrova, PR [15770](https://github.com/python/mypy/pull/15770))
+ * stubgen: multiple fixes to the generated imports (Ali Hamdan, PR [15624](https://github.com/python/mypy/pull/15624))
+ * stubgen: generate valid dataclass stubs (Ali Hamdan, PR [15625](https://github.com/python/mypy/pull/15625))
+
+#### Fixes to Crashes
+
+ * Fix incremental crash on TypedDict in method (Ivan Levkivskyi, PR [16364](https://github.com/python/mypy/pull/16364))
+ * Fix crash on dataclass field / property collision (Nikita Sobolev, PR [16147](https://github.com/python/mypy/pull/16147))
+ * Fix crash on star unpack in TypedDict (Ivan Levkivskyi, PR [16116](https://github.com/python/mypy/pull/16116))
+ * Fix crash on malformed TypedDict in incremental mode (Ivan Levkivskyi, PR [16115](https://github.com/python/mypy/pull/16115))
+ * Fix crash with report generation on namespace packages (again) (Shantanu, PR [16019](https://github.com/python/mypy/pull/16019))
+ * Fix crash when parsing error code config with typo (Shantanu, PR [16005](https://github.com/python/mypy/pull/16005))
+ * Fix `__post_init__()` internal error (Ilya Priven, PR [16080](https://github.com/python/mypy/pull/16080))
+
+#### Documentation Updates
+
+ * Make it easier to copy commands from docs README (Hamir Mahal, PR [16133](https://github.com/python/mypy/pull/16133))
+ * Document and rename overload-overlap error code (Shantanu, PR [16074](https://github.com/python/mypy/pull/16074))
+ * Add docs about `--force-uppercase-builtins` and `--force-union-syntax` (Nikita Sobolev, PR [16049](https://github.com/python/mypy/pull/16049))
+ * Document `force_union_syntax` and `force_uppercase_builtins` (Nikita Sobolev, PR [16048](https://github.com/python/mypy/pull/16048))
+ * Document we're not tracking relationships between symbols (Ilya Priven, PR [16018](https://github.com/python/mypy/pull/16018))
+
+#### Other Notable Changes and Fixes
+
+ * Avoid importing from setuptools._distutils (Shantanu, PR [16348](https://github.com/python/mypy/pull/16348))
  * Delete recursive aliases flags (Ivan Levkivskyi, PR [16346](https://github.com/python/mypy/pull/16346))
- * Update starred expr error message to match Python's (Cibin Mathew, PR [16304](https://github.com/python/mypy/pull/16304))
  * Properly use proper subtyping for callables (Ivan Levkivskyi, PR [16343](https://github.com/python/mypy/pull/16343))
- * Write stubs with utf-8 encoding (Jørgen Lind, PR [16329](https://github.com/python/mypy/pull/16329))
- * Support PEP-646 and PEP-692 in the same callable (Ivan Levkivskyi, PR [16294](https://github.com/python/mypy/pull/16294))
  * Use upper bound as inference fallback more consistently (Ivan Levkivskyi, PR [16344](https://github.com/python/mypy/pull/16344))
- * [daemon] Fix return type change to optional in generic function (Jukka Lehtosalo, PR [16342](https://github.com/python/mypy/pull/16342))
- * [mypyc] Generate error on duplicate function definitions (Jukka Lehtosalo, PR [16309](https://github.com/python/mypy/pull/16309))
- * Run macOS mypyc tests with Python 3.9 (Shantanu, PR [16326](https://github.com/python/mypy/pull/16326))
- * Fix sdist build by not including CHANGELOG.md (Jukka Lehtosalo, PR [16323](https://github.com/python/mypy/pull/16323))
  * Add `|=` and `|` operators support for `TypedDict` (Nikita Sobolev, PR [16249](https://github.com/python/mypy/pull/16249))
  * Clarify variance convention for Parameters (Ivan Levkivskyi, PR [16302](https://github.com/python/mypy/pull/16302))
- * refactor: `__str__` in `CFG` class (#16307) (Ihor, PR [16308](https://github.com/python/mypy/pull/16308))
- * [mypyc] Don't crash on unreachable statements (Jukka Lehtosalo, PR [16311](https://github.com/python/mypy/pull/16311))
- * stubgen: fix missing property setter in semantic analysis mode (Ali Hamdan, PR [16303](https://github.com/python/mypy/pull/16303))
- * Narrow tuple types using len() (Ivan Levkivskyi, PR [16237](https://github.com/python/mypy/pull/16237))
- * Lock test dependencies (Shantanu, PR [16283](https://github.com/python/mypy/pull/16283))
  * Correctly recognize `typing_extensions.NewType` (Ganden Schaffner, PR [16298](https://github.com/python/mypy/pull/16298))
- * fix: remove redundant `.format()` (Ihor, PR [16288](https://github.com/python/mypy/pull/16288))
- * Fix daemon false positives related to module-level __getattr__ (Jukka Lehtosalo, PR [16292](https://github.com/python/mypy/pull/16292))
- * [mypyc] Avoid cyclic reference in nested functions (Jukka Lehtosalo, PR [16268](https://github.com/python/mypy/pull/16268))
- * Add `unimported-reveal` error code (Nikita Sobolev, PR [16271](https://github.com/python/mypy/pull/16271))
- * Add a changelog (Shantanu, PR [16280](https://github.com/python/mypy/pull/16280))
- * Support fancy new syntax for variadic types (Ivan Levkivskyi, PR [16242](https://github.com/python/mypy/pull/16242))
- * Attempt to fix daemon crash related to ABCs (Jukka Lehtosalo, PR [16275](https://github.com/python/mypy/pull/16275))
- * Bump test deps: `ruff` and `pre-commit-hooks` (Nikita Sobolev, PR [16273](https://github.com/python/mypy/pull/16273))
- * Correctly handle variadic instances with empty arguments (Ivan Levkivskyi, PR [16238](https://github.com/python/mypy/pull/16238))
- * Stream dmypy output instead of dumping everything at the end (Valentin Stanciu, PR [16252](https://github.com/python/mypy/pull/16252))
- * stubgen: unify C extension and pure python stub generators with object oriented design (Chad Dombrova, PR [15770](https://github.com/python/mypy/pull/15770))
- * Correctly handle runtime type applications of variadic types (Ivan Levkivskyi, PR [16240](https://github.com/python/mypy/pull/16240))
- * [mypyc] Fix direct __dict__ access on inner functions in new Python (Shantanu, PR [16084](https://github.com/python/mypy/pull/16084))
- * Fix `coverage` config (Alex Waygood, PR [16258](https://github.com/python/mypy/pull/16258))
- * show dmypy errors post serving (Valentin Stanciu, PR [16250](https://github.com/python/mypy/pull/16250))
  * Fix partially defined in the case of missing type maps (Shantanu, PR [15995](https://github.com/python/mypy/pull/15995))
- * (🎁) drop 'dev' from 3.12 in the CI (KotlinIsland, PR [16239](https://github.com/python/mypy/pull/16239))
- * Add an extra for mypyc dependencies (Shantanu, PR [16229](https://github.com/python/mypy/pull/16229))
  * Use SPDX license identifier (Nikita Sobolev, PR [16230](https://github.com/python/mypy/pull/16230))
- * Support variadic tuple packing/unpacking (Ivan Levkivskyi, PR [16205](https://github.com/python/mypy/pull/16205))
- * Remove stubs packages from `stubinfo.py` where the runtime package has added a `py.typed` file (Alex Waygood, PR [16226](https://github.com/python/mypy/pull/16226))
- * Bump ruff and black to their latest versions (Alex Waygood, PR [16221](https://github.com/python/mypy/pull/16221))
- * __qualname__ and __module__ are available in class bodies (Anthony Sottile, PR [16215](https://github.com/python/mypy/pull/16215))
- * tests: avoid leaving artifacts in the source tree (Eli Schwartz, PR [16201](https://github.com/python/mypy/pull/16201))
- * Add meta test for new diff logic (Shantanu, PR [16211](https://github.com/python/mypy/pull/16211))
- * stubtest: hint when args in stub need to be keyword-only (Alex Waygood, PR [16210](https://github.com/python/mypy/pull/16210))
- * tuple slice should not propagate fallback (Thomas Grainger, PR [16154](https://github.com/python/mypy/pull/16154))
+ * `__qualname__` and `__module__` are available in class bodies (Anthony Sottile, PR [16215](https://github.com/python/mypy/pull/16215))
+ * stubtest: Hint when args in stub need to be keyword-only (Alex Waygood, PR [16210](https://github.com/python/mypy/pull/16210))
+ * Tuple slice should not propagate fallback (Thomas Grainger, PR [16154](https://github.com/python/mypy/pull/16154))
  * Fix cases of type object handling for overloads (Shantanu, PR [16168](https://github.com/python/mypy/pull/16168))
- * Fix error code on "Maybe you forgot to use await" note (Jelle Zijlstra, PR [16203](https://github.com/python/mypy/pull/16203))
- * Add more tests for variadic Callables (Ivan Levkivskyi, PR [16198](https://github.com/python/mypy/pull/16198))
  * Fix walrus interaction with empty collections (Ivan Levkivskyi, PR [16197](https://github.com/python/mypy/pull/16197))
- * Better support for variadic calls and indexing (Ivan Levkivskyi, PR [16131](https://github.com/python/mypy/pull/16131))
  * Use type variable bound when it appears as actual during inference (Ivan Levkivskyi, PR [16178](https://github.com/python/mypy/pull/16178))
  * Use upper bounds as fallback solutions for inference (Ivan Levkivskyi, PR [16184](https://github.com/python/mypy/pull/16184))
  * Special-case type inference of empty collections (Ivan Levkivskyi, PR [16122](https://github.com/python/mypy/pull/16122))
- * stubgen: multiple fixes to the generated imports (Ali Hamdan, PR [15624](https://github.com/python/mypy/pull/15624))
- * Fix typo in dataclasses.py (Ikko Eltociear Ashimine, PR [16173](https://github.com/python/mypy/pull/16173))
- * Remove `is_classmethod_class` slot from `CallableType` (Nikita Sobolev, PR [16151](https://github.com/python/mypy/pull/16151))
- * Do not consider `import a.b as b` an explicit reexport (Anders Kaseorg, PR [14086](https://github.com/python/mypy/pull/14086))
- * Fix crash on dataclass field / property collision (Nikita Sobolev, PR [16147](https://github.com/python/mypy/pull/16147))
- * Fix inference for overloaded __call__ with generic self (Shantanu, PR [16053](https://github.com/python/mypy/pull/16053))
+ * Fix inference for overloaded `__call__` with generic self (Shantanu, PR [16053](https://github.com/python/mypy/pull/16053))
  * Call dynamic class hook on generic classes (Petter Friberg, PR [16052](https://github.com/python/mypy/pull/16052))
  * Preserve implicitly exported types via attribute access (Shantanu, PR [16129](https://github.com/python/mypy/pull/16129))
- * Make it easier to copy commands from docs README (Hamir Mahal, PR [16133](https://github.com/python/mypy/pull/16133))
- * Use comments in issue template (Hamir Mahal, PR [15742](https://github.com/python/mypy/pull/15742))
- * dataclass.replace: allow transformed classes (Ilya Priven, PR [15915](https://github.com/python/mypy/pull/15915))
- * Fix the newly-uncovered stubtest bug (Alex Waygood)
- * Fix crash on star unpack in TypedDict (Ivan Levkivskyi, PR [16116](https://github.com/python/mypy/pull/16116))
- * stubgen: generate valid dataclass stubs (Ali Hamdan, PR [15625](https://github.com/python/mypy/pull/15625))
- * Fix crash on malformed TypedDict in incremental mode (Ivan Levkivskyi, PR [16115](https://github.com/python/mypy/pull/16115))
- * Better diffs in tests (Shantanu, PR [16112](https://github.com/python/mypy/pull/16112))
- * Fix tuple[Any, ...] subtyping (Shantanu, PR [16108](https://github.com/python/mypy/pull/16108))
+ * Fix a stubtest bug (Alex Waygood)
+ * Fix `tuple[Any, ...]` subtyping (Shantanu, PR [16108](https://github.com/python/mypy/pull/16108))
  * Lenient handling of trivial Callable suffixes (Ivan Levkivskyi, PR [15913](https://github.com/python/mypy/pull/15913))
- * Subtyping and inference of user defined variadic types (Ivan Levkivskyi, PR [16076](https://github.com/python/mypy/pull/16076))
- * [mypyc] Make tuple packing and unpacking more efficient (Jukka Lehtosalo, PR [16022](https://github.com/python/mypy/pull/16022))
- * Allow TypedDict unpacking in Callable types (Ivan Levkivskyi, PR [16083](https://github.com/python/mypy/pull/16083))
  * Add `add_overloaded_method_to_class` helper to `plugins/common.py` (Nikita Sobolev, PR [16038](https://github.com/python/mypy/pull/16038))
- * Document and rename overload-overlap error code (Shantanu, PR [16074](https://github.com/python/mypy/pull/16074))
- * Fix __post_init__() internal error (Ilya Priven, PR [16080](https://github.com/python/mypy/pull/16080))
- * Differentiate between venv and tox setups in CONTRIBUTING.md (Matt Bogosian, PR [16067](https://github.com/python/mypy/pull/16067))
- * Complete type analysis of variadic types (Ivan Levkivskyi, PR [15991](https://github.com/python/mypy/pull/15991))
- * Build the docs in CI for all PRs touching the `mypy/` directory (Alex Waygood, PR [16068](https://github.com/python/mypy/pull/16068))
- * Introduce error category [unsafe-overload] (Randolf Scholz, PR [16061](https://github.com/python/mypy/pull/16061))
- * Add docs about `--force-uppercase-builtins` and `--force-union-syntax` (Nikita Sobolev, PR [16049](https://github.com/python/mypy/pull/16049))
- * Document `force_union_syntax` and `force_uppercase_builtins` (Nikita Sobolev, PR [16048](https://github.com/python/mypy/pull/16048))
- * Use latest `actions/checkout@v4` (Nikita Sobolev, PR [16042](https://github.com/python/mypy/pull/16042))
- * Do not use deprecated `add_method` in `attrs` plugin (Nikita Sobolev, PR [16037](https://github.com/python/mypy/pull/16037))
- * Remove type aliases that are long supported (Nikita Sobolev, PR [16039](https://github.com/python/mypy/pull/16039))
- * Add type annotations to `test-data/unit/plugins` (Nikita Sobolev, PR [16028](https://github.com/python/mypy/pull/16028))
  * Bundle `misc/proper_plugin.py` as a part of `mypy` (Nikita Sobolev, PR [16036](https://github.com/python/mypy/pull/16036))
- * Do not set `is_final` twice for `FuncBase` subclasses (Nikita Sobolev, PR [16030](https://github.com/python/mypy/pull/16030))
- * Exclude `assert False` from coverage (Nikita Sobolev, PR [16026](https://github.com/python/mypy/pull/16026))
- * ruff: add pyupgrade (Ilya Priven, PR [16023](https://github.com/python/mypy/pull/16023))
- * Document we're not tracking relationships between symbols (Ilya Priven, PR [16018](https://github.com/python/mypy/pull/16018))
- * meta tests: refactor run_pytest (Ilya Priven, PR [15481](https://github.com/python/mypy/pull/15481))
- * docs: document dataclass_transform behavior (Ilya Priven, PR [16017](https://github.com/python/mypy/pull/16017))
- * Fix crash with report generation on namespace packages (again) (Shantanu, PR [16019](https://github.com/python/mypy/pull/16019))
- * Reword the error message related to void functions (Albert Tugushev, PR [15876](https://github.com/python/mypy/pull/15876))
  * Fix case Any() in match statement (DS/Charlie, PR [14479](https://github.com/python/mypy/pull/14479))
- * Fix crash when parsing error code config with typo (Shantanu, PR [16005](https://github.com/python/mypy/pull/16005))
  * Deduplicate iterable logic (Shantanu, PR [16006](https://github.com/python/mypy/pull/16006))
- * Represent bottom type as Never in messages (Shantanu, PR [15996](https://github.com/python/mypy/pull/15996))
- * Add hint for AsyncIterator incompatible return type (Ilya Priven, PR [15883](https://github.com/python/mypy/pull/15883))
- * Fix inference for properties with __call__ (Shantanu, PR [15926](https://github.com/python/mypy/pull/15926))
- * attrs: remove fields type check (Ilya Priven, PR [15983](https://github.com/python/mypy/pull/15983))
- * dataclasses.replace: fall through to typeshed sig (Ilya Priven, PR [15962](https://github.com/python/mypy/pull/15962))
- * Improve GitHub Actions specs (Nikita Sobolev, PR [15965](https://github.com/python/mypy/pull/15965))
- * attrs, dataclasses: don't enforce slots when base doesn't (Ilya Priven, PR [15976](https://github.com/python/mypy/pull/15976))
+ * Fix inference for properties with `__call__` (Shantanu, PR [15926](https://github.com/python/mypy/pull/15926))
 
 #### Acknowledgements
 ...

From a1648f555636a5e3aeb99dbf59b394e6939c1344 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Thu, 9 Nov 2023 15:00:29 +0000
Subject: [PATCH 235/288] Add typeshed and acknowledgements sections to 1.7
 changelog (#16436)

---
 CHANGELOG.md | 41 +++++++++++++++++++++++++++++++++++++++--
 1 file changed, 39 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 68efa2cbfc46..17b0084d6e5d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -62,7 +62,7 @@ TODO: Write sections about these?
  * Generate error on duplicate function definitions (Jukka Lehtosalo, PR [16309](https://github.com/python/mypy/pull/16309))
  * Don't crash on unreachable statements (Jukka Lehtosalo, PR [16311](https://github.com/python/mypy/pull/16311))
  * Avoid cyclic reference in nested functions (Jukka Lehtosalo, PR [16268](https://github.com/python/mypy/pull/16268))
- * Fix direct __dict__ access on inner functions in new Python (Shantanu, PR [16084](https://github.com/python/mypy/pull/16084))
+ * Fix direct `__dict__` access on inner functions in new Python (Shantanu, PR [16084](https://github.com/python/mypy/pull/16084))
  * Make tuple packing and unpacking more efficient (Jukka Lehtosalo, PR [16022](https://github.com/python/mypy/pull/16022))
 
 #### Improvements to Error Reporting
@@ -148,8 +148,45 @@ TODO: Write sections about these?
  * Deduplicate iterable logic (Shantanu, PR [16006](https://github.com/python/mypy/pull/16006))
  * Fix inference for properties with `__call__` (Shantanu, PR [15926](https://github.com/python/mypy/pull/15926))
 
+#### Typeshed Updates
+
+Please see [git log](https://github.com/python/typeshed/commits/main?after=4a854366e03dee700109f8e758a08b2457ea2f51+0&branch=main&path=stdlib) for full list of standard library typeshed stub changes.
+
 #### Acknowledgements
-...
+
+Thanks to all mypy contributors who contributed to this release:
+
+* Albert Tugushev
+* Alex Waygood
+* Ali Hamdan
+* Anders Kaseorg
+* Anthony Sottile
+* Chad Dombrova
+* Cibin Mathew
+* dinaldoap
+* DS/Charlie
+* Eli Schwartz
+* Ganden Schaffner
+* Hamir Mahal
+* Ihor
+* Ikko Eltociear Ashimine
+* Ilya Priven
+* Ivan Levkivskyi
+* Jelle Zijlstra
+* Jukka Lehtosalo
+* Jørgen Lind
+* KotlinIsland
+* Matt Bogosian
+* Nikita Sobolev
+* Petter Friberg
+* Randolf Scholz
+* Shantanu
+* Thomas Grainger
+* Valentin Stanciu
+
+I’d also like to thank my employer, Dropbox, for supporting mypy development.
+
+Posted by Jukka Lehtosalo
 
 ## Mypy 1.6
 

From cd1ce2fd396dc33c7d3b8049a830870e80d48e1b Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Thu, 9 Nov 2023 16:22:19 +0000
Subject: [PATCH 236/288] Improvements to changelog for mypy 1.7 (#16439)

Add sections covering bigger changes.
---
 CHANGELOG.md | 78 +++++++++++++++++++++++++++++++++++++++++++++-------
 1 file changed, 68 insertions(+), 10 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 17b0084d6e5d..2cc8da9db0de 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,8 @@
 
 ## Next release
 
+Stubgen will now include `__all__` in its output if it is in the input file (PR [16356](https://github.com/python/mypy/pull/16356)).
+
 ## Mypy 1.7 [unreleased]
 
 We’ve just uploaded mypy 1.7 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:
@@ -12,11 +14,32 @@ We’ve just uploaded mypy 1.7 to the Python Package Index ([PyPI](https://pypi.
 
 You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
 
-Stubgen will now include `__all__` in its output if it is in the input file (PR [16356](https://github.com/python/mypy/pull/16356)).
+#### Using TypedDict for `**kwargs` Typing
 
-#### TypeVarTuple and Unpack Support Enabled
+Mypy now has support for using `Unpack[...]` with a TypedDict type to annotate `**kwargs` arguments enabled by default. Example:
+
+```
+# Or 'from typing_extensions import ...'
+from typing import TypedDict, Unpack
 
-TODO: Explain
+class Person(TypedDict):
+    name: str
+    age: int
+
+def foo(**kwargs: Unpack[Person]) -> None:
+    ...
+
+foo(name="x", age=1)  # Ok
+foo(name=1)  # Error
+```
+
+Refer to [PEP 692](https://peps.python.org/pep-0692/) for the details.
+
+This was contributed by Ivan Levkivskyi back in 2022 ([PR 13471](https://github.com/python/mypy/pull/13471)).
+
+#### TypeVarTuple Support Enabled (Experimental)
+
+Mypy now has support for variadic generics (TypeVarTuple) enabled by default, as an experimental feature. Refer to [PEP 646](https://peps.python.org/pep-0646/) for the details.
 
 TypeVarTuple was implemented by Jared Hance and Ivan Levkivskyi over several mypy releases, with help from Jukka Lehtosalo.
 
@@ -35,15 +58,50 @@ Changes included in this release:
  * Complete type analysis of variadic types (Ivan Levkivskyi, PR [15991](https://github.com/python/mypy/pull/15991))
  * Allow TypedDict unpacking in Callable types (Ivan Levkivskyi, PR [16083](https://github.com/python/mypy/pull/16083))
 
-#### Major Changes
+#### New Way of Installing Mypyc Dependencies
+
+If you want to install package dependencies needed by mypyc (not just mypy), you should now install `mypy[mypyc]` instead of just `mypy`:
+
+```
+python3 -m pip install -U 'mypy[mypyc]'
+```
+
+Mypy has many more users than mypyc, so always installing mypyc dependencies would often bring unnecessary dependencies.
+
+This change was contributed by Shantanu (PR [16229](https://github.com/python/mypy/pull/16229)).
+
+#### New Rules for Re-exports
+
+Mypy no longer considers an import such as `import a.b as b` as an explicit re-export. The old behavior was arguably inconsistent and surprising. This may impact some stub packages, such as older versions of `types-six`. You can change the import to `from a import b as b`, if treating the import as a re-export was intentional.
+
+This change was contributed by Anders Kaseorg (PR [14086](https://github.com/python/mypy/pull/14086)).
+
+#### Improved Type Inference
+
+The new type inference algorithm that was recently introduced to mypy (but was not enabled by default) is now enabled by default. It improves type inference of calls to generic callables where an argument is also a generic callable, in particular. You can use `--old-type-inference` to disable the new behavior.
+
+The new algorithm can (rarely) produce different error messages, different error codes, or errors reported on different lines. This is more likely in cases where generic types were used incorrectly.
+
+The new type inference algorithm was contributed by Ivan Levkivskyi. PR [16345](https://github.com/python/mypy/pull/16345) enabled it by default.
+
+#### Narrowing Tuple Types Using len()
+
+Mypy now can narrow tuple types using `len()` checks. Example:
+
+```
+def f(t: tuple[int, int] | tuple[int, int, int]) -> None:
+    if len(t) == 2:
+        a, b = t   # Ok
+    ...
+```
+
+This feature was contributed by Ivan Levkivskyi (PR [16237](https://github.com/python/mypy/pull/16237)).
+
+#### Mypy Changelog
 
-TODO: Write sections about these?
+We now maintain a [changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) in the mypy Git repository. It mirrors the contents of [mypy release blog posts](https://mypy-lang.blogspot.com/). We will continue to also publish release blog posts. In the future, release blog posts will be created based on the changelog near a release date.
 
- * Enable new type inference by default (Ivan Levkivskyi, PR [16345](https://github.com/python/mypy/pull/16345))
- * Narrow tuple types using len() (Ivan Levkivskyi, PR [16237](https://github.com/python/mypy/pull/16237))
- * Do not consider `import a.b as b` an explicit reexport (Anders Kaseorg, PR [14086](https://github.com/python/mypy/pull/14086))
- * Add an extra for mypyc dependencies (Shantanu, PR [16229](https://github.com/python/mypy/pull/16229))
- * Add a changelog (Shantanu, PR [16280](https://github.com/python/mypy/pull/16280))
+This was contributed by Shantanu (PR [16280](https://github.com/python/mypy/pull/16280)).
 
 #### Mypy Daemon Improvements
 

From 1dc07b3b3d703f0b7b4564fac99461d0144d5870 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Thu, 9 Nov 2023 16:49:06 +0000
Subject: [PATCH 237/288] Minor tweaks to 1.7 changelog (#16440)

---
 CHANGELOG.md | 66 ++++++++++++++++++++++++++--------------------------
 1 file changed, 33 insertions(+), 33 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2cc8da9db0de..bbc8e4714423 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -49,14 +49,13 @@ Changes included in this release:
  * Fix crash on unpack call special-casing (Ivan Levkivskyi, PR [16381](https://github.com/python/mypy/pull/16381))
  * Some final touches for variadic types support (Ivan Levkivskyi, PR [16334](https://github.com/python/mypy/pull/16334))
  * Support PEP-646 and PEP-692 in the same callable (Ivan Levkivskyi, PR [16294](https://github.com/python/mypy/pull/16294))
- * Support fancy new syntax for variadic types (Ivan Levkivskyi, PR [16242](https://github.com/python/mypy/pull/16242))
+ * Support new `*` syntax for variadic types (Ivan Levkivskyi, PR [16242](https://github.com/python/mypy/pull/16242))
  * Correctly handle variadic instances with empty arguments (Ivan Levkivskyi, PR [16238](https://github.com/python/mypy/pull/16238))
  * Correctly handle runtime type applications of variadic types (Ivan Levkivskyi, PR [16240](https://github.com/python/mypy/pull/16240))
  * Support variadic tuple packing/unpacking (Ivan Levkivskyi, PR [16205](https://github.com/python/mypy/pull/16205))
  * Better support for variadic calls and indexing (Ivan Levkivskyi, PR [16131](https://github.com/python/mypy/pull/16131))
- * Subtyping and inference of user defined variadic types (Ivan Levkivskyi, PR [16076](https://github.com/python/mypy/pull/16076))
+ * Subtyping and inference of user-defined variadic types (Ivan Levkivskyi, PR [16076](https://github.com/python/mypy/pull/16076))
  * Complete type analysis of variadic types (Ivan Levkivskyi, PR [15991](https://github.com/python/mypy/pull/15991))
- * Allow TypedDict unpacking in Callable types (Ivan Levkivskyi, PR [16083](https://github.com/python/mypy/pull/16083))
 
 #### New Way of Installing Mypyc Dependencies
 
@@ -110,10 +109,10 @@ This was contributed by Shantanu (PR [16280](https://github.com/python/mypy/pull
  * Fix dmypy inspect on Windows (Ivan Levkivskyi, PR [16355](https://github.com/python/mypy/pull/16355))
  * Fix dmypy inspect for namespace packages (Ivan Levkivskyi, PR [16357](https://github.com/python/mypy/pull/16357))
  * Fix return type change to optional in generic function (Jukka Lehtosalo, PR [16342](https://github.com/python/mypy/pull/16342))
- * Fix daemon false positives related to module-level __getattr__ (Jukka Lehtosalo, PR [16292](https://github.com/python/mypy/pull/16292))
- * Attempt to fix daemon crash related to ABCs (Jukka Lehtosalo, PR [16275](https://github.com/python/mypy/pull/16275))
+ * Fix daemon false positives related to module-level `__getattr__` (Jukka Lehtosalo, PR [16292](https://github.com/python/mypy/pull/16292))
+ * Fix daemon crash related to ABCs (Jukka Lehtosalo, PR [16275](https://github.com/python/mypy/pull/16275))
  * Stream dmypy output instead of dumping everything at the end (Valentin Stanciu, PR [16252](https://github.com/python/mypy/pull/16252))
- * Show dmypy errors post serving (Valentin Stanciu, PR [16250](https://github.com/python/mypy/pull/16250))
+ * Make sure all dmypy errors are shown (Valentin Stanciu, PR [16250](https://github.com/python/mypy/pull/16250))
 
 #### Mypyc Improvements
 
@@ -123,70 +122,70 @@ This was contributed by Shantanu (PR [16280](https://github.com/python/mypy/pull
  * Fix direct `__dict__` access on inner functions in new Python (Shantanu, PR [16084](https://github.com/python/mypy/pull/16084))
  * Make tuple packing and unpacking more efficient (Jukka Lehtosalo, PR [16022](https://github.com/python/mypy/pull/16022))
 
-#### Improvements to Error Reportong
+#### Improvements to Error Reporting
 
- * Update starred expr error message to match Python's (Cibin Mathew, PR [16304](https://github.com/python/mypy/pull/16304))
- * Add `unimported-reveal` error code (Nikita Sobolev, PR [16271](https://github.com/python/mypy/pull/16271))
- * Fix error code on "Maybe you forgot to use await" note (Jelle Zijlstra, PR [16203](https://github.com/python/mypy/pull/16203))
- * Introduce error category [unsafe-overload] (Randolf Scholz, PR [16061](https://github.com/python/mypy/pull/16061))
+ * Update starred expression error message to match CPython (Cibin Mathew, PR [16304](https://github.com/python/mypy/pull/16304))
+ * Fix error code of "Maybe you forgot to use await" note (Jelle Zijlstra, PR [16203](https://github.com/python/mypy/pull/16203))
+ * Use error code `[unsafe-overload]` for unsafe overloads, instead of `[misc]` (Randolf Scholz, PR [16061](https://github.com/python/mypy/pull/16061))
  * Reword the error message related to void functions (Albert Tugushev, PR [15876](https://github.com/python/mypy/pull/15876))
  * Represent bottom type as Never in messages (Shantanu, PR [15996](https://github.com/python/mypy/pull/15996))
  * Add hint for AsyncIterator incompatible return type (Ilya Priven, PR [15883](https://github.com/python/mypy/pull/15883))
- * Remove stubs packages from `stubinfo.py` where the runtime package has added a `py.typed` file (Alex Waygood, PR [16226](https://github.com/python/mypy/pull/16226))
+ * Don't suggest stubs packages where the runtime package now ships with types (Alex Waygood, PR [16226](https://github.com/python/mypy/pull/16226))
 
 #### Performance Improvements
 
  * Speed up type argument checking (Jukka Lehtosalo, PR [16353](https://github.com/python/mypy/pull/16353))
  * Add fast path for checking self types (Jukka Lehtosalo, PR [16352](https://github.com/python/mypy/pull/16352))
  * Cache information about whether file is typeshed file (Jukka Lehtosalo, PR [16351](https://github.com/python/mypy/pull/16351))
- * Skip expensive repr() in logging call when not needed (Jukka Lehtosalo, PR [16350](https://github.com/python/mypy/pull/16350))
+ * Skip expensive `repr()` in logging call when not needed (Jukka Lehtosalo, PR [16350](https://github.com/python/mypy/pull/16350))
 
 #### Attrs and Dataclass Improvements
 
- * dataclass.replace: allow transformed classes (Ilya Priven, PR [15915](https://github.com/python/mypy/pull/15915))
- * docs: document dataclass_transform behavior (Ilya Priven, PR [16017](https://github.com/python/mypy/pull/16017))
- * dataclasses.replace: fall through to typeshed sig (Ilya Priven, PR [15962](https://github.com/python/mypy/pull/15962))
- * attrs: remove fields type check (Ilya Priven, PR [15983](https://github.com/python/mypy/pull/15983))
- * attrs, dataclasses: don't enforce slots when base doesn't (Ilya Priven, PR [15976](https://github.com/python/mypy/pull/15976))
+ * `dataclass.replace`: Allow transformed classes (Ilya Priven, PR [15915](https://github.com/python/mypy/pull/15915))
+ * `dataclass.replace`: Fall through to typeshed signature (Ilya Priven, PR [15962](https://github.com/python/mypy/pull/15962))
+ * Document `dataclass_transform` behavior (Ilya Priven, PR [16017](https://github.com/python/mypy/pull/16017))
+ * `attrs`: Remove fields type check (Ilya Priven, PR [15983](https://github.com/python/mypy/pull/15983))
+ * `attrs`, `dataclasses`: Don't enforce slots when base class doesn't (Ilya Priven, PR [15976](https://github.com/python/mypy/pull/15976))
+ * Fix crash on dataclass field / property collision (Nikita Sobolev, PR [16147](https://github.com/python/mypy/pull/16147))
 
 #### Stubgen Improvements
 
  * Write stubs with utf-8 encoding (Jørgen Lind, PR [16329](https://github.com/python/mypy/pull/16329))
- * stubgen: fix missing property setter in semantic analysis mode (Ali Hamdan, PR [16303](https://github.com/python/mypy/pull/16303))
- * stubgen: unify C extension and pure python stub generators with object oriented design (Chad Dombrova, PR [15770](https://github.com/python/mypy/pull/15770))
- * stubgen: multiple fixes to the generated imports (Ali Hamdan, PR [15624](https://github.com/python/mypy/pull/15624))
- * stubgen: generate valid dataclass stubs (Ali Hamdan, PR [15625](https://github.com/python/mypy/pull/15625))
+ * Fix missing property setter in semantic analysis mode (Ali Hamdan, PR [16303](https://github.com/python/mypy/pull/16303))
+ * Unify C extension and pure python stub generators with object oriented design (Chad Dombrova, PR [15770](https://github.com/python/mypy/pull/15770))
+ * Multiple fixes to the generated imports (Ali Hamdan, PR [15624](https://github.com/python/mypy/pull/15624))
+ * Generate valid dataclass stubs (Ali Hamdan, PR [15625](https://github.com/python/mypy/pull/15625))
 
 #### Fixes to Crashes
 
- * Fix incremental crash on TypedDict in method (Ivan Levkivskyi, PR [16364](https://github.com/python/mypy/pull/16364))
- * Fix crash on dataclass field / property collision (Nikita Sobolev, PR [16147](https://github.com/python/mypy/pull/16147))
+ * Fix incremental mode crash on TypedDict in method (Ivan Levkivskyi, PR [16364](https://github.com/python/mypy/pull/16364))
  * Fix crash on star unpack in TypedDict (Ivan Levkivskyi, PR [16116](https://github.com/python/mypy/pull/16116))
  * Fix crash on malformed TypedDict in incremental mode (Ivan Levkivskyi, PR [16115](https://github.com/python/mypy/pull/16115))
- * Fix crash with report generation on namespace packages (again) (Shantanu, PR [16019](https://github.com/python/mypy/pull/16019))
+ * Fix crash with report generation on namespace packages (Shantanu, PR [16019](https://github.com/python/mypy/pull/16019))
  * Fix crash when parsing error code config with typo (Shantanu, PR [16005](https://github.com/python/mypy/pull/16005))
  * Fix `__post_init__()` internal error (Ilya Priven, PR [16080](https://github.com/python/mypy/pull/16080))
 
 #### Documentation Updates
 
- * Make it easier to copy commands from docs README (Hamir Mahal, PR [16133](https://github.com/python/mypy/pull/16133))
- * Document and rename overload-overlap error code (Shantanu, PR [16074](https://github.com/python/mypy/pull/16074))
- * Add docs about `--force-uppercase-builtins` and `--force-union-syntax` (Nikita Sobolev, PR [16049](https://github.com/python/mypy/pull/16049))
+ * Make it easier to copy commands from README (Hamir Mahal, PR [16133](https://github.com/python/mypy/pull/16133))
+ * Document and rename `[overload-overlap]` error code (Shantanu, PR [16074](https://github.com/python/mypy/pull/16074))
+ * Document `--force-uppercase-builtins` and `--force-union-syntax` (Nikita Sobolev, PR [16049](https://github.com/python/mypy/pull/16049))
  * Document `force_union_syntax` and `force_uppercase_builtins` (Nikita Sobolev, PR [16048](https://github.com/python/mypy/pull/16048))
  * Document we're not tracking relationships between symbols (Ilya Priven, PR [16018](https://github.com/python/mypy/pull/16018))
 
 #### Other Notable Changes and Fixes
 
- * Avoid importing from setuptools._distutils (Shantanu, PR [16348](https://github.com/python/mypy/pull/16348))
+ * Avoid importing from `setuptools._distutils` (Shantanu, PR [16348](https://github.com/python/mypy/pull/16348))
  * Delete recursive aliases flags (Ivan Levkivskyi, PR [16346](https://github.com/python/mypy/pull/16346))
  * Properly use proper subtyping for callables (Ivan Levkivskyi, PR [16343](https://github.com/python/mypy/pull/16343))
  * Use upper bound as inference fallback more consistently (Ivan Levkivskyi, PR [16344](https://github.com/python/mypy/pull/16344))
+ * Add `[unimported-reveal]` error code (Nikita Sobolev, PR [16271](https://github.com/python/mypy/pull/16271))
  * Add `|=` and `|` operators support for `TypedDict` (Nikita Sobolev, PR [16249](https://github.com/python/mypy/pull/16249))
  * Clarify variance convention for Parameters (Ivan Levkivskyi, PR [16302](https://github.com/python/mypy/pull/16302))
  * Correctly recognize `typing_extensions.NewType` (Ganden Schaffner, PR [16298](https://github.com/python/mypy/pull/16298))
  * Fix partially defined in the case of missing type maps (Shantanu, PR [15995](https://github.com/python/mypy/pull/15995))
  * Use SPDX license identifier (Nikita Sobolev, PR [16230](https://github.com/python/mypy/pull/16230))
- * `__qualname__` and `__module__` are available in class bodies (Anthony Sottile, PR [16215](https://github.com/python/mypy/pull/16215))
+ * Make `__qualname__` and `__module__` available in class bodies (Anthony Sottile, PR [16215](https://github.com/python/mypy/pull/16215))
  * stubtest: Hint when args in stub need to be keyword-only (Alex Waygood, PR [16210](https://github.com/python/mypy/pull/16210))
  * Tuple slice should not propagate fallback (Thomas Grainger, PR [16154](https://github.com/python/mypy/pull/16154))
  * Fix cases of type object handling for overloads (Shantanu, PR [16168](https://github.com/python/mypy/pull/16168))
@@ -194,16 +193,17 @@ This was contributed by Shantanu (PR [16280](https://github.com/python/mypy/pull
  * Use type variable bound when it appears as actual during inference (Ivan Levkivskyi, PR [16178](https://github.com/python/mypy/pull/16178))
  * Use upper bounds as fallback solutions for inference (Ivan Levkivskyi, PR [16184](https://github.com/python/mypy/pull/16184))
  * Special-case type inference of empty collections (Ivan Levkivskyi, PR [16122](https://github.com/python/mypy/pull/16122))
+ * Allow TypedDict unpacking in Callable types (Ivan Levkivskyi, PR [16083](https://github.com/python/mypy/pull/16083))
  * Fix inference for overloaded `__call__` with generic self (Shantanu, PR [16053](https://github.com/python/mypy/pull/16053))
  * Call dynamic class hook on generic classes (Petter Friberg, PR [16052](https://github.com/python/mypy/pull/16052))
  * Preserve implicitly exported types via attribute access (Shantanu, PR [16129](https://github.com/python/mypy/pull/16129))
  * Fix a stubtest bug (Alex Waygood)
  * Fix `tuple[Any, ...]` subtyping (Shantanu, PR [16108](https://github.com/python/mypy/pull/16108))
  * Lenient handling of trivial Callable suffixes (Ivan Levkivskyi, PR [15913](https://github.com/python/mypy/pull/15913))
- * Add `add_overloaded_method_to_class` helper to `plugins/common.py` (Nikita Sobolev, PR [16038](https://github.com/python/mypy/pull/16038))
+ * Add `add_overloaded_method_to_class` helper for plugins (Nikita Sobolev, PR [16038](https://github.com/python/mypy/pull/16038))
  * Bundle `misc/proper_plugin.py` as a part of `mypy` (Nikita Sobolev, PR [16036](https://github.com/python/mypy/pull/16036))
- * Fix case Any() in match statement (DS/Charlie, PR [14479](https://github.com/python/mypy/pull/14479))
- * Deduplicate iterable logic (Shantanu, PR [16006](https://github.com/python/mypy/pull/16006))
+ * Fix `case Any()` in match statement (DS/Charlie, PR [14479](https://github.com/python/mypy/pull/14479))
+ * Make iterable logic more consistent (Shantanu, PR [16006](https://github.com/python/mypy/pull/16006))
  * Fix inference for properties with `__call__` (Shantanu, PR [15926](https://github.com/python/mypy/pull/15926))
 
 #### Typeshed Updates

From b67dc53ad5bee837ea0de7ece9f9e14c634a923e Mon Sep 17 00:00:00 2001
From: robjhornby <robjhornby@gmail.com>
Date: Fri, 10 Nov 2023 00:59:16 +0000
Subject: [PATCH 238/288] Handle TypeVarTupleType when checking overload
 constraints (#16428)

Fixes https://github.com/python/mypy/issues/16427

The test case added in the first commit crashes.

The second commit addresses the crash - I don't know whether this fix is
correct, it just happens to stop the crash but it leads to a code branch
which just `continue`s out of a for loop iteration, so it might be
bypassing something it shouldn't. I don't completely understand it.

---------

Co-authored-by: Ivan Levkivskyi <levkivskyi@gmail.com>
---
 mypy/constraints.py                     |  2 +-
 test-data/unit/check-typevar-tuple.test | 18 ++++++++++++++++++
 2 files changed, 19 insertions(+), 1 deletion(-)

diff --git a/mypy/constraints.py b/mypy/constraints.py
index 49e542a49e56..88ede372e011 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -949,7 +949,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]:
             for item in actual.items:
                 if isinstance(item, UnpackType):
                     unpacked = get_proper_type(item.type)
-                    if isinstance(unpacked, TypeVarType):
+                    if isinstance(unpacked, TypeVarTupleType):
                         # Cannot infer anything for T from [T, ...] <: *Ts
                         continue
                     assert (
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index e85863f0ed04..25babf442d21 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -1789,6 +1789,24 @@ def test(a: Container[Any], b: Container[int], c: Container[str]):
     reveal_type(build(b, c))  # N: Revealed type is "__main__.Array[builtins.int, builtins.str]"
 [builtins fixtures/tuple.pyi]
 
+[case testTypeVarTupleOverloadArbitraryLength]
+from typing import Any, Tuple, TypeVar, TypeVarTuple, Unpack, overload
+
+T = TypeVar("T")
+Ts = TypeVarTuple("Ts")
+@overload
+def add(self: Tuple[Unpack[Ts]], other: Tuple[T]) -> Tuple[Unpack[Ts], T]:
+    ...
+@overload
+def add(self: Tuple[T, ...], other: Tuple[T, ...]) -> Tuple[T, ...]:
+    ...
+def add(self: Any, other: Any) -> Any:
+    ...
+def test(a: Tuple[int, str], b: Tuple[bool], c: Tuple[bool, ...]):
+    reveal_type(add(a, b))  # N: Revealed type is "Tuple[builtins.int, builtins.str, builtins.bool]"
+    reveal_type(add(b, c))  # N: Revealed type is "builtins.tuple[builtins.bool, ...]"
+[builtins fixtures/tuple.pyi]
+
 [case testTypeVarTupleIndexOldStyleNonNormalizedAndNonLiteral]
 from typing import Any, Tuple
 from typing_extensions import Unpack

From f05663d31e6d9097b152ecf5825b8d9dc50cea54 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 10 Nov 2023 11:32:05 +0000
Subject: [PATCH 239/288] Fix handling of tuple type context with unpacks
 (#16444)

Fixes https://github.com/python/mypy/issues/16425

Fix is straightforward.
---
 mypy/checkexpr.py                       | 10 +++++-----
 test-data/unit/check-typevar-tuple.test | 10 ++++++++++
 2 files changed, 15 insertions(+), 5 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index 056b2f7bd2c6..c87d1f6cd31c 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -4902,7 +4902,7 @@ def tuple_context_matches(self, expr: TupleExpr, ctx: TupleType) -> bool:
             return len([e for e in expr.items if not isinstance(e, StarExpr)]) <= len(ctx.items)
         # For variadic context, the only easy case is when structure matches exactly.
         # TODO: try using tuple type context in more cases.
-        if len([e for e in expr.items if not isinstance(e, StarExpr)]) != 1:
+        if len([e for e in expr.items if isinstance(e, StarExpr)]) != 1:
             return False
         expr_star_index = next(i for i, lv in enumerate(expr.items) if isinstance(lv, StarExpr))
         return len(expr.items) == len(ctx.items) and ctx_unpack_index == expr_star_index
@@ -4941,6 +4941,9 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type:
         if type_context_items is not None:
             unpack_in_context = find_unpack_in_list(type_context_items) is not None
         seen_unpack_in_items = False
+        allow_precise_tuples = (
+            unpack_in_context or PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature
+        )
 
         # Infer item types.  Give up if there's a star expression
         # that's not a Tuple.
@@ -4981,10 +4984,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type:
                         # result in an error later, just do something predictable here.
                         j += len(tt.items)
                 else:
-                    if (
-                        PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature
-                        and not seen_unpack_in_items
-                    ):
+                    if allow_precise_tuples and not seen_unpack_in_items:
                         # Handle (x, *y, z), where y is e.g. tuple[Y, ...].
                         if isinstance(tt, Instance) and self.chk.type_is_iterable(tt):
                             item_type = self.chk.iterable_item_type(tt, e)
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 25babf442d21..487f22699724 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -2225,3 +2225,13 @@ bar(*keys, 1)  # OK
 reveal_type(baz(keys, 1))  # N: Revealed type is "builtins.object"
 reveal_type(baz(*keys, 1))  # N: Revealed type is "builtins.int"
 [builtins fixtures/tuple.pyi]
+
+[case testVariadicTupleContextNoCrash]
+from typing import Tuple, Unpack
+
+x: Tuple[int, Unpack[Tuple[int, ...]]] = ()  # E: Incompatible types in assignment (expression has type "Tuple[()]", variable has type "Tuple[int, Unpack[Tuple[int, ...]]]")
+y: Tuple[int, Unpack[Tuple[int, ...]]] = (1, 2)
+z: Tuple[int, Unpack[Tuple[int, ...]]] = (1,)
+w: Tuple[int, Unpack[Tuple[int, ...]]] = (1, *[2, 3, 4])
+t: Tuple[int, Unpack[Tuple[int, ...]]] = (1, *(2, 3, 4))
+[builtins fixtures/tuple.pyi]

From ec9004677c4e1b749cab89494e60f0c6727d2162 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Fri, 10 Nov 2023 11:56:16 +0000
Subject: [PATCH 240/288] Changelog updates for mypy 1.7 (#16446)

Address feedback and include recently cherry-picked PRs.
---
 CHANGELOG.md | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index bbc8e4714423..9b3cd19e6df1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -33,7 +33,14 @@ foo(name="x", age=1)  # Ok
 foo(name=1)  # Error
 ```
 
-Refer to [PEP 692](https://peps.python.org/pep-0692/) for the details.
+The definition of `foo` above is equivalent to the one below, with keyword-only arguments `name` and `age`:
+
+```
+def foo(*, name: str, age: int) -> None:
+    ...
+```
+
+Refer to [PEP 692](https://peps.python.org/pep-0692/) for more information. Note that unlike in the current version of the PEP, mypy always treats signatures with `Unpack[SomeTypedDict]` as equivalent to their expanded forms with explicit keyword arguments, and there aren't special type checking rules for TypedDict arguments.
 
 This was contributed by Ivan Levkivskyi back in 2022 ([PR 13471](https://github.com/python/mypy/pull/13471)).
 
@@ -45,6 +52,8 @@ TypeVarTuple was implemented by Jared Hance and Ivan Levkivskyi over several myp
 
 Changes included in this release:
 
+ * Fix handling of tuple type context with unpacks (Ivan Levkivskyi, PR [16444](https://github.com/python/mypy/pull/16444))
+ * Handle TypeVarTuples when checking overload constraints (robjhornby, PR [16428](https://github.com/python/mypy/pull/16428))
  * Enable Unpack/TypeVarTuple support (Ivan Levkivskyi, PR [16354](https://github.com/python/mypy/pull/16354))
  * Fix crash on unpack call special-casing (Ivan Levkivskyi, PR [16381](https://github.com/python/mypy/pull/16381))
  * Some final touches for variadic types support (Ivan Levkivskyi, PR [16334](https://github.com/python/mypy/pull/16334))
@@ -96,6 +105,14 @@ def f(t: tuple[int, int] | tuple[int, int, int]) -> None:
 
 This feature was contributed by Ivan Levkivskyi (PR [16237](https://github.com/python/mypy/pull/16237)).
 
+#### More Precise Tuple Lenghts (Experimental)
+
+Mypy supports experimental, more precise checking of tuple type lengths through `--enable-incomplete-feature=PreciseTupleTypes`. Refer to the [documentation](https://mypy.readthedocs.io/en/latest/command_line.html#enabling-incomplete-experimental-features) for more information.
+
+More generally, we are planning to use `--enable-incomplete-feature` to introduce experimental features that would benefit from community feedback.
+
+This feature was contributed by Ivan Levkivskyi (PR [16237](https://github.com/python/mypy/pull/16237)).
+
 #### Mypy Changelog
 
 We now maintain a [changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) in the mypy Git repository. It mirrors the contents of [mypy release blog posts](https://mypy-lang.blogspot.com/). We will continue to also publish release blog posts. In the future, release blog posts will be created based on the changelog near a release date.
@@ -175,6 +192,7 @@ This was contributed by Shantanu (PR [16280](https://github.com/python/mypy/pull
 
 #### Other Notable Changes and Fixes
 
+ * Propagate narrowed types to lambda expressions (Ivan Levkivskyi, PR [16407](https://github.com/python/mypy/pull/16407))
  * Avoid importing from `setuptools._distutils` (Shantanu, PR [16348](https://github.com/python/mypy/pull/16348))
  * Delete recursive aliases flags (Ivan Levkivskyi, PR [16346](https://github.com/python/mypy/pull/16346))
  * Properly use proper subtyping for callables (Ivan Levkivskyi, PR [16343](https://github.com/python/mypy/pull/16343))

From 85f405a4cd8c00f1297609829c8c5d12f66fae9d Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Fri, 10 Nov 2023 14:19:23 +0000
Subject: [PATCH 241/288] Script to generate release blog post from changelog
 (#16447)

This is an adaptation of an older script that we used to convert from a
Paper doc to HTML.

This includes changes by @gvanrossum and @svalentin.
---
 misc/gen_blog_post_html.py | 171 +++++++++++++++++++++++++++++++++++++
 1 file changed, 171 insertions(+)
 create mode 100644 misc/gen_blog_post_html.py

diff --git a/misc/gen_blog_post_html.py b/misc/gen_blog_post_html.py
new file mode 100644
index 000000000000..7170696d5d09
--- /dev/null
+++ b/misc/gen_blog_post_html.py
@@ -0,0 +1,171 @@
+"""Converter from CHANGELOG.md (Markdown) to HTML suitable for a mypy blog post.
+
+How to use:
+
+1. Write release notes in CHANGELOG.md.
+2. Make sure the heading for the next release is of form `## Mypy X.Y`.
+3. Run `misc/gen_blog_post_html.py X.Y > target.html`.
+4. Manually inspect and tweak the result.
+
+Notes:
+
+* There are some fragile assumptions. Double check the output.
+"""
+
+import argparse
+import html
+import os
+import re
+import sys
+
+
+def format_lists(h: str) -> str:
+    a = h.splitlines()
+    r = []
+    i = 0
+    bullets = ("- ", "* ", " * ")
+    while i < len(a):
+        if a[i].startswith(bullets):
+            r.append("<p><ul>")
+            while i < len(a) and a[i].startswith(bullets):
+                r.append("<li>%s" % a[i][2:].lstrip())
+                i += 1
+            r.append("</ul>")
+        else:
+            r.append(a[i])
+            i += 1
+    return "\n".join(r)
+
+
+def format_code(h: str) -> str:
+    a = h.splitlines()
+    r = []
+    i = 0
+    while i < len(a):
+        if a[i].startswith("    ") or a[i].startswith("```"):
+            indent = a[i].startswith("    ")
+            if not indent:
+                i += 1
+            r.append("<pre>")
+            while i < len(a) and (
+                (indent and a[i].startswith("    ")) or (not indent and not a[i].startswith("```"))
+            ):
+                # Undo &gt; and &lt;
+                line = a[i].replace("&gt;", ">").replace("&lt;", "<")
+                if not indent:
+                    line = "    " + line
+                r.append(html.escape(line))
+                i += 1
+            r.append("</pre>")
+            if not indent and a[i].startswith("```"):
+                i += 1
+        else:
+            r.append(a[i])
+            i += 1
+    return "\n".join(r)
+
+
+def convert(src: str) -> str:
+    h = src
+
+    # Replace < and >.
+    h = re.sub(r"<", "&lt;", h)
+    h = re.sub(r">", "&gt;", h)
+
+    # Title
+    h = re.sub(r"^## (Mypy [0-9.]+)", r"<h1>\1 Released</h1>", h, flags=re.MULTILINE)
+
+    # Subheadings
+    h = re.sub(r"\n#### ([A-Z`].*)\n", r"\n<h2>\1</h2>\n", h)
+
+    # Sub-subheadings
+    h = re.sub(r"\n\*\*([A-Z_`].*)\*\*\n", r"\n<h3>\1</h3>\n", h)
+    h = re.sub(r"\n`\*\*([A-Z_`].*)\*\*\n", r"\n<h3>`\1</h3>\n", h)
+
+    # Translate `**`
+    h = re.sub(r"`\*\*`", "<tt>**</tt>", h)
+
+    # Paragraphs
+    h = re.sub(r"\n([A-Z])", r"\n<p>\1", h)
+
+    # Bullet lists
+    h = format_lists(h)
+
+    # Code blocks
+    h = format_code(h)
+
+    # Code fragments
+    h = re.sub(r"`([^`]+)`", r"<tt>\1</tt>", h)
+
+    # Remove **** noise
+    h = re.sub(r"\*\*\*\*", "", h)
+
+    # Bold text
+    h = re.sub(r"\*\*([A-Za-z].*?)\*\*", r" <b>\1</b>", h)
+
+    # Emphasized text
+    h = re.sub(r" \*([A-Za-z].*?)\*", r" <i>\1</i>", h)
+
+    # Remove redundant PR links to avoid double links (they will be generated below)
+    h = re.sub(r"\[(#[0-9]+)\]\(https://github.com/python/mypy/pull/[0-9]+/?\)", r"\1", h)
+
+    # Issue and PR links
+    h = re.sub(r"\((#[0-9]+)\) +\(([^)]+)\)", r"(\2, \1)", h)
+    h = re.sub(
+        r"fixes #([0-9]+)",
+        r'fixes issue <a href="https://github.com/python/mypy/issues/\1">\1</a>',
+        h,
+    )
+    h = re.sub(r"#([0-9]+)", r'PR <a href="https://github.com/python/mypy/pull/\1">\1</a>', h)
+    h = re.sub(r"\) \(PR", ", PR", h)
+
+    # Markdown links
+    h = re.sub(r"\[([^]]*)\]\(([^)]*)\)", r'<a href="\2">\1</a>', h)
+
+    # Add random links in case they are missing
+    h = re.sub(
+        r"contributors to typeshed:",
+        'contributors to <a href="https://github.com/python/typeshed">typeshed</a>:',
+        h,
+    )
+
+    # Add missing top-level HTML tags
+    h = '<html>\n<meta charset="utf-8" />\n<body>\n' + h + "</body>\n</html>"
+
+    return h
+
+
+def extract_version(src: str, version: str) -> str:
+    a = src.splitlines()
+    i = 0
+    heading = f"## Mypy {version}"
+    while i < len(a):
+        if a[i].strip() == heading:
+            break
+        i += 1
+    else:
+        raise RuntimeError(f"Can't find heading {heading!r}")
+    j = i + 1
+    while not a[j].startswith("## "):
+        j += 1
+    return "\n".join(a[i:j])
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser(
+        description="Generate HTML release blog post based on CHANGELOG.md and write to stdout."
+    )
+    parser.add_argument("version", help="mypy version, in form X.Y or X.Y.Z")
+    args = parser.parse_args()
+    version: str = args.version
+    if not re.match(r"[0-9]+(\.[0-9]+)+$", version):
+        sys.exit(f"error: Version must be of form X.Y or X.Y.Z, not {version!r}")
+    changelog_path = os.path.join(os.path.dirname(__file__), os.path.pardir, "CHANGELOG.md")
+    src = open(changelog_path).read()
+    src = extract_version(src, version)
+    dst = convert(src)
+    sys.stdout.write(dst)
+
+
+if __name__ == "__main__":
+    main()

From 4f020e7b6f12b3d0c32372667d99dbc13b715429 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Fri, 10 Nov 2023 14:25:45 +0000
Subject: [PATCH 242/288] Mark mypy 1.7 changelog as final/released (#16448)

We are now very close to release.
---
 CHANGELOG.md | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9b3cd19e6df1..d59fe36901be 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,13 +4,11 @@
 
 Stubgen will now include `__all__` in its output if it is in the input file (PR [16356](https://github.com/python/mypy/pull/16356)).
 
-## Mypy 1.7 [unreleased]
+## Mypy 1.7
 
 We’ve just uploaded mypy 1.7 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:
 
-```
     python3 -m pip install -U mypy
-```
 
 You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io).
 
@@ -42,7 +40,7 @@ def foo(*, name: str, age: int) -> None:
 
 Refer to [PEP 692](https://peps.python.org/pep-0692/) for more information. Note that unlike in the current version of the PEP, mypy always treats signatures with `Unpack[SomeTypedDict]` as equivalent to their expanded forms with explicit keyword arguments, and there aren't special type checking rules for TypedDict arguments.
 
-This was contributed by Ivan Levkivskyi back in 2022 ([PR 13471](https://github.com/python/mypy/pull/13471)).
+This was contributed by Ivan Levkivskyi back in 2022 (PR [13471](https://github.com/python/mypy/pull/13471)).
 
 #### TypeVarTuple Support Enabled (Experimental)
 

From ed03aff13d423fd7da836b4a414ba6787166d993 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Fri, 10 Nov 2023 15:11:02 +0000
Subject: [PATCH 243/288] Fix typo in changelog (#16449)

I also updated the blog post.
---
 CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d59fe36901be..a5523894a524 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -103,7 +103,7 @@ def f(t: tuple[int, int] | tuple[int, int, int]) -> None:
 
 This feature was contributed by Ivan Levkivskyi (PR [16237](https://github.com/python/mypy/pull/16237)).
 
-#### More Precise Tuple Lenghts (Experimental)
+#### More Precise Tuple Lengths (Experimental)
 
 Mypy supports experimental, more precise checking of tuple type lengths through `--enable-incomplete-feature=PreciseTupleTypes`. Refer to the [documentation](https://mypy.readthedocs.io/en/latest/command_line.html#enabling-incomplete-experimental-features) for more information.
 

From c68bd7ae2cffe8f0377ea9aab54b963b9fac3231 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sat, 11 Nov 2023 12:59:03 +0000
Subject: [PATCH 244/288] Fix crash on Callable self in __call__ (#16453)

Fixes https://github.com/python/mypy/issues/16450

The fix is a bit ad hoc, but on the other hand there is nothing meaningful
we can infer in such a situation, so it is probably OK.
---
 mypy/typeops.py                    | 12 ++++++++----
 test-data/unit/check-selftype.test | 15 +++++++++++++++
 2 files changed, 23 insertions(+), 4 deletions(-)

diff --git a/mypy/typeops.py b/mypy/typeops.py
index 2eb3b284e729..e92fad0e872c 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -244,15 +244,15 @@ class C(D[E[T]], Generic[T]): ...
     return expand_type_by_instance(typ, inst_type)
 
 
-def supported_self_type(typ: ProperType) -> bool:
+def supported_self_type(typ: ProperType, allow_callable: bool = True) -> bool:
     """Is this a supported kind of explicit self-types?
 
-    Currently, this means a X or Type[X], where X is an instance or
+    Currently, this means an X or Type[X], where X is an instance or
     a type variable with an instance upper bound.
     """
     if isinstance(typ, TypeType):
         return supported_self_type(typ.item)
-    if isinstance(typ, CallableType):
+    if allow_callable and isinstance(typ, CallableType):
         # Special case: allow class callable instead of Type[...] as cls annotation,
         # as well as callable self for callback protocols.
         return True
@@ -306,7 +306,11 @@ class B(A): pass
     self_param_type = get_proper_type(func.arg_types[0])
 
     variables: Sequence[TypeVarLikeType]
-    if func.variables and supported_self_type(self_param_type):
+    # Having a def __call__(self: Callable[...], ...) can cause infinite recursion. Although
+    # this special-casing looks not very principled, there is nothing meaningful we can infer
+    # from such definition, since it is inherently indefinitely recursive.
+    allow_callable = func.name is None or not func.name.startswith("__call__ of")
+    if func.variables and supported_self_type(self_param_type, allow_callable=allow_callable):
         from mypy.infer import infer_type_arguments
 
         if original_type is None:
diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test
index 29abe9cb025b..e49a7a0e2e2f 100644
--- a/test-data/unit/check-selftype.test
+++ b/test-data/unit/check-selftype.test
@@ -2056,3 +2056,18 @@ reveal_type(C.copy(c))  # N: Revealed type is "__main__.C[builtins.int, builtins
 B.copy(42)  # E: Value of type variable "Self" of "copy" of "B" cannot be "int"
 C.copy(42)  # E: Value of type variable "Self" of "copy" of "B" cannot be "int"
 [builtins fixtures/tuple.pyi]
+
+[case testRecursiveSelfTypeCallMethodNoCrash]
+from typing import Callable, TypeVar
+
+T = TypeVar("T")
+class Partial:
+    def __call__(self: Callable[..., T]) -> T: ...
+
+class Partial2:
+    def __call__(self: Callable[..., T], x: T) -> T: ...
+
+p: Partial
+reveal_type(p())  # N: Revealed type is "Never"
+p2: Partial2
+reveal_type(p2(42))  # N: Revealed type is "builtins.int"

From a7c53d33be8e7620d14ecfba496aa912e95bf4fa Mon Sep 17 00:00:00 2001
From: "Michael R. Crusoe" <1330696+mr-c@users.noreply.github.com>
Date: Sat, 11 Nov 2023 21:06:38 +0100
Subject: [PATCH 245/288] Support Sphinx 7.x (#16460)

This is needed for the Debian package of mypy, as we package the docs
and we have upgraded to Sphinx 7.2.6 from 5.3.0 for the next release.
Thanks!

Bonus: Adjust many links to docs.python.org so that intersphinx can
connect them to local file for offline use.
---
 docs/requirements-docs.txt       | 2 +-
 docs/source/class_basics.rst     | 5 ++---
 docs/source/config_file.rst      | 6 ++----
 docs/source/error_code_list2.rst | 3 +--
 docs/source/getting_started.rst  | 3 +--
 docs/source/html_builder.py      | 5 +++--
 docs/source/more_types.rst       | 2 +-
 7 files changed, 11 insertions(+), 15 deletions(-)

diff --git a/docs/requirements-docs.txt b/docs/requirements-docs.txt
index 395964ad9d44..a3504b07824d 100644
--- a/docs/requirements-docs.txt
+++ b/docs/requirements-docs.txt
@@ -1,2 +1,2 @@
-sphinx>=4.2.0,<5.0.0
+sphinx>=5.1.0
 furo>=2022.3.4
diff --git a/docs/source/class_basics.rst b/docs/source/class_basics.rst
index 73f95f1c5658..1d80da5830ec 100644
--- a/docs/source/class_basics.rst
+++ b/docs/source/class_basics.rst
@@ -263,7 +263,7 @@ effect at runtime:
 Abstract base classes and multiple inheritance
 **********************************************
 
-Mypy supports Python :doc:`abstract base classes <library/abc>` (ABCs). Abstract classes
+Mypy supports Python :doc:`abstract base classes <python:library/abc>` (ABCs). Abstract classes
 have at least one abstract method or property that must be implemented
 by any *concrete* (non-abstract) subclass. You can define abstract base
 classes using the :py:class:`abc.ABCMeta` metaclass and the :py:func:`@abc.abstractmethod <abc.abstractmethod>`
@@ -371,8 +371,7 @@ property or an instance variable.
 Slots
 *****
 
-When a class has explicitly defined
-`__slots__ <https://docs.python.org/3/reference/datamodel.html#slots>`_,
+When a class has explicitly defined :std:term:`__slots__`,
 mypy will check that all attributes assigned to are members of ``__slots__``:
 
 .. code-block:: python
diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst
index b5ce23ff11ec..de769200bf2b 100644
--- a/docs/source/config_file.rst
+++ b/docs/source/config_file.rst
@@ -238,10 +238,8 @@ section of the command line docs.
     Crafting a single regular expression that excludes multiple files while remaining
     human-readable can be a challenge. The above example demonstrates one approach.
     ``(?x)`` enables the ``VERBOSE`` flag for the subsequent regular expression, which
-    `ignores most whitespace and supports comments`__. The above is equivalent to:
-    ``(^one\.py$|two\.pyi$|^three\.)``.
-
-    .. __: https://docs.python.org/3/library/re.html#re.X
+    :py:data:`ignores most whitespace and supports comments <re.VERBOSE>`.
+    The above is equivalent to: ``(^one\.py$|two\.pyi$|^three\.)``.
 
     For more details, see :option:`--exclude <mypy --exclude>`.
 
diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst
index 9e24f21909d5..60f870c57db9 100644
--- a/docs/source/error_code_list2.rst
+++ b/docs/source/error_code_list2.rst
@@ -524,8 +524,7 @@ that only existed during type-checking.
 In runtime it fails with expected ``NameError``,
 which can cause real problem in production, hidden from mypy.
 
-But, in Python3.11 ``reveal_type``
-`was added to typing.py <https://docs.python.org/3/library/typing.html#typing.reveal_type>`_.
+But, in Python3.11 :py:func:`typing.reveal_type` was added.
 ``typing_extensions`` ported this helper to all supported Python versions.
 
 Now users can actually import ``reveal_type`` to make the runtime code safe.
diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst
index 463c73b2fe76..7ea4ddd148ea 100644
--- a/docs/source/getting_started.rst
+++ b/docs/source/getting_started.rst
@@ -256,8 +256,7 @@ Mypy can also understand how to work with types from libraries that you use.
 
 For instance, mypy comes out of the box with an intimate knowledge of the
 Python standard library. For example, here is a function which uses the
-``Path`` object from the
-`pathlib standard library module <https://docs.python.org/3/library/pathlib.html>`_:
+``Path`` object from the :doc:`pathlib standard library module <python:library/pathlib>`:
 
 .. code-block:: python
 
diff --git a/docs/source/html_builder.py b/docs/source/html_builder.py
index 3064833b5631..ea3594e0617b 100644
--- a/docs/source/html_builder.py
+++ b/docs/source/html_builder.py
@@ -9,11 +9,12 @@
 from sphinx.addnodes import document
 from sphinx.application import Sphinx
 from sphinx.builders.html import StandaloneHTMLBuilder
+from sphinx.environment import BuildEnvironment
 
 
 class MypyHTMLBuilder(StandaloneHTMLBuilder):
-    def __init__(self, app: Sphinx) -> None:
-        super().__init__(app)
+    def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
+        super().__init__(app, env)
         self._ref_to_doc = {}
 
     def write_doc(self, docname: str, doctree: document) -> None:
diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst
index b27764a9e87c..cb3ef64b39a7 100644
--- a/docs/source/more_types.rst
+++ b/docs/source/more_types.rst
@@ -829,7 +829,7 @@ Typing async/await
 
 Mypy lets you type coroutines that use the ``async/await`` syntax.
 For more information regarding coroutines, see :pep:`492` and the
-`asyncio documentation <https://docs.python.org/3/library/asyncio.html>`_.
+`asyncio documentation <python:library/asyncio>`_.
 
 Functions defined using ``async def`` are typed similar to normal functions.
 The return type annotation should be the same as the type of the value you

From 8ae84edc3033c3cfa6a6dbfb920d859a9be4277d Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 11 Nov 2023 12:07:58 -0800
Subject: [PATCH 246/288] stubtest: special case final and deprecated (#16457)

We should probably lean into the type checker harder here

Fixes #14950
Fixes
https://github.com/python/typeshed/pull/11009#issuecomment-1805013903
---
 mypy/stubtest.py          |  7 +++++++
 mypy/test/teststubtest.py | 19 +++++++++++++++++++
 mypy/types.py             |  3 +++
 3 files changed, 29 insertions(+)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index e80ea4eac71f..ae410ff2ba6b 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -1215,6 +1215,12 @@ def _resolve_funcitem_from_decorator(dec: nodes.OverloadPart) -> nodes.FuncItem
     def apply_decorator_to_funcitem(
         decorator: nodes.Expression, func: nodes.FuncItem
     ) -> nodes.FuncItem | None:
+        if (
+            isinstance(decorator, nodes.CallExpr)
+            and isinstance(decorator.callee, nodes.RefExpr)
+            and decorator.callee.fullname in mypy.types.DEPRECATED_TYPE_NAMES
+        ):
+            return func
         if not isinstance(decorator, nodes.RefExpr):
             return None
         if not decorator.fullname:
@@ -1223,6 +1229,7 @@ def apply_decorator_to_funcitem(
         if (
             decorator.fullname in ("builtins.staticmethod", "abc.abstractmethod")
             or decorator.fullname in mypy.types.OVERLOAD_NAMES
+            or decorator.fullname in mypy.types.FINAL_DECORATOR_NAMES
         ):
             return func
         if decorator.fullname == "builtins.classmethod":
diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py
index a52d9ef5de31..0c1817202f1f 100644
--- a/mypy/test/teststubtest.py
+++ b/mypy/test/teststubtest.py
@@ -71,6 +71,8 @@ class Sequence(Iterable[_T_co]): ...
 class Tuple(Sequence[_T_co]): ...
 class NamedTuple(tuple[Any, ...]): ...
 def overload(func: _T) -> _T: ...
+def deprecated(__msg: str) -> Callable[[_T], _T]: ...
+def final(func: _T) -> _T: ...
 """
 
 stubtest_builtins_stub = """
@@ -630,6 +632,23 @@ def f5(__b: str) -> str: ...
             runtime="def f5(x, /): pass",
             error=None,
         )
+        yield Case(
+            stub="""
+            from typing import deprecated, final
+            class Foo:
+                @overload
+                @final
+                def f6(self, __a: int) -> int: ...
+                @overload
+                @deprecated("evil")
+                def f6(self, __b: str) -> str: ...
+            """,
+            runtime="""
+            class Foo:
+                def f6(self, x, /): pass
+            """,
+            error=None,
+        )
 
     @collect_cases
     def test_property(self) -> Iterator[Case]:
diff --git a/mypy/types.py b/mypy/types.py
index 43003a9a22b6..b100cf569086 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -119,6 +119,9 @@
 # Supported Annotated type names.
 ANNOTATED_TYPE_NAMES: Final = ("typing.Annotated", "typing_extensions.Annotated")
 
+# Supported @deprecated type names
+DEPRECATED_TYPE_NAMES: Final = ("typing.deprecated", "typing_extensions.deprecated")
+
 # We use this constant in various places when checking `tuple` subtyping:
 TUPLE_LIKE_INSTANCE_NAMES: Final = (
     "builtins.tuple",

From f7a05300f70321e18904d908c66522cc792d4123 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 11 Nov 2023 12:09:29 -0800
Subject: [PATCH 247/288] stubtest: warn about missing __del__ (#16456)

Fixes #16414
---
 mypy/stubtest.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index ae410ff2ba6b..adffe2003ad4 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -1381,7 +1381,6 @@ def verify_typealias(
         "__annotations__",
         "__text_signature__",
         "__weakref__",
-        "__del__",  # Only ever called when an object is being deleted, who cares?
         "__hash__",
         "__getattr__",  # resulting behaviour might be typed explicitly
         "__setattr__",  # defining this on a class can cause worse type checking

From e4c43cb68b742f0f51759565d1d3c4a722d16f55 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sun, 12 Nov 2023 00:00:11 +0300
Subject: [PATCH 248/288] [stubtest] support `@type_check_only` decorator
 (#16422)

There are several `TODO` items for the future (not in this PR):
- [ ] Add an error code to disallow importing things that are decorated
with `@type_check_only`
- [ ] Support `@overload`ed functions. But, how? There are two options:
we can treat individual overload cases as `@type_check_only` or we can
treat the whole func. Since `typeshed` does not have any examples of
this, I prefer to defer this discussion to somewhere else and support
this when we decide

Refs https://github.com/python/mypy/issues/15146

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 mypy/nodes.py                           |  7 ++++
 mypy/semanal.py                         |  6 ++++
 mypy/stubtest.py                        | 27 +++++++++++++++
 mypy/test/teststubtest.py               | 45 +++++++++++++++++++++++++
 mypy/types.py                           |  3 ++
 test-data/unit/fixtures/typing-full.pyi |  3 ++
 6 files changed, 91 insertions(+)

diff --git a/mypy/nodes.py b/mypy/nodes.py
index d65a23a6b7fe..17e06613d1e3 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -513,6 +513,7 @@ class FuncBase(Node):
         "is_static",  # Uses "@staticmethod" (explicit or implicit)
         "is_final",  # Uses "@final"
         "is_explicit_override",  # Uses "@override"
+        "is_type_check_only",  # Uses "@type_check_only"
         "_fullname",
     )
 
@@ -530,6 +531,7 @@ def __init__(self) -> None:
         self.is_static = False
         self.is_final = False
         self.is_explicit_override = False
+        self.is_type_check_only = False
         # Name with module prefix
         self._fullname = ""
 
@@ -2866,6 +2868,7 @@ class is generic then it will be a type constructor of higher kind.
         "type_var_tuple_suffix",
         "self_type",
         "dataclass_transform_spec",
+        "is_type_check_only",
     )
 
     _fullname: str  # Fully qualified name
@@ -3016,6 +3019,9 @@ class is generic then it will be a type constructor of higher kind.
     # Added if the corresponding class is directly decorated with `typing.dataclass_transform`
     dataclass_transform_spec: DataclassTransformSpec | None
 
+    # Is set to `True` when class is decorated with `@typing.type_check_only`
+    is_type_check_only: bool
+
     FLAGS: Final = [
         "is_abstract",
         "is_enum",
@@ -3072,6 +3078,7 @@ def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None
         self.metadata = {}
         self.self_type = None
         self.dataclass_transform_spec = None
+        self.is_type_check_only = False
 
     def add_type_vars(self) -> None:
         self.has_type_var_tuple_type = False
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 6f322af816ea..68f0d04e77ca 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -251,6 +251,7 @@
     REVEAL_TYPE_NAMES,
     TPDICT_NAMES,
     TYPE_ALIAS_NAMES,
+    TYPE_CHECK_ONLY_NAMES,
     TYPED_NAMEDTUPLE_NAMES,
     AnyType,
     CallableType,
@@ -1568,6 +1569,9 @@ def visit_decorator(self, dec: Decorator) -> None:
                     removed.append(i)
                 else:
                     self.fail("@final cannot be used with non-method functions", d)
+            elif refers_to_fullname(d, TYPE_CHECK_ONLY_NAMES):
+                # TODO: support `@overload` funcs.
+                dec.func.is_type_check_only = True
             elif isinstance(d, CallExpr) and refers_to_fullname(
                 d.callee, DATACLASS_TRANSFORM_NAMES
             ):
@@ -1868,6 +1872,8 @@ def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None
                     self.fail("@runtime_checkable can only be used with protocol classes", defn)
             elif decorator.fullname in FINAL_DECORATOR_NAMES:
                 defn.info.is_final = True
+            elif refers_to_fullname(decorator, TYPE_CHECK_ONLY_NAMES):
+                defn.info.is_type_check_only = True
         elif isinstance(decorator, CallExpr) and refers_to_fullname(
             decorator.callee, DATACLASS_TRANSFORM_NAMES
         ):
diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index adffe2003ad4..46468e8e18e0 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -484,6 +484,19 @@ def _verify_metaclass(
 def verify_typeinfo(
     stub: nodes.TypeInfo, runtime: MaybeMissing[type[Any]], object_path: list[str]
 ) -> Iterator[Error]:
+    if stub.is_type_check_only:
+        # This type only exists in stubs, we only check that the runtime part
+        # is missing. Other checks are not required.
+        if not isinstance(runtime, Missing):
+            yield Error(
+                object_path,
+                'is marked as "@type_check_only", but also exists at runtime',
+                stub,
+                runtime,
+                stub_desc=repr(stub),
+            )
+        return
+
     if isinstance(runtime, Missing):
         yield Error(object_path, "is not present at runtime", stub, runtime, stub_desc=repr(stub))
         return
@@ -1066,6 +1079,7 @@ def verify_var(
 def verify_overloadedfuncdef(
     stub: nodes.OverloadedFuncDef, runtime: MaybeMissing[Any], object_path: list[str]
 ) -> Iterator[Error]:
+    # TODO: support `@type_check_only` decorator
     if isinstance(runtime, Missing):
         yield Error(object_path, "is not present at runtime", stub, runtime)
         return
@@ -1260,6 +1274,19 @@ def apply_decorator_to_funcitem(
 def verify_decorator(
     stub: nodes.Decorator, runtime: MaybeMissing[Any], object_path: list[str]
 ) -> Iterator[Error]:
+    if stub.func.is_type_check_only:
+        # This function only exists in stubs, we only check that the runtime part
+        # is missing. Other checks are not required.
+        if not isinstance(runtime, Missing):
+            yield Error(
+                object_path,
+                'is marked as "@type_check_only", but also exists at runtime',
+                stub,
+                runtime,
+                stub_desc=repr(stub),
+            )
+        return
+
     if isinstance(runtime, Missing):
         yield Error(object_path, "is not present at runtime", stub, runtime)
         return
diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py
index 0c1817202f1f..6a973d16d7bc 100644
--- a/mypy/test/teststubtest.py
+++ b/mypy/test/teststubtest.py
@@ -71,6 +71,7 @@ class Sequence(Iterable[_T_co]): ...
 class Tuple(Sequence[_T_co]): ...
 class NamedTuple(tuple[Any, ...]): ...
 def overload(func: _T) -> _T: ...
+def type_check_only(func: _T) -> _T: ...
 def deprecated(__msg: str) -> Callable[[_T], _T]: ...
 def final(func: _T) -> _T: ...
 """
@@ -2046,6 +2047,50 @@ def some(self) -> int: ...
             error=None,
         )
 
+    @collect_cases
+    def test_type_check_only(self) -> Iterator[Case]:
+        yield Case(
+            stub="from typing import type_check_only, overload",
+            runtime="from typing import overload",
+            error=None,
+        )
+        # You can have public types that are only defined in stubs
+        # with `@type_check_only`:
+        yield Case(
+            stub="""
+            @type_check_only
+            class A1: ...
+            """,
+            runtime="",
+            error=None,
+        )
+        # Having `@type_check_only` on a type that exists at runtime is an error
+        yield Case(
+            stub="""
+            @type_check_only
+            class A2: ...
+            """,
+            runtime="class A2: ...",
+            error="A2",
+        )
+        # The same is true for functions:
+        yield Case(
+            stub="""
+            @type_check_only
+            def func1() -> None: ...
+            """,
+            runtime="",
+            error=None,
+        )
+        yield Case(
+            stub="""
+            @type_check_only
+            def func2() -> None: ...
+            """,
+            runtime="def func2() -> None: ...",
+            error="func2",
+        )
+
 
 def remove_color_code(s: str) -> str:
     return re.sub("\\x1b.*?m", "", s)  # this works!
diff --git a/mypy/types.py b/mypy/types.py
index b100cf569086..e7738bd7d088 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -113,6 +113,9 @@
 # Supported @final decorator names.
 FINAL_DECORATOR_NAMES: Final = ("typing.final", "typing_extensions.final")
 
+# Supported @type_check_only names.
+TYPE_CHECK_ONLY_NAMES: Final = ("typing.type_check_only", "typing_extensions.type_check_only")
+
 # Supported Literal type names.
 LITERAL_TYPE_NAMES: Final = ("typing.Literal", "typing_extensions.Literal")
 
diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi
index ef903ace78af..ca8a2413f05f 100644
--- a/test-data/unit/fixtures/typing-full.pyi
+++ b/test-data/unit/fixtures/typing-full.pyi
@@ -196,3 +196,6 @@ def override(__arg: T) -> T: ...
 
 # Was added in 3.11
 def reveal_type(__obj: T) -> T: ...
+
+# Only exists in type checking time:
+def type_check_only(__func_or_class: T) -> T: ...

From 8f3fe7c0768ffcbcce760b3088702d95f3c57220 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sun, 12 Nov 2023 09:54:52 +0000
Subject: [PATCH 249/288] Exclude private attributes from override checks
 (#16464)

Fixes https://github.com/python/mypy/issues/9910
Fixes https://github.com/python/mypy/issues/16452

We already exclude private names from override type compatibility checks
etc., but it looks like some override checks were still performed; we
need to skip them as well, since a private name is actually a different
name in the subclass.

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 mypy/checker.py                                     |  7 +++++--
 test-data/unit/check-dataclasses.test               | 13 +++++++++++++
 test-data/unit/check-final.test                     | 13 +++++++++++++
 test-data/unit/check-functions.test                 | 10 ++++++++++
 .../unit/fine-grained-dataclass-transform.test      |  4 ++--
 test-data/unit/fixtures/dataclasses.pyi             |  5 +++++
 6 files changed, 48 insertions(+), 4 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index e4eb58d40715..b9a9d3affb90 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -1879,6 +1879,7 @@ def check_explicit_override_decorator(
             found_method_base_classes
             and not defn.is_explicit_override
             and defn.name not in ("__init__", "__new__")
+            and not is_private(defn.name)
         ):
             self.msg.explicit_override_decorator_missing(
                 defn.name, found_method_base_classes[0].fullname, context or defn
@@ -1921,7 +1922,7 @@ def check_method_or_accessor_override_for_base(
             base_attr = base.names.get(name)
             if base_attr:
                 # First, check if we override a final (always an error, even with Any types).
-                if is_final_node(base_attr.node):
+                if is_final_node(base_attr.node) and not is_private(name):
                     self.msg.cant_override_final(name, base.name, defn)
                 # Second, final can't override anything writeable independently of types.
                 if defn.is_final:
@@ -2680,7 +2681,7 @@ class C(B, A[int]): ...  # this is unsafe because...
             ok = True
         # Final attributes can never be overridden, but can override
         # non-final read-only attributes.
-        if is_final_node(second.node):
+        if is_final_node(second.node) and not is_private(name):
             self.msg.cant_override_final(name, base2.name, ctx)
         if is_final_node(first.node):
             self.check_if_final_var_override_writable(name, second.node, ctx)
@@ -3308,6 +3309,8 @@ def check_compatibility_final_super(
         """
         if not isinstance(base_node, (Var, FuncBase, Decorator)):
             return True
+        if is_private(node.name):
+            return True
         if base_node.is_final and (node.is_final or not isinstance(base_node, Var)):
             # Give this error only for explicit override attempt with `Final`, or
             # if we are overriding a final method with variable.
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index d37ae569cc5e..107298875761 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -2531,3 +2531,16 @@ class Foo:
 
     c: int  # E: Name "c" already defined on line 5
 [builtins fixtures/dataclasses.pyi]
+
+[case testDataclassInheritanceWorksWithExplicitOverrides]
+# flags: --enable-error-code explicit-override
+from dataclasses  import dataclass
+
+@dataclass
+class Base:
+    x: int
+
+@dataclass
+class Child(Base):
+    y: int
+[builtins fixtures/dataclasses.pyi]
diff --git a/test-data/unit/check-final.test b/test-data/unit/check-final.test
index da034caced76..a2fd64386707 100644
--- a/test-data/unit/check-final.test
+++ b/test-data/unit/check-final.test
@@ -1117,3 +1117,16 @@ from typing import Final
 class MyClass:
     a: None
     a: Final[int] = 1  # E: Cannot redefine an existing name as final  # E: Name "a" already defined on line 5
+
+[case testFinalOverrideAllowedForPrivate]
+from typing import Final, final
+
+class Parent:
+    __foo: Final[int] = 0
+    @final
+    def __bar(self) -> None: ...
+
+class Child(Parent):
+    __foo: Final[int] = 1
+    @final
+    def __bar(self) -> None: ...
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test
index cd098a84d4d3..b3df5fddafba 100644
--- a/test-data/unit/check-functions.test
+++ b/test-data/unit/check-functions.test
@@ -3159,6 +3159,16 @@ class D(A, B):
     def f(self, z: int) -> str: pass  # E: Method "f" is not using @override but is overriding a method in class "__main__.A"
 [typing fixtures/typing-override.pyi]
 
+[case testExplicitOverrideAllowedForPrivate]
+# flags: --enable-error-code explicit-override --python-version 3.12
+
+class B:
+    def __f(self, y: int) -> str: pass
+
+class C(B):
+    def __f(self, y: int) -> str: pass  # OK
+[typing fixtures/typing-override.pyi]
+
 [case testCallableProperty]
 from typing import Callable
 
diff --git a/test-data/unit/fine-grained-dataclass-transform.test b/test-data/unit/fine-grained-dataclass-transform.test
index cc297bc344aa..89628256fda5 100644
--- a/test-data/unit/fine-grained-dataclass-transform.test
+++ b/test-data/unit/fine-grained-dataclass-transform.test
@@ -86,9 +86,9 @@ class A(Dataclass):
 
 [out]
 main:7: error: Unexpected keyword argument "x" for "B"
-builtins.pyi:13: note: "B" defined here
+builtins.pyi:14: note: "B" defined here
 main:7: error: Unexpected keyword argument "y" for "B"
-builtins.pyi:13: note: "B" defined here
+builtins.pyi:14: note: "B" defined here
 ==
 
 [case frozenInheritanceViaDefault]
diff --git a/test-data/unit/fixtures/dataclasses.pyi b/test-data/unit/fixtures/dataclasses.pyi
index 059c853a621f..29f87ae97e62 100644
--- a/test-data/unit/fixtures/dataclasses.pyi
+++ b/test-data/unit/fixtures/dataclasses.pyi
@@ -3,6 +3,7 @@ from typing import (
     Generic, Iterator, Iterable, Mapping, Optional, Sequence, Tuple,
     TypeVar, Union, overload,
 )
+from typing_extensions import override
 
 _T = TypeVar('_T')
 _U = TypeVar('_U')
@@ -29,8 +30,10 @@ class dict(Mapping[KT, VT]):
     def __init__(self, **kwargs: VT) -> None: pass
     @overload
     def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
+    @override
     def __getitem__(self, key: KT) -> VT: pass
     def __setitem__(self, k: KT, v: VT) -> None: pass
+    @override
     def __iter__(self) -> Iterator[KT]: pass
     def __contains__(self, item: object) -> int: pass
     def update(self, a: Mapping[KT, VT]) -> None: pass
@@ -42,7 +45,9 @@ class dict(Mapping[KT, VT]):
 
 class list(Generic[_T], Sequence[_T]):
     def __contains__(self, item: object) -> int: pass
+    @override
     def __getitem__(self, key: int) -> _T: pass
+    @override
     def __iter__(self) -> Iterator[_T]: pass
 
 class function: pass

From efa5dcb35929f8555c0fd687f493ab15ac13881d Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sun, 12 Nov 2023 20:31:25 +0000
Subject: [PATCH 250/288] Fix missing meet case exposed by len narrowing
 (#16470)

Fixes https://github.com/python/mypy/issues/16468

The fix is straightforward.

By the way, when fixing this I noticed that we disregard type arguments
when narrowing, for example:
```python
x: Sequence[int]
if isinstance(x, tuple):
    reveal_type(x)  # tuple[Any, ...], but should be `tuple[int, ...]`
```
I guess fixing this may be tricky, and it is quite old behavior.
---
 mypy/meet.py                        |  3 ++-
 test-data/unit/check-narrowing.test | 13 +++++++++++++
 test-data/unit/fixtures/len.pyi     |  2 +-
 3 files changed, 16 insertions(+), 2 deletions(-)

diff --git a/mypy/meet.py b/mypy/meet.py
index d2fb16808425..610185d6bbbf 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -16,6 +16,7 @@
 from mypy.typeops import is_recursive_pair, make_simplified_union, tuple_fallback
 from mypy.types import (
     MYPYC_NATIVE_INT_NAMES,
+    TUPLE_LIKE_INSTANCE_NAMES,
     AnyType,
     CallableType,
     DeletedType,
@@ -936,7 +937,7 @@ def visit_tuple_type(self, t: TupleType) -> ProperType:
             return TupleType(items, tuple_fallback(t))
         elif isinstance(self.s, Instance):
             # meet(Tuple[t1, t2, <...>], Tuple[s, ...]) == Tuple[meet(t1, s), meet(t2, s), <...>].
-            if self.s.type.fullname == "builtins.tuple" and self.s.args:
+            if self.s.type.fullname in TUPLE_LIKE_INSTANCE_NAMES and self.s.args:
                 return t.copy_modified(items=[meet_types(it, self.s.args[0]) for it in t.items])
             elif is_proper_subtype(t, self.s):
                 # A named tuple that inherits from a normal class
diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test
index 5b7fadf41c79..d0ad1367aca0 100644
--- a/test-data/unit/check-narrowing.test
+++ b/test-data/unit/check-narrowing.test
@@ -1910,3 +1910,16 @@ if len(x) == a:
 else:
     reveal_type(x)  # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]"
 [builtins fixtures/len.pyi]
+
+[case testNarrowingLenUnionWithUnreachable]
+from typing import Union, Sequence
+
+def f(x: Union[int, Sequence[int]]) -> None:
+    if (
+        isinstance(x, tuple)
+        and len(x) == 2
+        and isinstance(x[0], int)
+        and isinstance(x[1], int)
+    ):
+        reveal_type(x)  # N: Revealed type is "Tuple[builtins.int, builtins.int]"
+[builtins fixtures/len.pyi]
diff --git a/test-data/unit/fixtures/len.pyi b/test-data/unit/fixtures/len.pyi
index c72596661858..ee39d952701f 100644
--- a/test-data/unit/fixtures/len.pyi
+++ b/test-data/unit/fixtures/len.pyi
@@ -10,7 +10,7 @@ class object:
 class type:
     def __init__(self, x) -> None: pass
 
-class tuple(Generic[T]):
+class tuple(Sequence[T]):
     def __len__(self) -> int: pass
 
 class list(Sequence[T]): pass

From 023eb4101347dd151a2ce5bf7baf5a60d2de4145 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Mon, 13 Nov 2023 11:10:15 +0300
Subject: [PATCH 251/288] Refactor class decorator: this enables
 `type_check_only` support for `TypedDict` and `NamedTuple` (#16469)

I've noticed that `TypedDict` and `NamedTuple` classes are special-cased
during semantic analysis. They had their own logic for class-level
decorators. This is fine, but we need some common ground.

As a side-effect, they can now be `type_check_only`!
---
 mypy/semanal.py           | 25 ++++++++++++++++---------
 mypy/test/teststubtest.py | 22 ++++++++++++++++++++++
 2 files changed, 38 insertions(+), 9 deletions(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index 68f0d04e77ca..6714e8c56de9 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -1743,9 +1743,8 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> bool:
         if is_typeddict:
             for decorator in defn.decorators:
                 decorator.accept(self)
-                if isinstance(decorator, RefExpr):
-                    if decorator.fullname in FINAL_DECORATOR_NAMES and info is not None:
-                        info.is_final = True
+                if info is not None:
+                    self.analyze_class_decorator_common(defn, info, decorator)
             if info is None:
                 self.mark_incomplete(defn.name, defn)
             else:
@@ -1781,8 +1780,7 @@ def analyze_namedtuple_classdef(
                 with self.scope.class_scope(defn.info):
                     for deco in defn.decorators:
                         deco.accept(self)
-                        if isinstance(deco, RefExpr) and deco.fullname in FINAL_DECORATOR_NAMES:
-                            info.is_final = True
+                        self.analyze_class_decorator_common(defn, defn.info, deco)
                     with self.named_tuple_analyzer.save_namedtuple_body(info):
                         self.analyze_class_body_common(defn)
             return True
@@ -1864,21 +1862,30 @@ def leave_class(self) -> None:
 
     def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None:
         decorator.accept(self)
+        self.analyze_class_decorator_common(defn, defn.info, decorator)
         if isinstance(decorator, RefExpr):
             if decorator.fullname in RUNTIME_PROTOCOL_DECOS:
                 if defn.info.is_protocol:
                     defn.info.runtime_protocol = True
                 else:
                     self.fail("@runtime_checkable can only be used with protocol classes", defn)
-            elif decorator.fullname in FINAL_DECORATOR_NAMES:
-                defn.info.is_final = True
-            elif refers_to_fullname(decorator, TYPE_CHECK_ONLY_NAMES):
-                defn.info.is_type_check_only = True
         elif isinstance(decorator, CallExpr) and refers_to_fullname(
             decorator.callee, DATACLASS_TRANSFORM_NAMES
         ):
             defn.info.dataclass_transform_spec = self.parse_dataclass_transform_spec(decorator)
 
+    def analyze_class_decorator_common(
+        self, defn: ClassDef, info: TypeInfo, decorator: Expression
+    ) -> None:
+        """Common method for applying class decorators.
+
+        Called on regular classes, typeddicts, and namedtuples.
+        """
+        if refers_to_fullname(decorator, FINAL_DECORATOR_NAMES):
+            info.is_final = True
+        elif refers_to_fullname(decorator, TYPE_CHECK_ONLY_NAMES):
+            info.is_type_check_only = True
+
     def clean_up_bases_and_infer_type_variables(
         self, defn: ClassDef, base_type_exprs: list[Expression], context: Context
     ) -> tuple[list[Expression], list[TypeVarLikeType], bool]:
diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py
index 6a973d16d7bc..58602be3a624 100644
--- a/mypy/test/teststubtest.py
+++ b/mypy/test/teststubtest.py
@@ -2073,6 +2073,28 @@ class A2: ...
             runtime="class A2: ...",
             error="A2",
         )
+        # The same is true for NamedTuples and TypedDicts:
+        yield Case(
+            stub="from typing_extensions import NamedTuple, TypedDict",
+            runtime="from typing_extensions import NamedTuple, TypedDict",
+            error=None,
+        )
+        yield Case(
+            stub="""
+            @type_check_only
+            class NT1(NamedTuple): ...
+            """,
+            runtime="class NT1(NamedTuple): ...",
+            error="NT1",
+        )
+        yield Case(
+            stub="""
+            @type_check_only
+            class TD1(TypedDict): ...
+            """,
+            runtime="class TD1(TypedDict): ...",
+            error="TD1",
+        )
         # The same is true for functions:
         yield Case(
             stub="""

From 93e65e443eeac1e0f25a88b33851bb5239bab1a9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ole=20Peder=20Brandtz=C3=A6g?= <olepbr@gmail.com>
Date: Mon, 13 Nov 2023 16:51:26 +0100
Subject: [PATCH 252/288] Add missing language identifiers in changelog code
 snippets (#16475)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Lets the Markdown renderer highlight the code as Python rather than
leaving it un-highlighted :–)

This PR does not change mypy, nor do I believe any tests should be
written, as it only affects the rendering of a few code snippets in the
changelog (other snippets further down in the changelog already use the
identifier).
---
 CHANGELOG.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a5523894a524..f28cdb1ccc25 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,7 +16,7 @@ You can read the full documentation for this release on [Read the Docs](http://m
 
 Mypy now has support for using `Unpack[...]` with a TypedDict type to annotate `**kwargs` arguments enabled by default. Example:
 
-```
+```python
 # Or 'from typing_extensions import ...'
 from typing import TypedDict, Unpack
 
@@ -33,7 +33,7 @@ foo(name=1)  # Error
 
 The definition of `foo` above is equivalent to the one below, with keyword-only arguments `name` and `age`:
 
-```
+```python
 def foo(*, name: str, age: int) -> None:
     ...
 ```
@@ -94,7 +94,7 @@ The new type inference algorithm was contributed by Ivan Levkivskyi. PR [16345](
 
 Mypy now can narrow tuple types using `len()` checks. Example:
 
-```
+```python
 def f(t: tuple[int, int] | tuple[int, int, int]) -> None:
     if len(t) == 2:
         a, b = t   # Ok

From fbb77c31ad48e08105b5d02c2888d803bdcd6fc5 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 13 Nov 2023 16:22:25 +0000
Subject: [PATCH 253/288] Special-case unions in polymorphic inference (#16461)

Fixes https://github.com/python/mypy/issues/16451

This special-casing is unfortunate, but this is the best I came up with
so far.
---
 mypy/solve.py                                 | 53 +++++++++++++++----
 test-data/unit/check-inference.test           | 21 ++++++++
 .../unit/check-parameter-specification.test   | 22 ++++++++
 3 files changed, 87 insertions(+), 9 deletions(-)

diff --git a/mypy/solve.py b/mypy/solve.py
index efe8e487c506..9770364bf892 100644
--- a/mypy/solve.py
+++ b/mypy/solve.py
@@ -6,7 +6,7 @@
 from typing import Iterable, Sequence
 from typing_extensions import TypeAlias as _TypeAlias
 
-from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints
+from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints, neg_op
 from mypy.expandtype import expand_type
 from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort
 from mypy.join import join_types
@@ -69,6 +69,10 @@ def solve_constraints(
         extra_vars.extend([v.id for v in c.extra_tvars if v.id not in vars + extra_vars])
         originals.update({v.id: v for v in c.extra_tvars if v.id not in originals})
 
+    if allow_polymorphic:
+        # Constraints inferred from unions require special handling in polymorphic inference.
+        constraints = skip_reverse_union_constraints(constraints)
+
     # Collect a list of constraints for each type variable.
     cmap: dict[TypeVarId, list[Constraint]] = {tv: [] for tv in vars + extra_vars}
     for con in constraints:
@@ -431,10 +435,7 @@ def transitive_closure(
                     uppers[l] |= uppers[upper]
             for lt in lowers[lower]:
                 for ut in uppers[upper]:
-                    # TODO: what if secondary constraints result in inference
-                    # against polymorphic actual (also in below branches)?
-                    remaining |= set(infer_constraints(lt, ut, SUBTYPE_OF))
-                    remaining |= set(infer_constraints(ut, lt, SUPERTYPE_OF))
+                    add_secondary_constraints(remaining, lt, ut)
         elif c.op == SUBTYPE_OF:
             if c.target in uppers[c.type_var]:
                 continue
@@ -442,8 +443,7 @@ def transitive_closure(
                 if (l, c.type_var) in graph:
                     uppers[l].add(c.target)
             for lt in lowers[c.type_var]:
-                remaining |= set(infer_constraints(lt, c.target, SUBTYPE_OF))
-                remaining |= set(infer_constraints(c.target, lt, SUPERTYPE_OF))
+                add_secondary_constraints(remaining, lt, c.target)
         else:
             assert c.op == SUPERTYPE_OF
             if c.target in lowers[c.type_var]:
@@ -452,11 +452,24 @@ def transitive_closure(
                 if (c.type_var, u) in graph:
                     lowers[u].add(c.target)
             for ut in uppers[c.type_var]:
-                remaining |= set(infer_constraints(ut, c.target, SUPERTYPE_OF))
-                remaining |= set(infer_constraints(c.target, ut, SUBTYPE_OF))
+                add_secondary_constraints(remaining, c.target, ut)
     return graph, lowers, uppers
 
 
+def add_secondary_constraints(cs: set[Constraint], lower: Type, upper: Type) -> None:
+    """Add secondary constraints inferred between lower and upper (in place)."""
+    if isinstance(get_proper_type(upper), UnionType) and isinstance(
+        get_proper_type(lower), UnionType
+    ):
+        # When both types are unions, this can lead to inferring spurious constraints,
+        # for example Union[T, int] <: S <: Union[T, int] may infer T <: int.
+        # To avoid this, just skip them for now.
+        return
+    # TODO: what if secondary constraints result in inference against polymorphic actual?
+    cs.update(set(infer_constraints(lower, upper, SUBTYPE_OF)))
+    cs.update(set(infer_constraints(upper, lower, SUPERTYPE_OF)))
+
+
 def compute_dependencies(
     tvars: list[TypeVarId], graph: Graph, lowers: Bounds, uppers: Bounds
 ) -> dict[TypeVarId, list[TypeVarId]]:
@@ -494,6 +507,28 @@ def check_linear(scc: set[TypeVarId], lowers: Bounds, uppers: Bounds) -> bool:
     return True
 
 
+def skip_reverse_union_constraints(cs: list[Constraint]) -> list[Constraint]:
+    """Avoid ambiguities for constraints inferred from unions during polymorphic inference.
+
+    Polymorphic inference implicitly relies on assumption that a reverse of a linear constraint
+    is a linear constraint. This is however not true in presence of union types, for example
+    T :> Union[S, int] vs S <: T. Trying to solve such constraints would be detected ambiguous
+    as (T, S) form a non-linear SCC. However, simply removing the linear part results in a valid
+    solution T = Union[S, int], S = <free>.
+
+    TODO: a cleaner solution may be to avoid inferring such constraints in first place, but
+    this would require passing around a flag through all infer_constraints() calls.
+    """
+    reverse_union_cs = set()
+    for c in cs:
+        p_target = get_proper_type(c.target)
+        if isinstance(p_target, UnionType):
+            for item in p_target.items:
+                if isinstance(item, TypeVarType):
+                    reverse_union_cs.add(Constraint(item, neg_op(c.op), c.origin_type_var))
+    return [c for c in cs if c not in reverse_union_cs]
+
+
 def get_vars(target: Type, vars: list[TypeVarId]) -> set[TypeVarId]:
     """Find type variables for which we are solving in a target type."""
     return {tv.id for tv in get_all_type_vars(target)} & set(vars)
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 0d162238450a..6c98ba2088b1 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -3767,3 +3767,24 @@ def f(values: List[T]) -> T: ...
 x = foo(f([C()]))
 reveal_type(x)  # N: Revealed type is "__main__.C"
 [builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericCallableUnion]
+from typing import Callable, TypeVar, List, Union
+
+T = TypeVar("T")
+S = TypeVar("S")
+
+def dec(f: Callable[[S], T]) -> Callable[[S], List[T]]: ...
+@dec
+def func(arg: T) -> Union[T, str]:
+    ...
+reveal_type(func)  # N: Revealed type is "def [S] (S`1) -> builtins.list[Union[S`1, builtins.str]]"
+reveal_type(func(42))  # N: Revealed type is "builtins.list[Union[builtins.int, builtins.str]]"
+
+def dec2(f: Callable[[S], List[T]]) -> Callable[[S], T]: ...
+@dec2
+def func2(arg: T) -> List[Union[T, str]]:
+    ...
+reveal_type(func2)  # N: Revealed type is "def [S] (S`4) -> Union[S`4, builtins.str]"
+reveal_type(func2(42))  # N: Revealed type is "Union[builtins.int, builtins.str]"
+[builtins fixtures/list.pyi]
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index db8c76fd21e9..eb6fbf07f045 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -2086,3 +2086,25 @@ reveal_type(d(b, f1))  # E: Cannot infer type argument 1 of "d" \
                        # N: Revealed type is "def (*Any, **Any)"
 reveal_type(d(b, f2))  # N: Revealed type is "def (builtins.int)"
 [builtins fixtures/paramspec.pyi]
+
+[case testInferenceAgainstGenericCallableUnionParamSpec]
+from typing import Callable, TypeVar, List, Union
+from typing_extensions import ParamSpec
+
+T = TypeVar("T")
+P = ParamSpec("P")
+
+def dec(f: Callable[P, T]) -> Callable[P, List[T]]: ...
+@dec
+def func(arg: T) -> Union[T, str]:
+    ...
+reveal_type(func)  # N: Revealed type is "def [T] (arg: T`-1) -> builtins.list[Union[T`-1, builtins.str]]"
+reveal_type(func(42))  # N: Revealed type is "builtins.list[Union[builtins.int, builtins.str]]"
+
+def dec2(f: Callable[P, List[T]]) -> Callable[P, T]: ...
+@dec2
+def func2(arg: T) -> List[Union[T, str]]:
+    ...
+reveal_type(func2)  # N: Revealed type is "def [T] (arg: T`-1) -> Union[T`-1, builtins.str]"
+reveal_type(func2(42))  # N: Revealed type is "Union[builtins.int, builtins.str]"
+[builtins fixtures/paramspec.pyi]

From c6cb3c6282003dd3dadcf028735f9ba6190a0c84 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Mon, 13 Nov 2023 16:59:50 +0000
Subject: [PATCH 254/288] Ignore position if imprecise arguments are matched by
 name (#16471)

Fixes https://github.com/python/mypy/issues/16405
Fixes https://github.com/python/mypy/issues/16412

Imprecise argument kinds inference was added a while ago to support
various edge cases with `ParamSpec`. This feature required mapping
actual kinds to formal kinds, which is in general undecidable. At that
time we decided to not add much special-casing, and wait for some real
use-cases. So far there are two relevant issues, and it looks like both
of them can be fixed with simple special-casing: ignore argument
positions in subtyping if arguments can be matched by name. This adds
minor unsafety, and generally doesn't look bad, so I think we should go
ahead with it.

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 mypy/subtypes.py                              | 24 ++++++--
 .../unit/check-parameter-specification.test   | 55 +++++++++++++++++++
 2 files changed, 73 insertions(+), 6 deletions(-)

diff --git a/mypy/subtypes.py b/mypy/subtypes.py
index 7e37751b1c15..4fd3f8ff98ca 100644
--- a/mypy/subtypes.py
+++ b/mypy/subtypes.py
@@ -1651,7 +1651,12 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
                 continue
             return False
         if not are_args_compatible(
-            left_arg, right_arg, ignore_pos_arg_names, allow_partial_overlap, is_compat
+            left_arg,
+            right_arg,
+            is_compat,
+            ignore_pos_arg_names=ignore_pos_arg_names,
+            allow_partial_overlap=allow_partial_overlap,
+            allow_imprecise_kinds=right.imprecise_arg_kinds,
         ):
             return False
 
@@ -1676,9 +1681,9 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
             if not are_args_compatible(
                 left_by_position,
                 right_by_position,
-                ignore_pos_arg_names,
-                allow_partial_overlap,
                 is_compat,
+                ignore_pos_arg_names=ignore_pos_arg_names,
+                allow_partial_overlap=allow_partial_overlap,
             ):
                 return False
             i += 1
@@ -1711,7 +1716,11 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
                 continue
 
             if not are_args_compatible(
-                left_by_name, right_by_name, ignore_pos_arg_names, allow_partial_overlap, is_compat
+                left_by_name,
+                right_by_name,
+                is_compat,
+                ignore_pos_arg_names=ignore_pos_arg_names,
+                allow_partial_overlap=allow_partial_overlap,
             ):
                 return False
 
@@ -1735,6 +1744,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
             and right_by_name != right_by_pos
             and (right_by_pos.required or right_by_name.required)
             and strict_concatenate_check
+            and not right.imprecise_arg_kinds
         ):
             return False
 
@@ -1749,9 +1759,11 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N
 def are_args_compatible(
     left: FormalArgument,
     right: FormalArgument,
+    is_compat: Callable[[Type, Type], bool],
+    *,
     ignore_pos_arg_names: bool,
     allow_partial_overlap: bool,
-    is_compat: Callable[[Type, Type], bool],
+    allow_imprecise_kinds: bool = False,
 ) -> bool:
     if left.required and right.required:
         # If both arguments are required allow_partial_overlap has no effect.
@@ -1779,7 +1791,7 @@ def is_different(left_item: object | None, right_item: object | None) -> bool:
             return False
 
     # If right is at a specific position, left must have the same:
-    if is_different(left.pos, right.pos):
+    if is_different(left.pos, right.pos) and not allow_imprecise_kinds:
         return False
 
     # If right's argument is optional, left's must also be
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index eb6fbf07f045..d65a0214b599 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -1687,9 +1687,18 @@ P = ParamSpec("P")
 T = TypeVar("T")
 
 def apply(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> None: ...
+
 def test(x: int) -> int: ...
 apply(apply, test, x=42)  # OK
 apply(apply, test, 42)  # Also OK (but requires some special casing)
+apply(apply, test, "bad")  # E: Argument 1 to "apply" has incompatible type "Callable[[Callable[P, T], **P], None]"; expected "Callable[[Callable[[int], int], str], None]"
+
+def test2(x: int, y: str) -> None: ...
+apply(apply, test2, 42, "yes")
+apply(apply, test2, "no", 42)  # E: Argument 1 to "apply" has incompatible type "Callable[[Callable[P, T], **P], None]"; expected "Callable[[Callable[[int, str], None], str, int], None]"
+apply(apply, test2, x=42, y="yes")
+apply(apply, test2, y="yes", x=42)
+apply(apply, test2, y=42, x="no")  # E: Argument 1 to "apply" has incompatible type "Callable[[Callable[P, T], **P], None]"; expected "Callable[[Callable[[int, str], None], int, str], None]"
 [builtins fixtures/paramspec.pyi]
 
 [case testParamSpecApplyPosVsNamedOptional]
@@ -2087,6 +2096,52 @@ reveal_type(d(b, f1))  # E: Cannot infer type argument 1 of "d" \
 reveal_type(d(b, f2))  # N: Revealed type is "def (builtins.int)"
 [builtins fixtures/paramspec.pyi]
 
+[case testParamSpecGenericWithNamedArg1]
+from typing import Callable, TypeVar
+from typing_extensions import ParamSpec
+
+R = TypeVar("R")
+P = ParamSpec("P")
+
+def run(func: Callable[[], R], *args: object, backend: str = "asyncio") -> R: ...
+class Result: ...
+def run_portal() -> Result: ...
+def submit(func: Callable[P, R], /, *args: P.args, **kwargs: P.kwargs) -> R: ...
+
+reveal_type(submit(  # N: Revealed type is "__main__.Result"
+    run,
+    run_portal,
+    backend="asyncio",
+))
+submit(
+    run,  # E: Argument 1 to "submit" has incompatible type "Callable[[Callable[[], R], VarArg(object), DefaultNamedArg(str, 'backend')], R]"; expected "Callable[[Callable[[], Result], int], Result]"
+    run_portal,
+    backend=int(),
+)
+[builtins fixtures/paramspec.pyi]
+
+[case testParamSpecGenericWithNamedArg2]
+from typing import Callable, TypeVar, Type
+from typing_extensions import ParamSpec
+
+P= ParamSpec("P")
+T = TypeVar("T")
+
+def smoke_testable(*args: P.args, **kwargs: P.kwargs) -> Callable[[Callable[P, T]], Type[T]]:
+    ...
+
+@smoke_testable(name="bob", size=512, flt=0.5)
+class SomeClass:
+    def __init__(self, size: int, name: str, flt: float) -> None:
+        pass
+
+# Error message is confusing, but this is a known issue, see #4530.
+@smoke_testable(name=42, size="bad", flt=0.5)  # E: Argument 1 has incompatible type "Type[OtherClass]"; expected "Callable[[int, str, float], OtherClass]"
+class OtherClass:
+    def __init__(self, size: int, name: str, flt: float) -> None:
+        pass
+[builtins fixtures/paramspec.pyi]
+
 [case testInferenceAgainstGenericCallableUnionParamSpec]
 from typing import Callable, TypeVar, List, Union
 from typing_extensions import ParamSpec

From 0699dde8dc2633861a65ac43701eda09e79de366 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 15 Nov 2023 10:20:25 +0000
Subject: [PATCH 255/288] Fix crash on strict-equality with recursive types
 (#16483)

Fixes https://github.com/python/mypy/issues/16473

Potentially we can turn this helper function into a proper visitor, but
I don't think it is worth it as of right now.

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
---
 mypy/checkexpr.py                     | 21 +++++++++++++-----
 mypy/meet.py                          | 12 +++++++++-
 test-data/unit/check-expressions.test | 32 +++++++++++++++++++++++++++
 test-data/unit/fixtures/list.pyi      |  1 +
 4 files changed, 60 insertions(+), 6 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index c87d1f6cd31c..da61833bbe5b 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -3617,8 +3617,9 @@ def dangerous_comparison(
         self,
         left: Type,
         right: Type,
-        original_container: Type | None = None,
         *,
+        original_container: Type | None = None,
+        seen_types: set[tuple[Type, Type]] | None = None,
         prefer_literal: bool = True,
     ) -> bool:
         """Check for dangerous non-overlapping comparisons like 42 == 'no'.
@@ -3639,6 +3640,12 @@ def dangerous_comparison(
         if not self.chk.options.strict_equality:
             return False
 
+        if seen_types is None:
+            seen_types = set()
+        if (left, right) in seen_types:
+            return False
+        seen_types.add((left, right))
+
         left, right = get_proper_types((left, right))
 
         # We suppress the error if there is a custom __eq__() method on either
@@ -3694,17 +3701,21 @@ def dangerous_comparison(
                 abstract_set = self.chk.lookup_typeinfo("typing.AbstractSet")
                 left = map_instance_to_supertype(left, abstract_set)
                 right = map_instance_to_supertype(right, abstract_set)
-                return self.dangerous_comparison(left.args[0], right.args[0])
+                return self.dangerous_comparison(
+                    left.args[0], right.args[0], seen_types=seen_types
+                )
             elif left.type.has_base("typing.Mapping") and right.type.has_base("typing.Mapping"):
                 # Similar to above: Mapping ignores the classes, it just compares items.
                 abstract_map = self.chk.lookup_typeinfo("typing.Mapping")
                 left = map_instance_to_supertype(left, abstract_map)
                 right = map_instance_to_supertype(right, abstract_map)
                 return self.dangerous_comparison(
-                    left.args[0], right.args[0]
-                ) or self.dangerous_comparison(left.args[1], right.args[1])
+                    left.args[0], right.args[0], seen_types=seen_types
+                ) or self.dangerous_comparison(left.args[1], right.args[1], seen_types=seen_types)
             elif left_name in ("builtins.list", "builtins.tuple") and right_name == left_name:
-                return self.dangerous_comparison(left.args[0], right.args[0])
+                return self.dangerous_comparison(
+                    left.args[0], right.args[0], seen_types=seen_types
+                )
             elif left_name in OVERLAPPING_BYTES_ALLOWLIST and right_name in (
                 OVERLAPPING_BYTES_ALLOWLIST
             ):
diff --git a/mypy/meet.py b/mypy/meet.py
index 610185d6bbbf..df8b960cdf3f 100644
--- a/mypy/meet.py
+++ b/mypy/meet.py
@@ -262,6 +262,7 @@ def is_overlapping_types(
     ignore_promotions: bool = False,
     prohibit_none_typevar_overlap: bool = False,
     ignore_uninhabited: bool = False,
+    seen_types: set[tuple[Type, Type]] | None = None,
 ) -> bool:
     """Can a value of type 'left' also be of type 'right' or vice-versa?
 
@@ -275,18 +276,27 @@ def is_overlapping_types(
         # A type guard forces the new type even if it doesn't overlap the old.
         return True
 
+    if seen_types is None:
+        seen_types = set()
+    if (left, right) in seen_types:
+        return True
+    if isinstance(left, TypeAliasType) and isinstance(right, TypeAliasType):
+        seen_types.add((left, right))
+
     left, right = get_proper_types((left, right))
 
     def _is_overlapping_types(left: Type, right: Type) -> bool:
         """Encode the kind of overlapping check to perform.
 
-        This function mostly exists so we don't have to repeat keyword arguments everywhere."""
+        This function mostly exists, so we don't have to repeat keyword arguments everywhere.
+        """
         return is_overlapping_types(
             left,
             right,
             ignore_promotions=ignore_promotions,
             prohibit_none_typevar_overlap=prohibit_none_typevar_overlap,
             ignore_uninhabited=ignore_uninhabited,
+            seen_types=seen_types.copy(),
         )
 
     # We should never encounter this type.
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
index 4ac5512580d2..8fe68365e5ac 100644
--- a/test-data/unit/check-expressions.test
+++ b/test-data/unit/check-expressions.test
@@ -2378,6 +2378,38 @@ assert a == b
 [builtins fixtures/dict.pyi]
 [typing fixtures/typing-full.pyi]
 
+[case testStrictEqualityWithRecursiveMapTypes]
+# flags: --strict-equality
+from typing import Dict
+
+R = Dict[str, R]
+
+a: R
+b: R
+assert a == b
+
+R2 = Dict[int, R2]
+c: R2
+assert a == c  # E: Non-overlapping equality check (left operand type: "Dict[str, R]", right operand type: "Dict[int, R2]")
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-full.pyi]
+
+[case testStrictEqualityWithRecursiveListTypes]
+# flags: --strict-equality
+from typing import List, Union
+
+R = List[Union[str, R]]
+
+a: R
+b: R
+assert a == b
+
+R2 = List[Union[int, R2]]
+c: R2
+assert a == c
+[builtins fixtures/list.pyi]
+[typing fixtures/typing-full.pyi]
+
 [case testUnimportedHintAny]
 def f(x: Any) -> None:  # E: Name "Any" is not defined \
                         # N: Did you forget to import it from "typing"? (Suggestion: "from typing import Any")
diff --git a/test-data/unit/fixtures/list.pyi b/test-data/unit/fixtures/list.pyi
index 90fbabe8bc92..3dcdf18b2faa 100644
--- a/test-data/unit/fixtures/list.pyi
+++ b/test-data/unit/fixtures/list.pyi
@@ -6,6 +6,7 @@ T = TypeVar('T')
 
 class object:
     def __init__(self) -> None: pass
+    def __eq__(self, other: object) -> bool: pass
 
 class type: pass
 class ellipsis: pass

From b425bd60672881addcd55dfae61993fa2ac03cca Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Thu, 16 Nov 2023 11:56:53 +0000
Subject: [PATCH 256/288] [mypyc] Fix regression with nested functions (#16484)

Fixes https://github.com/python/mypy/issues/16480

Fix is straightforward, but also suspicious, since I am not sure how it
ever worked without this.
---
 mypyc/irbuild/prebuildvisitor.py   |  3 ++-
 mypyc/test-data/run-functions.test | 22 ++++++++++++++++++++++
 2 files changed, 24 insertions(+), 1 deletion(-)

diff --git a/mypyc/irbuild/prebuildvisitor.py b/mypyc/irbuild/prebuildvisitor.py
index 519b3445e925..17f907d42111 100644
--- a/mypyc/irbuild/prebuildvisitor.py
+++ b/mypyc/irbuild/prebuildvisitor.py
@@ -119,9 +119,10 @@ def visit_decorator(self, dec: Decorator) -> None:
                 self.funcs_to_decorators[dec.func] = decorators_to_store
         super().visit_decorator(dec)
 
-    def visit_func_def(self, fdef: FuncItem) -> None:
+    def visit_func_def(self, fdef: FuncDef) -> None:
         # TODO: What about overloaded functions?
         self.visit_func(fdef)
+        self.visit_symbol_node(fdef)
 
     def visit_lambda_expr(self, expr: LambdaExpr) -> None:
         self.visit_func(expr)
diff --git a/mypyc/test-data/run-functions.test b/mypyc/test-data/run-functions.test
index bd8f1a9197dd..cf519f30dad8 100644
--- a/mypyc/test-data/run-functions.test
+++ b/mypyc/test-data/run-functions.test
@@ -1286,3 +1286,25 @@ def bar() -> None:
 bar()
 [out]
 {'__module__': 'native', '__name__': 'bar', '__qualname__': 'bar', '__doc__': None, '__wrapped__': <built-in function bar>}
+
+[case testCallNestedFunctionWithNamed]
+def f() -> None:
+    def a() -> None:
+        pass
+    def b() -> None:
+        a()
+    b()
+[file driver.py]
+from native import f
+f()
+
+[case testCallNestedFunctionWithLambda]
+def f(x: int) -> int:
+    def inc(x: int) -> int:
+        return x + 1
+    return (lambda x: inc(x))(1)
+[file driver.py]
+from native import f
+print(f(1))
+[out]
+2

From 8c8aa10e2976612ca0ca1fa1e5655fbd535de6f7 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 17 Nov 2023 00:48:24 +0000
Subject: [PATCH 257/288] Fix crash on unimported Any in TypedDict (#16510)

Fixes https://github.com/python/mypy/issues/16498

Fix is trivial: no operations can be done with `Required` types before
unwrapping (because they are not real types).

---------

Co-authored-by: Jelle Zijlstra <jelle.zijlstra@gmail.com>
---
 mypy/semanal_typeddict.py           | 20 +++++++++++---------
 test-data/unit/check-typeddict.test | 12 ++++++++++++
 2 files changed, 23 insertions(+), 9 deletions(-)

diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py
index 5104d31f5c26..a013cc040e89 100644
--- a/mypy/semanal_typeddict.py
+++ b/mypy/semanal_typeddict.py
@@ -394,6 +394,17 @@ def check_typeddict(
             types = [  # unwrap Required[T] to just T
                 t.item if isinstance(t, RequiredType) else t for t in types
             ]
+
+            # Perform various validations after unwrapping.
+            for t in types:
+                check_for_explicit_any(
+                    t, self.options, self.api.is_typeshed_stub_file, self.msg, context=call
+                )
+            if self.options.disallow_any_unimported:
+                for t in types:
+                    if has_any_from_unimported_type(t):
+                        self.msg.unimported_type_becomes_any("Type of a TypedDict key", t, call)
+
             existing_info = None
             if isinstance(node.analyzed, TypedDictExpr):
                 existing_info = node.analyzed.info
@@ -451,15 +462,6 @@ def parse_typeddict_args(
             # One of the types is not ready, defer.
             return None
         items, types, ok = res
-        for t in types:
-            check_for_explicit_any(
-                t, self.options, self.api.is_typeshed_stub_file, self.msg, context=call
-            )
-
-        if self.options.disallow_any_unimported:
-            for t in types:
-                if has_any_from_unimported_type(t):
-                    self.msg.unimported_type_becomes_any("Type of a TypedDict key", t, dictexpr)
         assert total is not None
         return args[0].value, items, types, total, tvar_defs, ok
 
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index c1c791304a15..c584906dd965 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -3396,3 +3396,15 @@ reveal_type(b["a"])  # N: Revealed type is "Union[builtins.str, None]"
 reveal_type(b["g"])  # N: Revealed type is "Union[builtins.int, None]"
 [builtins fixtures/dict.pyi]
 [typing fixtures/typing-typeddict.pyi]
+
+[case testNoCrashOnUnImportedAnyNotRequired]
+# flags: --disallow-any-unimported
+from typing import NotRequired, Required, TypedDict
+from thismoduledoesntexist import T  # type: ignore[import]
+
+B = TypedDict("B", {  # E: Type of a TypedDict key becomes "Any" due to an unfollowed import
+    "T1": NotRequired[T],
+    "T2": Required[T],
+})
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-typeddict.pyi]

From 5489fd33f08fd93eff167eda84b00eeab939a419 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Fri, 17 Nov 2023 20:49:33 +0000
Subject: [PATCH 258/288] Fix crash on invalid enum in method (#16511)

Fixes https://github.com/python/mypy/issues/16163

Fix is straightforward: I simply copy the logic we have for invalid
TypedDicts/NamedTuples.
---
 mypy/semanal_enum.py                  | 11 +++++++----
 test-data/unit/check-incremental.test | 23 +++++++++++++++++++++++
 2 files changed, 30 insertions(+), 4 deletions(-)

diff --git a/mypy/semanal_enum.py b/mypy/semanal_enum.py
index cd11204c3bcc..528b0519cca1 100644
--- a/mypy/semanal_enum.py
+++ b/mypy/semanal_enum.py
@@ -106,16 +106,19 @@ class A(enum.Enum):
         items, values, ok = self.parse_enum_call_args(call, fullname.split(".")[-1])
         if not ok:
             # Error. Construct dummy return value.
-            info = self.build_enum_call_typeinfo(var_name, [], fullname, node.line)
+            name = var_name
+            if is_func_scope:
+                name += "@" + str(call.line)
+            info = self.build_enum_call_typeinfo(name, [], fullname, node.line)
         else:
             name = cast(StrExpr, call.args[0]).value
             if name != var_name or is_func_scope:
                 # Give it a unique name derived from the line number.
                 name += "@" + str(call.line)
             info = self.build_enum_call_typeinfo(name, items, fullname, call.line)
-            # Store generated TypeInfo under both names, see semanal_namedtuple for more details.
-            if name != var_name or is_func_scope:
-                self.api.add_symbol_skip_local(name, info)
+        # Store generated TypeInfo under both names, see semanal_namedtuple for more details.
+        if name != var_name or is_func_scope:
+            self.api.add_symbol_skip_local(name, info)
         call.analyzed = EnumCallExpr(info, items, values)
         call.analyzed.set_line(call)
         info.line = node.line
diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test
index 806a585bff39..2c7d908c5f5b 100644
--- a/test-data/unit/check-incremental.test
+++ b/test-data/unit/check-incremental.test
@@ -6560,3 +6560,26 @@ class C:
 [out]
 [out2]
 tmp/a.py:3: note: Revealed type is "TypedDict('b.C.Hidden@5', {'x': builtins.int})"
+
+[case testNoIncrementalCrashOnInvalidEnumMethod]
+import a
+[file a.py]
+from lib import TheClass
+[file a.py.2]
+from lib import TheClass
+x: TheClass
+reveal_type(x.enum_type)
+[file lib.py]
+import enum
+
+class TheClass:
+    def __init__(self) -> None:
+        names = ["foo"]
+        pyenum = enum.Enum('Blah', {  # type: ignore[misc]
+            x.upper(): x
+            for x in names
+        })
+        self.enum_type = pyenum
+[out]
+[out2]
+tmp/a.py:3: note: Revealed type is "def (value: builtins.object) -> lib.TheClass.pyenum@6"

From 1cc62a211d21cca2d3bf44957d1635d0a02fef30 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
 <41898282+github-actions[bot]@users.noreply.github.com>
Date: Fri, 17 Nov 2023 13:08:54 -0800
Subject: [PATCH 259/288] Sync typeshed (#16493)

Source commit:

https://github.com/python/typeshed/commit/643d911f4fb434176c7f6a342db9109431259273
---
 mypy/typeshed/stdlib/_ast.pyi                 |  2 +-
 mypy/typeshed/stdlib/_locale.pyi              |  4 +-
 mypy/typeshed/stdlib/_typeshed/__init__.pyi   |  4 ++
 mypy/typeshed/stdlib/_typeshed/wsgi.pyi       |  2 +-
 mypy/typeshed/stdlib/_typeshed/xml.pyi        |  6 +--
 mypy/typeshed/stdlib/_warnings.pyi            | 31 ++++++++++--
 mypy/typeshed/stdlib/argparse.pyi             |  2 +-
 mypy/typeshed/stdlib/ast.pyi                  | 11 +++--
 mypy/typeshed/stdlib/asyncio/base_events.pyi  |  2 +-
 mypy/typeshed/stdlib/asyncio/events.pyi       |  2 +-
 .../stdlib/asyncio/proactor_events.pyi        | 13 +----
 mypy/typeshed/stdlib/asyncio/subprocess.pyi   |  6 +--
 mypy/typeshed/stdlib/asyncio/unix_events.pyi  |  9 +---
 .../typeshed/stdlib/asyncio/windows_utils.pyi | 12 +----
 mypy/typeshed/stdlib/builtins.pyi             | 18 +++----
 mypy/typeshed/stdlib/cmath.pyi                |  2 +-
 mypy/typeshed/stdlib/codecs.pyi               |  4 +-
 mypy/typeshed/stdlib/collections/__init__.pyi |  7 +++
 mypy/typeshed/stdlib/compileall.pyi           |  2 +-
 mypy/typeshed/stdlib/contextlib.pyi           |  4 +-
 mypy/typeshed/stdlib/contextvars.pyi          | 15 ++----
 mypy/typeshed/stdlib/dbm/__init__.pyi         |  2 +-
 mypy/typeshed/stdlib/doctest.pyi              |  4 +-
 mypy/typeshed/stdlib/email/headerregistry.pyi |  4 +-
 mypy/typeshed/stdlib/http/client.pyi          |  2 +-
 mypy/typeshed/stdlib/imghdr.pyi               |  4 +-
 mypy/typeshed/stdlib/imp.pyi                  |  2 +-
 mypy/typeshed/stdlib/importlib/abc.pyi        |  8 ++--
 mypy/typeshed/stdlib/ipaddress.pyi            |  4 +-
 mypy/typeshed/stdlib/itertools.pyi            | 15 ++++--
 mypy/typeshed/stdlib/locale.pyi               |  4 +-
 mypy/typeshed/stdlib/logging/__init__.pyi     | 14 +++++-
 mypy/typeshed/stdlib/multiprocessing/pool.pyi |  4 +-
 .../stdlib/multiprocessing/sharedctypes.pyi   |  2 +-
 mypy/typeshed/stdlib/os/__init__.pyi          | 24 ++++++----
 mypy/typeshed/stdlib/smtplib.pyi              |  4 +-
 mypy/typeshed/stdlib/sqlite3/dbapi2.pyi       |  6 +--
 mypy/typeshed/stdlib/subprocess.pyi           | 48 +++++++++----------
 mypy/typeshed/stdlib/sys/__init__.pyi         |  4 +-
 mypy/typeshed/stdlib/tkinter/__init__.pyi     | 19 ++++----
 mypy/typeshed/stdlib/tkinter/dnd.pyi          |  2 +-
 mypy/typeshed/stdlib/types.pyi                |  2 +-
 mypy/typeshed/stdlib/typing.pyi               |  6 +--
 mypy/typeshed/stdlib/unittest/case.pyi        | 22 ++++-----
 mypy/typeshed/stdlib/unittest/main.pyi        |  2 +-
 mypy/typeshed/stdlib/unittest/util.pyi        |  4 +-
 mypy/typeshed/stdlib/urllib/request.pyi       |  3 +-
 .../typeshed/stdlib/xml/etree/ElementTree.pyi |  4 +-
 mypy/typeshed/stdlib/xml/sax/saxutils.pyi     |  2 +-
 49 files changed, 208 insertions(+), 171 deletions(-)

diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi
index 402b770c0462..0302133fc6f9 100644
--- a/mypy/typeshed/stdlib/_ast.pyi
+++ b/mypy/typeshed/stdlib/_ast.pyi
@@ -553,7 +553,7 @@ if sys.version_info >= (3, 10):
 
     class MatchSingleton(pattern):
         __match_args__ = ("value",)
-        value: Literal[True, False, None]
+        value: Literal[True, False] | None
 
     class MatchSequence(pattern):
         __match_args__ = ("patterns",)
diff --git a/mypy/typeshed/stdlib/_locale.pyi b/mypy/typeshed/stdlib/_locale.pyi
index 2b2fe03e4510..d7399f15e1a3 100644
--- a/mypy/typeshed/stdlib/_locale.pyi
+++ b/mypy/typeshed/stdlib/_locale.pyi
@@ -1,6 +1,6 @@
 import sys
 from _typeshed import StrPath
-from collections.abc import Iterable, Mapping
+from collections.abc import Mapping
 
 LC_CTYPE: int
 LC_COLLATE: int
@@ -10,7 +10,7 @@ LC_NUMERIC: int
 LC_ALL: int
 CHAR_MAX: int
 
-def setlocale(category: int, locale: str | Iterable[str | None] | None = None) -> str: ...
+def setlocale(__category: int, __locale: str | None = None) -> str: ...
 def localeconv() -> Mapping[str, int | str | list[int]]: ...
 
 if sys.version_info >= (3, 11):
diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi
index ad214a2a5e0d..33659cf31a12 100644
--- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi
+++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi
@@ -236,6 +236,10 @@ class SupportsNoArgReadline(Protocol[_T_co]):
 class SupportsWrite(Protocol[_T_contra]):
     def write(self, __s: _T_contra) -> object: ...
 
+# stable
+class SupportsFlush(Protocol):
+    def flush(self) -> object: ...
+
 # Unfortunately PEP 688 does not allow us to distinguish read-only
 # from writable buffers. We use these aliases for readability for now.
 # Perhaps a future extension of the buffer protocol will allow us to
diff --git a/mypy/typeshed/stdlib/_typeshed/wsgi.pyi b/mypy/typeshed/stdlib/_typeshed/wsgi.pyi
index de731aea918b..e8ebf6409e7f 100644
--- a/mypy/typeshed/stdlib/_typeshed/wsgi.pyi
+++ b/mypy/typeshed/stdlib/_typeshed/wsgi.pyi
@@ -11,7 +11,7 @@ from typing import Any, Protocol
 from typing_extensions import TypeAlias
 
 class _Readable(Protocol):
-    def read(self, size: int = ...) -> bytes: ...
+    def read(self, __size: int = ...) -> bytes: ...
     # Optional: def close(self) -> object: ...
 
 if sys.version_info >= (3, 11):
diff --git a/mypy/typeshed/stdlib/_typeshed/xml.pyi b/mypy/typeshed/stdlib/_typeshed/xml.pyi
index 231c2b86e912..46c5fab097c4 100644
--- a/mypy/typeshed/stdlib/_typeshed/xml.pyi
+++ b/mypy/typeshed/stdlib/_typeshed/xml.pyi
@@ -4,6 +4,6 @@ from typing import Any, Protocol
 
 # As defined https://docs.python.org/3/library/xml.dom.html#domimplementation-objects
 class DOMImplementation(Protocol):
-    def hasFeature(self, feature: str, version: str | None) -> bool: ...
-    def createDocument(self, namespaceUri: str, qualifiedName: str, doctype: Any | None) -> Any: ...
-    def createDocumentType(self, qualifiedName: str, publicId: str, systemId: str) -> Any: ...
+    def hasFeature(self, __feature: str, __version: str | None) -> bool: ...
+    def createDocument(self, __namespaceUri: str, __qualifiedName: str, __doctype: Any | None) -> Any: ...
+    def createDocumentType(self, __qualifiedName: str, __publicId: str, __systemId: str) -> Any: ...
diff --git a/mypy/typeshed/stdlib/_warnings.pyi b/mypy/typeshed/stdlib/_warnings.pyi
index 0981dfeaafee..2e571e676c97 100644
--- a/mypy/typeshed/stdlib/_warnings.pyi
+++ b/mypy/typeshed/stdlib/_warnings.pyi
@@ -1,13 +1,36 @@
+import sys
 from typing import Any, overload
 
 _defaultaction: str
 _onceregistry: dict[Any, Any]
 filters: list[tuple[str, str | None, type[Warning], str | None, int]]
 
-@overload
-def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: ...
-@overload
-def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ...
+if sys.version_info >= (3, 12):
+    @overload
+    def warn(
+        message: str,
+        category: type[Warning] | None = None,
+        stacklevel: int = 1,
+        source: Any | None = None,
+        *,
+        skip_file_prefixes: tuple[str, ...] = (),
+    ) -> None: ...
+    @overload
+    def warn(
+        message: Warning,
+        category: Any = None,
+        stacklevel: int = 1,
+        source: Any | None = None,
+        *,
+        skip_file_prefixes: tuple[str, ...] = (),
+    ) -> None: ...
+
+else:
+    @overload
+    def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: ...
+    @overload
+    def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ...
+
 @overload
 def warn_explicit(
     message: str,
diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi
index 924cc8986114..e947f67edd55 100644
--- a/mypy/typeshed/stdlib/argparse.pyi
+++ b/mypy/typeshed/stdlib/argparse.pyi
@@ -120,7 +120,7 @@ class _ActionsContainer:
     def _handle_conflict_resolve(self, action: Action, conflicting_actions: Iterable[tuple[str, Action]]) -> None: ...
 
 class _FormatterClass(Protocol):
-    def __call__(self, prog: str) -> HelpFormatter: ...
+    def __call__(self, *, prog: str) -> HelpFormatter: ...
 
 class ArgumentParser(_AttributeHolder, _ActionsContainer):
     prog: str
diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi
index a61b4e35fd56..5c9cafc189be 100644
--- a/mypy/typeshed/stdlib/ast.pyi
+++ b/mypy/typeshed/stdlib/ast.pyi
@@ -4,27 +4,30 @@ from _ast import *
 from _typeshed import ReadableBuffer, Unused
 from collections.abc import Iterator
 from typing import Any, TypeVar as _TypeVar, overload
-from typing_extensions import Literal
+from typing_extensions import Literal, deprecated
 
 if sys.version_info >= (3, 8):
     class _ABC(type):
         if sys.version_info >= (3, 9):
             def __init__(cls, *args: Unused) -> None: ...
 
+    @deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
     class Num(Constant, metaclass=_ABC):
         value: int | float | complex
-
+    @deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
     class Str(Constant, metaclass=_ABC):
         value: str
         # Aliases for value, for backwards compatibility
         s: str
-
+    @deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
     class Bytes(Constant, metaclass=_ABC):
         value: bytes
         # Aliases for value, for backwards compatibility
         s: bytes
-
+    @deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
     class NameConstant(Constant, metaclass=_ABC): ...
+
+    @deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
     class Ellipsis(Constant, metaclass=_ABC): ...
 
 if sys.version_info >= (3, 9):
diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi
index e2b55da8c718..cdf295d510d4 100644
--- a/mypy/typeshed/stdlib/asyncio/base_events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi
@@ -423,7 +423,7 @@ class BaseEventLoop(AbstractEventLoop):
         bufsize: Literal[0] = 0,
         encoding: None = None,
         errors: None = None,
-        text: Literal[False, None] = None,
+        text: Literal[False] | None = None,
         **kwargs: Any,
     ) -> tuple[SubprocessTransport, _ProtocolT]: ...
     async def subprocess_exec(
diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi
index cde63b279b0d..4c62043875ba 100644
--- a/mypy/typeshed/stdlib/asyncio/events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/events.pyi
@@ -522,7 +522,7 @@ class AbstractEventLoop:
         bufsize: Literal[0] = 0,
         encoding: None = None,
         errors: None = None,
-        text: Literal[False, None] = ...,
+        text: Literal[False] | None = ...,
         **kwargs: Any,
     ) -> tuple[SubprocessTransport, _ProtocolT]: ...
     @abstractmethod
diff --git a/mypy/typeshed/stdlib/asyncio/proactor_events.pyi b/mypy/typeshed/stdlib/asyncio/proactor_events.pyi
index 33fdf84ade4a..4634bbb2b37c 100644
--- a/mypy/typeshed/stdlib/asyncio/proactor_events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/proactor_events.pyi
@@ -1,19 +1,13 @@
 import sys
 from collections.abc import Mapping
 from socket import socket
-from typing import Any, ClassVar, Protocol
+from typing import Any, ClassVar
 from typing_extensions import Literal
 
 from . import base_events, constants, events, futures, streams, transports
 
 __all__ = ("BaseProactorEventLoop",)
 
-if sys.version_info >= (3, 8):
-    class _WarnCallbackProtocol(Protocol):
-        def __call__(
-            self, message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ...
-        ) -> object: ...
-
 class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport):
     def __init__(
         self,
@@ -24,10 +18,7 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTr
         extra: Mapping[Any, Any] | None = None,
         server: events.AbstractServer | None = None,
     ) -> None: ...
-    if sys.version_info >= (3, 8):
-        def __del__(self, _warn: _WarnCallbackProtocol = ...) -> None: ...
-    else:
-        def __del__(self) -> None: ...
+    def __del__(self) -> None: ...
 
 class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport):
     if sys.version_info >= (3, 10):
diff --git a/mypy/typeshed/stdlib/asyncio/subprocess.pyi b/mypy/typeshed/stdlib/asyncio/subprocess.pyi
index b8877b360527..9b7c82e689bf 100644
--- a/mypy/typeshed/stdlib/asyncio/subprocess.pyi
+++ b/mypy/typeshed/stdlib/asyncio/subprocess.pyi
@@ -54,7 +54,7 @@ if sys.version_info >= (3, 11):
         bufsize: Literal[0] = 0,
         encoding: None = None,
         errors: None = None,
-        text: Literal[False, None] = None,
+        text: Literal[False] | None = None,
         # These parameters are taken by subprocess.Popen, which this ultimately delegates to
         executable: StrOrBytesPath | None = None,
         preexec_fn: Callable[[], Any] | None = None,
@@ -120,7 +120,7 @@ elif sys.version_info >= (3, 10):
         bufsize: Literal[0] = 0,
         encoding: None = None,
         errors: None = None,
-        text: Literal[False, None] = None,
+        text: Literal[False] | None = None,
         # These parameters are taken by subprocess.Popen, which this ultimately delegates to
         executable: StrOrBytesPath | None = None,
         preexec_fn: Callable[[], Any] | None = None,
@@ -185,7 +185,7 @@ else:  # >= 3.9
         bufsize: Literal[0] = 0,
         encoding: None = None,
         errors: None = None,
-        text: Literal[False, None] = None,
+        text: Literal[False] | None = None,
         # These parameters are taken by subprocess.Popen, which this ultimately delegates to
         executable: StrOrBytesPath | None = None,
         preexec_fn: Callable[[], Any] | None = None,
diff --git a/mypy/typeshed/stdlib/asyncio/unix_events.pyi b/mypy/typeshed/stdlib/asyncio/unix_events.pyi
index e28d64b5287b..dc3d3496ae55 100644
--- a/mypy/typeshed/stdlib/asyncio/unix_events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/unix_events.pyi
@@ -84,13 +84,6 @@ if sys.platform != "win32":
     DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy
 
     if sys.version_info >= (3, 8):
-        from typing import Protocol
-
-        class _Warn(Protocol):
-            def __call__(
-                self, message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ...
-            ) -> object: ...
-
         class MultiLoopChildWatcher(AbstractChildWatcher):
             def is_active(self) -> bool: ...
             def close(self) -> None: ...
@@ -109,7 +102,7 @@ if sys.platform != "win32":
             def __exit__(
                 self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
             ) -> None: ...
-            def __del__(self, _warn: _Warn = ...) -> None: ...
+            def __del__(self) -> None: ...
             def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
             def remove_child_handler(self, pid: int) -> bool: ...
             def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
diff --git a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi
index 9f88718b7b70..ed5d8da275c5 100644
--- a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi
+++ b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi
@@ -2,16 +2,12 @@ import subprocess
 import sys
 from collections.abc import Callable
 from types import TracebackType
-from typing import Any, AnyStr, Protocol
+from typing import Any, AnyStr
 from typing_extensions import Literal, Self
 
 if sys.platform == "win32":
     __all__ = ("pipe", "Popen", "PIPE", "PipeHandle")
 
-    class _WarnFunction(Protocol):
-        def __call__(
-            self, message: str, category: type[Warning] = ..., stacklevel: int = ..., source: PipeHandle = ...
-        ) -> object: ...
     BUFSIZE: Literal[8192]
     PIPE = subprocess.PIPE
     STDOUT = subprocess.STDOUT
@@ -19,11 +15,7 @@ if sys.platform == "win32":
 
     class PipeHandle:
         def __init__(self, handle: int) -> None: ...
-        if sys.version_info >= (3, 8):
-            def __del__(self, _warn: _WarnFunction = ...) -> None: ...
-        else:
-            def __del__(self) -> None: ...
-
+        def __del__(self) -> None: ...
         def __enter__(self) -> Self: ...
         def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...
         @property
diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index 96a1d1e31b17..4f04b6286258 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -18,6 +18,7 @@ from _typeshed import (
     SupportsAiter,
     SupportsAnext,
     SupportsDivMod,
+    SupportsFlush,
     SupportsIter,
     SupportsKeysAndGetItem,
     SupportsLenAndGetItem,
@@ -1194,7 +1195,7 @@ if sys.version_info >= (3, 10):
     # See discussion in #7491 and pure-Python implementation of `anext` at https://github.com/python/cpython/blob/ea786a882b9ed4261eafabad6011bc7ef3b5bf94/Lib/test/test_asyncgen.py#L52-L80
     def anext(__i: _SupportsSynchronousAnext[_AwaitableT]) -> _AwaitableT: ...
     @overload
-    async def anext(__i: SupportsAnext[_T], default: _VT) -> _T | _VT: ...
+    async def anext(__i: SupportsAnext[_T], __default: _VT) -> _T | _VT: ...
 
 # compile() returns a CodeType, unless the flags argument includes PyCF_ONLY_AST (=1024),
 # in which case it returns ast.AST. We have overloads for flag 0 (the default) and for
@@ -1340,9 +1341,9 @@ def getattr(__o: object, __name: str, __default: None) -> Any | None: ...
 @overload
 def getattr(__o: object, __name: str, __default: bool) -> Any | bool: ...
 @overload
-def getattr(__o: object, name: str, __default: list[Any]) -> Any | list[Any]: ...
+def getattr(__o: object, __name: str, __default: list[Any]) -> Any | list[Any]: ...
 @overload
-def getattr(__o: object, name: str, __default: dict[Any, Any]) -> Any | dict[Any, Any]: ...
+def getattr(__o: object, __name: str, __default: dict[Any, Any]) -> Any | dict[Any, Any]: ...
 @overload
 def getattr(__o: object, __name: str, __default: _T) -> Any | _T: ...
 def globals() -> dict[str, Any]: ...
@@ -1357,13 +1358,13 @@ class _GetItemIterable(Protocol[_T_co]):
     def __getitem__(self, __i: int) -> _T_co: ...
 
 @overload
-def iter(__iterable: SupportsIter[_SupportsNextT]) -> _SupportsNextT: ...
+def iter(__object: SupportsIter[_SupportsNextT]) -> _SupportsNextT: ...
 @overload
-def iter(__iterable: _GetItemIterable[_T]) -> Iterator[_T]: ...
+def iter(__object: _GetItemIterable[_T]) -> Iterator[_T]: ...
 @overload
-def iter(__function: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: ...
+def iter(__object: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: ...
 @overload
-def iter(__function: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ...
+def iter(__object: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ...
 
 # Keep this alias in sync with unittest.case._ClassInfo
 if sys.version_info >= (3, 10):
@@ -1544,8 +1545,7 @@ def open(
 ) -> IO[Any]: ...
 def ord(__c: str | bytes | bytearray) -> int: ...
 
-class _SupportsWriteAndFlush(SupportsWrite[_T_contra], Protocol[_T_contra]):
-    def flush(self) -> None: ...
+class _SupportsWriteAndFlush(SupportsWrite[_T_contra], SupportsFlush, Protocol[_T_contra]): ...
 
 @overload
 def print(
diff --git a/mypy/typeshed/stdlib/cmath.pyi b/mypy/typeshed/stdlib/cmath.pyi
index 0a85600e99b7..658cfb2d40ed 100644
--- a/mypy/typeshed/stdlib/cmath.pyi
+++ b/mypy/typeshed/stdlib/cmath.pyi
@@ -30,7 +30,7 @@ def exp(__z: _C) -> complex: ...
 def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: ...
 def isinf(__z: _C) -> bool: ...
 def isnan(__z: _C) -> bool: ...
-def log(__x: _C, __y_obj: _C = ...) -> complex: ...
+def log(__x: _C, __base: _C = ...) -> complex: ...
 def log10(__z: _C) -> complex: ...
 def phase(__z: _C) -> float: ...
 def polar(__z: _C) -> tuple[float, float]: ...
diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi
index f8c92392a599..985a52702bc8 100644
--- a/mypy/typeshed/stdlib/codecs.pyi
+++ b/mypy/typeshed/stdlib/codecs.pyi
@@ -213,7 +213,7 @@ class StreamWriter(Codec):
     def reset(self) -> None: ...
     def __enter__(self) -> Self: ...
     def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ...
-    def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ...
+    def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: ...
 
 class StreamReader(Codec):
     stream: _ReadableStream
@@ -227,7 +227,7 @@ class StreamReader(Codec):
     def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ...
     def __iter__(self) -> Self: ...
     def __next__(self) -> str: ...
-    def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ...
+    def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: ...
 
 # Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing
 # and delegates attributes to the underlying binary stream with __getattr__.
diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi
index bb214b5ea19b..bb51dec50cab 100644
--- a/mypy/typeshed/stdlib/collections/__init__.pyi
+++ b/mypy/typeshed/stdlib/collections/__init__.pyi
@@ -372,6 +372,13 @@ class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
     def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ...
     @overload
     def setdefault(self, key: _KT, default: _VT) -> _VT: ...
+    # Same as dict.pop, but accepts keyword arguments
+    @overload
+    def pop(self, key: _KT) -> _VT: ...
+    @overload
+    def pop(self, key: _KT, default: _VT) -> _VT: ...
+    @overload
+    def pop(self, key: _KT, default: _T) -> _VT | _T: ...
     def __eq__(self, __value: object) -> bool: ...
     if sys.version_info >= (3, 9):
         @overload
diff --git a/mypy/typeshed/stdlib/compileall.pyi b/mypy/typeshed/stdlib/compileall.pyi
index 7520c2f5b676..7f101bf79f6d 100644
--- a/mypy/typeshed/stdlib/compileall.pyi
+++ b/mypy/typeshed/stdlib/compileall.pyi
@@ -6,7 +6,7 @@ from typing import Any, Protocol
 __all__ = ["compile_dir", "compile_file", "compile_path"]
 
 class _SupportsSearch(Protocol):
-    def search(self, string: str) -> Any: ...
+    def search(self, __string: str) -> Any: ...
 
 if sys.version_info >= (3, 10):
     def compile_dir(
diff --git a/mypy/typeshed/stdlib/contextlib.pyi b/mypy/typeshed/stdlib/contextlib.pyi
index c1bfedd2d1da..ce46d0d39830 100644
--- a/mypy/typeshed/stdlib/contextlib.pyi
+++ b/mypy/typeshed/stdlib/contextlib.pyi
@@ -56,7 +56,7 @@ class AbstractAsyncContextManager(Protocol[_T_co]):
 class ContextDecorator:
     def __call__(self, func: _F) -> _F: ...
 
-class _GeneratorContextManager(AbstractContextManager[_T_co], ContextDecorator, Generic[_T_co]):
+class _GeneratorContextManager(AbstractContextManager[_T_co], ContextDecorator):
     # __init__ and all instance attributes are actually inherited from _GeneratorContextManagerBase
     # _GeneratorContextManagerBase is more trouble than it's worth to include in the stub; see #6676
     def __init__(self, func: Callable[..., Iterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ...
@@ -81,7 +81,7 @@ if sys.version_info >= (3, 10):
     class AsyncContextDecorator:
         def __call__(self, func: _AF) -> _AF: ...
 
-    class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co], AsyncContextDecorator, Generic[_T_co]):
+    class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co], AsyncContextDecorator):
         # __init__ and these attributes are actually defined in the base class _GeneratorContextManagerBase,
         # which is more trouble than it's worth to include in the stub (see #6676)
         def __init__(self, func: Callable[..., AsyncIterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ...
diff --git a/mypy/typeshed/stdlib/contextvars.pyi b/mypy/typeshed/stdlib/contextvars.pyi
index 63b5f80aea6c..a67d0349b46a 100644
--- a/mypy/typeshed/stdlib/contextvars.pyi
+++ b/mypy/typeshed/stdlib/contextvars.pyi
@@ -23,17 +23,10 @@ class ContextVar(Generic[_T]):
     def name(self) -> str: ...
     @overload
     def get(self) -> _T: ...
-    if sys.version_info >= (3, 8):
-        @overload
-        def get(self, default: _T) -> _T: ...
-        @overload
-        def get(self, default: _D) -> _D | _T: ...
-    else:
-        @overload
-        def get(self, __default: _T) -> _T: ...
-        @overload
-        def get(self, __default: _D) -> _D | _T: ...
-
+    @overload
+    def get(self, __default: _T) -> _T: ...
+    @overload
+    def get(self, __default: _D) -> _D | _T: ...
     def set(self, __value: _T) -> Token[_T]: ...
     def reset(self, __token: Token[_T]) -> None: ...
     if sys.version_info >= (3, 9):
diff --git a/mypy/typeshed/stdlib/dbm/__init__.pyi b/mypy/typeshed/stdlib/dbm/__init__.pyi
index 0068d67b6ad1..d7115528868b 100644
--- a/mypy/typeshed/stdlib/dbm/__init__.pyi
+++ b/mypy/typeshed/stdlib/dbm/__init__.pyi
@@ -90,5 +90,5 @@ class _error(Exception): ...
 
 error: tuple[type[_error], type[OSError]]
 
-def whichdb(filename: str) -> str: ...
+def whichdb(filename: str) -> str | None: ...
 def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ...
diff --git a/mypy/typeshed/stdlib/doctest.pyi b/mypy/typeshed/stdlib/doctest.pyi
index f3c05781ad92..7e334ef0c504 100644
--- a/mypy/typeshed/stdlib/doctest.pyi
+++ b/mypy/typeshed/stdlib/doctest.pyi
@@ -206,8 +206,8 @@ class DocTestCase(unittest.TestCase):
         self,
         test: DocTest,
         optionflags: int = 0,
-        setUp: Callable[[DocTest], Any] | None = None,
-        tearDown: Callable[[DocTest], Any] | None = None,
+        setUp: Callable[[DocTest], object] | None = None,
+        tearDown: Callable[[DocTest], object] | None = None,
         checker: OutputChecker | None = None,
     ) -> None: ...
     def runTest(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/email/headerregistry.pyi b/mypy/typeshed/stdlib/email/headerregistry.pyi
index e158e89818f7..94623e96f208 100644
--- a/mypy/typeshed/stdlib/email/headerregistry.pyi
+++ b/mypy/typeshed/stdlib/email/headerregistry.pyi
@@ -143,9 +143,9 @@ if sys.version_info >= (3, 8):
 class _HeaderParser(Protocol):
     max_count: ClassVar[Literal[1] | None]
     @staticmethod
-    def value_parser(value: str) -> TokenList: ...
+    def value_parser(__value: str) -> TokenList: ...
     @classmethod
-    def parse(cls, value: str, kwds: dict[str, Any]) -> None: ...
+    def parse(cls, __value: str, __kwds: dict[str, Any]) -> None: ...
 
 class HeaderRegistry:
     registry: dict[str, type[_HeaderParser]]
diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi
index 3e5e496ab501..305568fce6cf 100644
--- a/mypy/typeshed/stdlib/http/client.pyi
+++ b/mypy/typeshed/stdlib/http/client.pyi
@@ -176,7 +176,7 @@ class HTTPConnection:
     def connect(self) -> None: ...
     def close(self) -> None: ...
     def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: ...
-    def putheader(self, header: str, *argument: str) -> None: ...
+    def putheader(self, header: str | bytes, *argument: str | bytes) -> None: ...
     def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: ...
     def send(self, data: _DataType | str) -> None: ...
 
diff --git a/mypy/typeshed/stdlib/imghdr.pyi b/mypy/typeshed/stdlib/imghdr.pyi
index ed3647f20fc5..d0960a5a1c5c 100644
--- a/mypy/typeshed/stdlib/imghdr.pyi
+++ b/mypy/typeshed/stdlib/imghdr.pyi
@@ -6,8 +6,8 @@ __all__ = ["what"]
 
 class _ReadableBinary(Protocol):
     def tell(self) -> int: ...
-    def read(self, size: int) -> bytes: ...
-    def seek(self, offset: int) -> Any: ...
+    def read(self, __size: int) -> bytes: ...
+    def seek(self, __offset: int) -> Any: ...
 
 @overload
 def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: ...
diff --git a/mypy/typeshed/stdlib/imp.pyi b/mypy/typeshed/stdlib/imp.pyi
index 3f2920de9c2b..b532f480fa13 100644
--- a/mypy/typeshed/stdlib/imp.pyi
+++ b/mypy/typeshed/stdlib/imp.pyi
@@ -45,7 +45,7 @@ class _FileLike(Protocol):
     def read(self) -> str | bytes: ...
     def close(self) -> Any: ...
     def __enter__(self) -> Any: ...
-    def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> Any: ...
+    def __exit__(self, __typ: type[BaseException] | None, __exc: BaseException | None, __tb: TracebackType | None) -> Any: ...
 
 # PathLike doesn't work for the pathname argument here
 def load_source(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ...
diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi
index 438dbafb48c3..8c395f8a18af 100644
--- a/mypy/typeshed/stdlib/importlib/abc.pyi
+++ b/mypy/typeshed/stdlib/importlib/abc.pyi
@@ -69,7 +69,7 @@ if sys.version_info >= (3, 12):
         def invalidate_caches(self) -> None: ...
         # Not defined on the actual class, but expected to exist.
         def find_spec(
-            self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ...
+            self, __fullname: str, __path: Sequence[str] | None, __target: types.ModuleType | None = ...
         ) -> ModuleSpec | None: ...
 
     class PathEntryFinder(metaclass=ABCMeta):
@@ -84,7 +84,7 @@ else:
         def invalidate_caches(self) -> None: ...
         # Not defined on the actual class, but expected to exist.
         def find_spec(
-            self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ...
+            self, __fullname: str, __path: Sequence[str] | None, __target: types.ModuleType | None = ...
         ) -> ModuleSpec | None: ...
 
     class PathEntryFinder(Finder):
@@ -138,10 +138,10 @@ if sys.version_info >= (3, 9):
         # which is not the case.
         @overload
         @abstractmethod
-        def open(self, mode: Literal["r", "w"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ...
+        def open(self, __mode: Literal["r", "w"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ...
         @overload
         @abstractmethod
-        def open(self, mode: Literal["rb", "wb"]) -> IO[bytes]: ...
+        def open(self, __mode: Literal["rb", "wb"]) -> IO[bytes]: ...
         @property
         @abstractmethod
         def name(self) -> str: ...
diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi
index 945e8bcbbdee..13a8c4330a50 100644
--- a/mypy/typeshed/stdlib/ipaddress.pyi
+++ b/mypy/typeshed/stdlib/ipaddress.pyi
@@ -1,5 +1,5 @@
 import sys
-from collections.abc import Container, Iterable, Iterator
+from collections.abc import Iterable, Iterator
 from typing import Any, Generic, SupportsInt, TypeVar, overload
 from typing_extensions import Literal, Self, TypeAlias
 
@@ -70,7 +70,7 @@ class _BaseAddress(_IPAddressBase, SupportsInt):
     @property
     def packed(self) -> bytes: ...
 
-class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]):
+class _BaseNetwork(_IPAddressBase, Generic[_A]):
     network_address: _A
     netmask: _A
     def __init__(self, address: object, strict: bool = ...) -> None: ...
diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi
index ffa8e19391dd..55f9c92d8cac 100644
--- a/mypy/typeshed/stdlib/itertools.pyi
+++ b/mypy/typeshed/stdlib/itertools.pyi
@@ -241,10 +241,19 @@ class product(Iterator[_T_co]):
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T_co: ...
 
-class permutations(Iterator[tuple[_T, ...]], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T], r: int | None = ...) -> None: ...
+class permutations(Iterator[_T_co]):
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> permutations[tuple[_T, _T]]: ...
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: Literal[3]) -> permutations[tuple[_T, _T, _T]]: ...
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: Literal[4]) -> permutations[tuple[_T, _T, _T, _T]]: ...
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> permutations[tuple[_T, _T, _T, _T, _T]]: ...
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: int | None = ...) -> permutations[tuple[_T, ...]]: ...
     def __iter__(self) -> Self: ...
-    def __next__(self) -> tuple[_T, ...]: ...
+    def __next__(self) -> _T_co: ...
 
 class combinations(Iterator[_T_co]):
     @overload
diff --git a/mypy/typeshed/stdlib/locale.pyi b/mypy/typeshed/stdlib/locale.pyi
index 2e95c659dbcd..c18523e04361 100644
--- a/mypy/typeshed/stdlib/locale.pyi
+++ b/mypy/typeshed/stdlib/locale.pyi
@@ -8,7 +8,6 @@ from _locale import (
     LC_NUMERIC as LC_NUMERIC,
     LC_TIME as LC_TIME,
     localeconv as localeconv,
-    setlocale as setlocale,
     strcoll as strcoll,
     strxfrm as strxfrm,
 )
@@ -16,7 +15,7 @@ from _locale import (
 # This module defines a function "str()", which is why "str" can't be used
 # as a type annotation or type alias.
 from builtins import str as _str
-from collections.abc import Callable
+from collections.abc import Callable, Iterable
 from decimal import Decimal
 from typing import Any
 
@@ -131,6 +130,7 @@ def getdefaultlocale(
     envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE")
 ) -> tuple[_str | None, _str | None]: ...
 def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ...
+def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ...
 def getpreferredencoding(do_setlocale: bool = True) -> _str: ...
 def normalize(localename: _str) -> _str: ...
 def resetlocale(category: int = ...) -> None: ...
diff --git a/mypy/typeshed/stdlib/logging/__init__.pyi b/mypy/typeshed/stdlib/logging/__init__.pyi
index db797d4180ea..5a72b1fcd799 100644
--- a/mypy/typeshed/stdlib/logging/__init__.pyi
+++ b/mypy/typeshed/stdlib/logging/__init__.pyi
@@ -7,7 +7,7 @@ from re import Pattern
 from string import Template
 from time import struct_time
 from types import FrameType, TracebackType
-from typing import Any, ClassVar, Generic, TextIO, TypeVar, overload
+from typing import Any, ClassVar, Generic, Protocol, TextIO, TypeVar, overload
 from typing_extensions import Literal, Self, TypeAlias
 
 if sys.version_info >= (3, 11):
@@ -66,10 +66,20 @@ if sys.version_info >= (3, 12):
 _SysExcInfoType: TypeAlias = tuple[type[BaseException], BaseException, TracebackType | None] | tuple[None, None, None]
 _ExcInfoType: TypeAlias = None | bool | _SysExcInfoType | BaseException
 _ArgsType: TypeAlias = tuple[object, ...] | Mapping[str, object]
-_FilterType: TypeAlias = Filter | Callable[[LogRecord], bool]
 _Level: TypeAlias = int | str
 _FormatStyle: TypeAlias = Literal["%", "{", "$"]
 
+if sys.version_info >= (3, 12):
+    class _SupportsFilter(Protocol):
+        def filter(self, __record: LogRecord) -> bool | LogRecord: ...
+
+    _FilterType: TypeAlias = Filter | Callable[[LogRecord], bool | LogRecord] | _SupportsFilter
+else:
+    class _SupportsFilter(Protocol):
+        def filter(self, __record: LogRecord) -> bool: ...
+
+    _FilterType: TypeAlias = Filter | Callable[[LogRecord], bool] | _SupportsFilter
+
 raiseExceptions: bool
 logThreads: bool
 logMultiprocessing: bool
diff --git a/mypy/typeshed/stdlib/multiprocessing/pool.pyi b/mypy/typeshed/stdlib/multiprocessing/pool.pyi
index dd4b865a3574..c52f1c1f5453 100644
--- a/mypy/typeshed/stdlib/multiprocessing/pool.pyi
+++ b/mypy/typeshed/stdlib/multiprocessing/pool.pyi
@@ -91,7 +91,7 @@ class Pool:
         func: Callable[[_S], _T],
         iterable: Iterable[_S],
         chunksize: int | None = None,
-        callback: Callable[[_T], object] | None = None,
+        callback: Callable[[list[_T]], object] | None = None,
         error_callback: Callable[[BaseException], object] | None = None,
     ) -> MapResult[_T]: ...
     def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ...
@@ -102,7 +102,7 @@ class Pool:
         func: Callable[..., _T],
         iterable: Iterable[Iterable[Any]],
         chunksize: int | None = None,
-        callback: Callable[[_T], object] | None = None,
+        callback: Callable[[list[_T]], object] | None = None,
         error_callback: Callable[[BaseException], object] | None = None,
     ) -> AsyncResult[list[_T]]: ...
     def close(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi b/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi
index 686a45d9ae41..636d58842158 100644
--- a/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi
+++ b/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi
@@ -73,7 +73,7 @@ def synchronized(obj: ctypes.Array[_CT], lock: _LockLike | None = None, ctx: Any
 def synchronized(obj: _CT, lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedBase[_CT]: ...
 
 class _AcquireFunc(Protocol):
-    def __call__(self, block: bool = ..., timeout: float | None = ...) -> bool: ...
+    def __call__(self, __block: bool = ..., __timeout: float | None = ...) -> bool: ...
 
 class SynchronizedBase(Generic[_CT]):
     acquire: _AcquireFunc
diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi
index 7fd04218fd7c..2810d086ae49 100644
--- a/mypy/typeshed/stdlib/os/__init__.pyi
+++ b/mypy/typeshed/stdlib/os/__init__.pyi
@@ -923,10 +923,16 @@ def times() -> times_result: ...
 def waitpid(__pid: int, __options: int) -> tuple[int, int]: ...
 
 if sys.platform == "win32":
-    if sys.version_info >= (3, 8):
-        def startfile(path: StrOrBytesPath, operation: str | None = None) -> None: ...
+    if sys.version_info >= (3, 10):
+        def startfile(
+            filepath: StrOrBytesPath,
+            operation: str = ...,
+            arguments: str = "",
+            cwd: StrOrBytesPath | None = None,
+            show_cmd: int = 1,
+        ) -> None: ...
     else:
-        def startfile(filepath: StrOrBytesPath, operation: str | None = None) -> None: ...
+        def startfile(filepath: StrOrBytesPath, operation: str = ...) -> None: ...
 
 else:
     def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ...
@@ -964,9 +970,9 @@ else:
     def WTERMSIG(status: int) -> int: ...
     if sys.version_info >= (3, 8):
         def posix_spawn(
-            path: StrOrBytesPath,
-            argv: _ExecVArgs,
-            env: _ExecEnv,
+            __path: StrOrBytesPath,
+            __argv: _ExecVArgs,
+            __env: _ExecEnv,
             *,
             file_actions: Sequence[tuple[Any, ...]] | None = ...,
             setpgroup: int | None = ...,
@@ -977,9 +983,9 @@ else:
             scheduler: tuple[Any, sched_param] | None = ...,
         ) -> int: ...
         def posix_spawnp(
-            path: StrOrBytesPath,
-            argv: _ExecVArgs,
-            env: _ExecEnv,
+            __path: StrOrBytesPath,
+            __argv: _ExecVArgs,
+            __env: _ExecEnv,
             *,
             file_actions: Sequence[tuple[Any, ...]] | None = ...,
             setpgroup: int | None = ...,
diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi
index e584d7f571a7..6db7daebbb41 100644
--- a/mypy/typeshed/stdlib/smtplib.pyi
+++ b/mypy/typeshed/stdlib/smtplib.pyi
@@ -68,9 +68,9 @@ def quotedata(data: str) -> str: ...
 
 class _AuthObject(Protocol):
     @overload
-    def __call__(self, challenge: None = None) -> str | None: ...
+    def __call__(self, __challenge: None = None) -> str | None: ...
     @overload
-    def __call__(self, challenge: bytes) -> str: ...
+    def __call__(self, __challenge: bytes) -> str: ...
 
 class SMTP:
     debuglevel: int
diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
index e85f49207763..236e093c9909 100644
--- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
+++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi
@@ -349,10 +349,10 @@ class Connection:
         def create_function(self, name: str, num_params: int, func: Callable[..., _SqliteData] | None) -> None: ...
 
     @overload
-    def cursor(self, cursorClass: None = None) -> Cursor: ...
+    def cursor(self, factory: None = None) -> Cursor: ...
     @overload
-    def cursor(self, cursorClass: Callable[[Connection], _CursorT]) -> _CursorT: ...
-    def execute(self, sql: str, parameters: _Parameters = ...) -> Cursor: ...
+    def cursor(self, factory: Callable[[Connection], _CursorT]) -> _CursorT: ...
+    def execute(self, __sql: str, __parameters: _Parameters = ...) -> Cursor: ...
     def executemany(self, __sql: str, __parameters: Iterable[_Parameters]) -> Cursor: ...
     def executescript(self, __sql_script: str) -> Cursor: ...
     def interrupt(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi
index 1013db7ee984..b6cc23651ade 100644
--- a/mypy/typeshed/stdlib/subprocess.pyi
+++ b/mypy/typeshed/stdlib/subprocess.pyi
@@ -248,7 +248,7 @@ if sys.version_info >= (3, 11):
         shell: bool = False,
         cwd: StrOrBytesPath | None = None,
         env: _ENV | None = None,
-        universal_newlines: Literal[False, None] = None,
+        universal_newlines: Literal[False] | None = None,
         startupinfo: Any = None,
         creationflags: int = 0,
         restore_signals: bool = True,
@@ -260,7 +260,7 @@ if sys.version_info >= (3, 11):
         encoding: None = None,
         errors: None = None,
         input: ReadableBuffer | None = None,
-        text: Literal[None, False] = None,
+        text: Literal[False] | None = None,
         timeout: float | None = None,
         user: str | int | None = None,
         group: str | int | None = None,
@@ -452,7 +452,7 @@ elif sys.version_info >= (3, 10):
         shell: bool = False,
         cwd: StrOrBytesPath | None = None,
         env: _ENV | None = None,
-        universal_newlines: Literal[False, None] = None,
+        universal_newlines: Literal[False] | None = None,
         startupinfo: Any = None,
         creationflags: int = 0,
         restore_signals: bool = True,
@@ -464,7 +464,7 @@ elif sys.version_info >= (3, 10):
         encoding: None = None,
         errors: None = None,
         input: ReadableBuffer | None = None,
-        text: Literal[None, False] = None,
+        text: Literal[False] | None = None,
         timeout: float | None = None,
         user: str | int | None = None,
         group: str | int | None = None,
@@ -650,7 +650,7 @@ elif sys.version_info >= (3, 9):
         shell: bool = False,
         cwd: StrOrBytesPath | None = None,
         env: _ENV | None = None,
-        universal_newlines: Literal[False, None] = None,
+        universal_newlines: Literal[False] | None = None,
         startupinfo: Any = None,
         creationflags: int = 0,
         restore_signals: bool = True,
@@ -662,7 +662,7 @@ elif sys.version_info >= (3, 9):
         encoding: None = None,
         errors: None = None,
         input: ReadableBuffer | None = None,
-        text: Literal[None, False] = None,
+        text: Literal[False] | None = None,
         timeout: float | None = None,
         user: str | int | None = None,
         group: str | int | None = None,
@@ -829,7 +829,7 @@ else:
         shell: bool = False,
         cwd: StrOrBytesPath | None = None,
         env: _ENV | None = None,
-        universal_newlines: Literal[False, None] = None,
+        universal_newlines: Literal[False] | None = None,
         startupinfo: Any = None,
         creationflags: int = 0,
         restore_signals: bool = True,
@@ -841,7 +841,7 @@ else:
         encoding: None = None,
         errors: None = None,
         input: ReadableBuffer | None = None,
-        text: Literal[None, False] = None,
+        text: Literal[False] | None = None,
         timeout: float | None = None,
     ) -> CompletedProcess[bytes]: ...
     @overload
@@ -1242,7 +1242,7 @@ if sys.version_info >= (3, 11):
         shell: bool = False,
         cwd: StrOrBytesPath | None = None,
         env: _ENV | None = None,
-        universal_newlines: Literal[False, None] = None,
+        universal_newlines: Literal[False] | None = None,
         startupinfo: Any = None,
         creationflags: int = 0,
         restore_signals: bool = True,
@@ -1253,7 +1253,7 @@ if sys.version_info >= (3, 11):
         input: _InputString | None = ...,
         encoding: None = None,
         errors: None = None,
-        text: Literal[None, False] = None,
+        text: Literal[False] | None = None,
         user: str | int | None = None,
         group: str | int | None = None,
         extra_groups: Iterable[str | int] | None = None,
@@ -1428,7 +1428,7 @@ elif sys.version_info >= (3, 10):
         shell: bool = False,
         cwd: StrOrBytesPath | None = None,
         env: _ENV | None = None,
-        universal_newlines: Literal[False, None] = None,
+        universal_newlines: Literal[False] | None = None,
         startupinfo: Any = None,
         creationflags: int = 0,
         restore_signals: bool = True,
@@ -1439,7 +1439,7 @@ elif sys.version_info >= (3, 10):
         input: _InputString | None = ...,
         encoding: None = None,
         errors: None = None,
-        text: Literal[None, False] = None,
+        text: Literal[False] | None = None,
         user: str | int | None = None,
         group: str | int | None = None,
         extra_groups: Iterable[str | int] | None = None,
@@ -1608,7 +1608,7 @@ elif sys.version_info >= (3, 9):
         shell: bool = False,
         cwd: StrOrBytesPath | None = None,
         env: _ENV | None = None,
-        universal_newlines: Literal[False, None] = None,
+        universal_newlines: Literal[False] | None = None,
         startupinfo: Any = None,
         creationflags: int = 0,
         restore_signals: bool = True,
@@ -1619,7 +1619,7 @@ elif sys.version_info >= (3, 9):
         input: _InputString | None = ...,
         encoding: None = None,
         errors: None = None,
-        text: Literal[None, False] = None,
+        text: Literal[False] | None = None,
         user: str | int | None = None,
         group: str | int | None = None,
         extra_groups: Iterable[str | int] | None = None,
@@ -1769,7 +1769,7 @@ else:
         shell: bool = False,
         cwd: StrOrBytesPath | None = None,
         env: _ENV | None = None,
-        universal_newlines: Literal[False, None] = None,
+        universal_newlines: Literal[False] | None = None,
         startupinfo: Any = None,
         creationflags: int = 0,
         restore_signals: bool = True,
@@ -1780,7 +1780,7 @@ else:
         input: _InputString | None = ...,
         encoding: None = None,
         errors: None = None,
-        text: Literal[None, False] = None,
+        text: Literal[False] | None = None,
     ) -> bytes: ...
     @overload
     def check_output(
@@ -1990,14 +1990,14 @@ class Popen(Generic[AnyStr]):
             shell: bool = False,
             cwd: StrOrBytesPath | None = None,
             env: _ENV | None = None,
-            universal_newlines: Literal[False, None] = None,
+            universal_newlines: Literal[False] | None = None,
             startupinfo: Any | None = None,
             creationflags: int = 0,
             restore_signals: bool = True,
             start_new_session: bool = False,
             pass_fds: Collection[int] = (),
             *,
-            text: Literal[None, False] = None,
+            text: Literal[False] | None = None,
             encoding: None = None,
             errors: None = None,
             user: str | int | None = None,
@@ -2175,14 +2175,14 @@ class Popen(Generic[AnyStr]):
             shell: bool = False,
             cwd: StrOrBytesPath | None = None,
             env: _ENV | None = None,
-            universal_newlines: Literal[False, None] = None,
+            universal_newlines: Literal[False] | None = None,
             startupinfo: Any | None = None,
             creationflags: int = 0,
             restore_signals: bool = True,
             start_new_session: bool = False,
             pass_fds: Collection[int] = (),
             *,
-            text: Literal[None, False] = None,
+            text: Literal[False] | None = None,
             encoding: None = None,
             errors: None = None,
             user: str | int | None = None,
@@ -2354,14 +2354,14 @@ class Popen(Generic[AnyStr]):
             shell: bool = False,
             cwd: StrOrBytesPath | None = None,
             env: _ENV | None = None,
-            universal_newlines: Literal[False, None] = None,
+            universal_newlines: Literal[False] | None = None,
             startupinfo: Any | None = None,
             creationflags: int = 0,
             restore_signals: bool = True,
             start_new_session: bool = False,
             pass_fds: Collection[int] = (),
             *,
-            text: Literal[None, False] = None,
+            text: Literal[False] | None = None,
             encoding: None = None,
             errors: None = None,
             user: str | int | None = None,
@@ -2514,14 +2514,14 @@ class Popen(Generic[AnyStr]):
             shell: bool = False,
             cwd: StrOrBytesPath | None = None,
             env: _ENV | None = None,
-            universal_newlines: Literal[False, None] = None,
+            universal_newlines: Literal[False] | None = None,
             startupinfo: Any | None = None,
             creationflags: int = 0,
             restore_signals: bool = True,
             start_new_session: bool = False,
             pass_fds: Collection[int] = (),
             *,
-            text: Literal[None, False] = None,
+            text: Literal[False] | None = None,
             encoding: None = None,
             errors: None = None,
         ) -> None: ...
diff --git a/mypy/typeshed/stdlib/sys/__init__.pyi b/mypy/typeshed/stdlib/sys/__init__.pyi
index cf3b1bc47d75..1d4111af3a49 100644
--- a/mypy/typeshed/stdlib/sys/__init__.pyi
+++ b/mypy/typeshed/stdlib/sys/__init__.pyi
@@ -17,7 +17,9 @@ _OptExcInfo: TypeAlias = OptExcInfo  # noqa: Y047  # TODO: obsolete, remove fall
 
 # Intentionally omits one deprecated and one optional method of `importlib.abc.MetaPathFinder`
 class _MetaPathFinder(Protocol):
-    def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = ...) -> ModuleSpec | None: ...
+    def find_spec(
+        self, __fullname: str, __path: Sequence[str] | None, __target: ModuleType | None = ...
+    ) -> ModuleSpec | None: ...
 
 # ----- sys variables -----
 if sys.platform != "win32":
diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi
index c35db3ef7e34..d0eb97aa5ebd 100644
--- a/mypy/typeshed/stdlib/tkinter/__init__.pyi
+++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi
@@ -7,7 +7,7 @@ from tkinter.constants import *
 from tkinter.font import _FontDescription
 from types import TracebackType
 from typing import Any, Generic, NamedTuple, TypeVar, overload, type_check_only
-from typing_extensions import Literal, TypeAlias, TypedDict
+from typing_extensions import Literal, TypeAlias, TypedDict, deprecated
 
 if sys.version_info >= (3, 9):
     __all__ = [
@@ -273,10 +273,14 @@ class Variable:
     def trace_add(self, mode: _TraceMode, callback: Callable[[str, str, str], object]) -> str: ...
     def trace_remove(self, mode: _TraceMode, cbname: str) -> None: ...
     def trace_info(self) -> list[tuple[tuple[_TraceMode, ...], str]]: ...
-    def trace_variable(self, mode, callback): ...  # deprecated
-    def trace_vdelete(self, mode, cbname) -> None: ...  # deprecated
-    def trace_vinfo(self): ...  # deprecated
-    trace = trace_variable  # deprecated
+    @deprecated("use trace_add() instead of trace()")
+    def trace(self, mode, callback): ...
+    @deprecated("use trace_add() instead of trace_variable()")
+    def trace_variable(self, mode, callback): ...
+    @deprecated("use trace_remove() instead of trace_vdelete()")
+    def trace_vdelete(self, mode, cbname) -> None: ...
+    @deprecated("use trace_info() instead of trace_vinfo()")
+    def trace_vinfo(self): ...
     def __eq__(self, other: object) -> bool: ...
 
 class StringVar(Variable):
@@ -343,9 +347,8 @@ class Misc:
     def tk_focusFollowsMouse(self) -> None: ...
     def tk_focusNext(self) -> Misc | None: ...
     def tk_focusPrev(self) -> Misc | None: ...
-    @overload
-    def after(self, ms: int, func: None = None) -> None: ...
-    @overload
+    # .after() can be called without the "func" argument, but it is basically never what you want.
+    # It behaves like time.sleep() and freezes the GUI app.
     def after(self, ms: int | Literal["idle"], func: Callable[..., object], *args: Any) -> str: ...
     # after_idle is essentially partialmethod(after, "idle")
     def after_idle(self, func: Callable[..., object], *args: Any) -> str: ...
diff --git a/mypy/typeshed/stdlib/tkinter/dnd.pyi b/mypy/typeshed/stdlib/tkinter/dnd.pyi
index 4a6ab42b3e33..8f438537369c 100644
--- a/mypy/typeshed/stdlib/tkinter/dnd.pyi
+++ b/mypy/typeshed/stdlib/tkinter/dnd.pyi
@@ -6,7 +6,7 @@ if sys.version_info >= (3, 9):
     __all__ = ["dnd_start", "DndHandler"]
 
 class _DndSource(Protocol):
-    def dnd_end(self, target: Widget | None, event: Event[Misc] | None) -> None: ...
+    def dnd_end(self, __target: Widget | None, __event: Event[Misc] | None) -> None: ...
 
 class DndHandler:
     root: ClassVar[Tk | None]
diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi
index a50bbf145b9f..fcaf5264c5e3 100644
--- a/mypy/typeshed/stdlib/types.pyi
+++ b/mypy/typeshed/stdlib/types.pyi
@@ -335,7 +335,7 @@ class SimpleNamespace:
     def __delattr__(self, __name: str) -> None: ...
 
 class _LoaderProtocol(Protocol):
-    def load_module(self, fullname: str) -> ModuleType: ...
+    def load_module(self, __fullname: str) -> ModuleType: ...
 
 class ModuleType:
     __name__: str
diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi
index ad5719ca9f56..7694157d70fe 100644
--- a/mypy/typeshed/stdlib/typing.pyi
+++ b/mypy/typeshed/stdlib/typing.pyi
@@ -513,7 +513,7 @@ class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]):
     @abstractmethod
     def __len__(self) -> int: ...
 
-class Sequence(Collection[_T_co], Reversible[_T_co], Generic[_T_co]):
+class Sequence(Collection[_T_co], Reversible[_T_co]):
     @overload
     @abstractmethod
     def __getitem__(self, index: int) -> _T_co: ...
@@ -607,7 +607,7 @@ class ItemsView(MappingView, AbstractSet[tuple[_KT_co, _VT_co]], Generic[_KT_co,
     def __xor__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ...
     def __rxor__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ...
 
-class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]):
+class KeysView(MappingView, AbstractSet[_KT_co]):
     def __init__(self, mapping: Mapping[_KT_co, Any]) -> None: ...  # undocumented
     def __and__(self, other: Iterable[Any]) -> set[_KT_co]: ...
     def __rand__(self, other: Iterable[_T]) -> set[_T]: ...
@@ -623,7 +623,7 @@ class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]):
     def __xor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ...
     def __rxor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ...
 
-class ValuesView(MappingView, Collection[_VT_co], Generic[_VT_co]):
+class ValuesView(MappingView, Collection[_VT_co]):
     def __init__(self, mapping: Mapping[Any, _VT_co]) -> None: ...  # undocumented
     def __contains__(self, value: object) -> bool: ...
     def __iter__(self) -> Iterator[_VT_co]: ...
diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi
index aa04e16d62ec..cc5d683e245a 100644
--- a/mypy/typeshed/stdlib/unittest/case.pyi
+++ b/mypy/typeshed/stdlib/unittest/case.pyi
@@ -52,7 +52,7 @@ else:
         ) -> bool | None: ...
 
 if sys.version_info >= (3, 8):
-    def addModuleCleanup(__function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ...
+    def addModuleCleanup(__function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ...
     def doModuleCleanups() -> None: ...
 
 if sys.version_info >= (3, 11):
@@ -136,7 +136,7 @@ class TestCase:
     def assertRaises(
         self,
         expected_exception: type[BaseException] | tuple[type[BaseException], ...],
-        callable: Callable[..., Any],
+        callable: Callable[..., object],
         *args: Any,
         **kwargs: Any,
     ) -> None: ...
@@ -149,7 +149,7 @@ class TestCase:
         self,
         expected_exception: type[BaseException] | tuple[type[BaseException], ...],
         expected_regex: str | Pattern[str],
-        callable: Callable[..., Any],
+        callable: Callable[..., object],
         *args: Any,
         **kwargs: Any,
     ) -> None: ...
@@ -161,7 +161,7 @@ class TestCase:
     def assertWarns(
         self,
         expected_warning: type[Warning] | tuple[type[Warning], ...],
-        callable: Callable[_P, Any],
+        callable: Callable[_P, object],
         *args: _P.args,
         **kwargs: _P.kwargs,
     ) -> None: ...
@@ -174,7 +174,7 @@ class TestCase:
         self,
         expected_warning: type[Warning] | tuple[type[Warning], ...],
         expected_regex: str | Pattern[str],
-        callable: Callable[_P, Any],
+        callable: Callable[_P, object],
         *args: _P.args,
         **kwargs: _P.kwargs,
     ) -> None: ...
@@ -256,9 +256,9 @@ class TestCase:
     def id(self) -> str: ...
     def shortDescription(self) -> str | None: ...
     if sys.version_info >= (3, 8):
-        def addCleanup(self, __function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ...
+        def addCleanup(self, __function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ...
     else:
-        def addCleanup(self, function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ...
+        def addCleanup(self, function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ...
 
     if sys.version_info >= (3, 11):
         def enterContext(self, cm: AbstractContextManager[_T]) -> _T: ...
@@ -266,7 +266,7 @@ class TestCase:
     def doCleanups(self) -> None: ...
     if sys.version_info >= (3, 8):
         @classmethod
-        def addClassCleanup(cls, __function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ...
+        def addClassCleanup(cls, __function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ...
         @classmethod
         def doClassCleanups(cls) -> None: ...
 
@@ -299,9 +299,9 @@ class TestCase:
 class FunctionTestCase(TestCase):
     def __init__(
         self,
-        testFunc: Callable[[], Any],
-        setUp: Callable[[], Any] | None = None,
-        tearDown: Callable[[], Any] | None = None,
+        testFunc: Callable[[], object],
+        setUp: Callable[[], object] | None = None,
+        tearDown: Callable[[], object] | None = None,
         description: str | None = None,
     ) -> None: ...
     def runTest(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/unittest/main.pyi b/mypy/typeshed/stdlib/unittest/main.pyi
index d29e9a2b8da8..3e8cb7b764c2 100644
--- a/mypy/typeshed/stdlib/unittest/main.pyi
+++ b/mypy/typeshed/stdlib/unittest/main.pyi
@@ -11,7 +11,7 @@ MAIN_EXAMPLES: str
 MODULE_EXAMPLES: str
 
 class _TestRunner(Protocol):
-    def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ...
+    def run(self, __test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ...
 
 # not really documented
 class TestProgram:
diff --git a/mypy/typeshed/stdlib/unittest/util.pyi b/mypy/typeshed/stdlib/unittest/util.pyi
index 845accfebedd..c42d1346e4b7 100644
--- a/mypy/typeshed/stdlib/unittest/util.pyi
+++ b/mypy/typeshed/stdlib/unittest/util.pyi
@@ -1,4 +1,4 @@
-from collections.abc import Sequence
+from collections.abc import MutableSequence, Sequence
 from typing import Any, TypeVar
 from typing_extensions import TypeAlias
 
@@ -17,7 +17,7 @@ def _common_shorten_repr(*args: str) -> tuple[str, ...]: ...
 def safe_repr(obj: object, short: bool = False) -> str: ...
 def strclass(cls: type) -> str: ...
 def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ...
-def unorderable_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ...
+def unorderable_list_difference(expected: MutableSequence[_T], actual: MutableSequence[_T]) -> tuple[list[_T], list[_T]]: ...
 def three_way_cmp(x: Any, y: Any) -> int: ...
 def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ...
 def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ...
diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi
index a4849dfa2e6e..237a4d264b51 100644
--- a/mypy/typeshed/stdlib/urllib/request.pyi
+++ b/mypy/typeshed/stdlib/urllib/request.pyi
@@ -227,7 +227,8 @@ class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
 class _HTTPConnectionProtocol(Protocol):
     def __call__(
         self,
-        host: str,
+        __host: str,
+        *,
         port: int | None = ...,
         timeout: float = ...,
         source_address: tuple[str, int] | None = ...,
diff --git a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi
index d8ff2f5b6090..b08ca88e7e97 100644
--- a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi
+++ b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi
@@ -86,7 +86,7 @@ class Element:
     attrib: dict[str, str]
     text: str | None
     tail: str | None
-    def __init__(self, tag: str | Callable[..., Element], attrib: dict[str, str] = ..., **extra: str) -> None: ...
+    def __init__(self, tag: str, attrib: dict[str, str] = ..., **extra: str) -> None: ...
     def append(self, __subelement: Element) -> None: ...
     def clear(self) -> None: ...
     def extend(self, __elements: Iterable[Element]) -> None: ...
@@ -132,7 +132,7 @@ def SubElement(parent: Element, tag: str, attrib: dict[str, str] = ..., **extra:
 def Comment(text: str | None = None) -> Element: ...
 def ProcessingInstruction(target: str, text: str | None = None) -> Element: ...
 
-PI: Callable[..., Element]
+PI = ProcessingInstruction
 
 class QName:
     text: str
diff --git a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi
index 06e03a1e4d06..528f35963947 100644
--- a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi
+++ b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi
@@ -11,7 +11,7 @@ def quoteattr(data: str, entities: Mapping[str, str] = {}) -> str: ...
 class XMLGenerator(handler.ContentHandler):
     def __init__(
         self,
-        out: TextIOBase | RawIOBase | StreamWriter | StreamReaderWriter | SupportsWrite[str] | None = None,
+        out: TextIOBase | RawIOBase | StreamWriter | StreamReaderWriter | SupportsWrite[bytes] | None = None,
         encoding: str = "iso-8859-1",
         short_empty_elements: bool = False,
     ) -> None: ...

From 058f8fd25fc425092bb4116b318cb61e1e43e0ff Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sat, 18 Nov 2023 22:34:59 +0300
Subject: [PATCH 260/288] Do not allow `TypedDict` classes with extra keywords
 (#16438)

---
 mypy/messages.py                    | 15 +++++++++++----
 mypy/semanal_typeddict.py           |  6 ++++++
 test-data/unit/check-typeddict.test | 30 +++++++++++++++++++++++++++++
 3 files changed, 47 insertions(+), 4 deletions(-)

diff --git a/mypy/messages.py b/mypy/messages.py
index 19aafedd5586..ddb048444695 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -991,10 +991,17 @@ def maybe_note_about_special_args(self, callee: CallableType, context: Context)
                 context,
             )
 
+    def unexpected_keyword_argument_for_function(
+        self, for_func: str, name: str, context: Context, *, matches: list[str] | None = None
+    ) -> None:
+        msg = f'Unexpected keyword argument "{name}"' + for_func
+        if matches:
+            msg += f"; did you mean {pretty_seq(matches, 'or')}?"
+        self.fail(msg, context, code=codes.CALL_ARG)
+
     def unexpected_keyword_argument(
         self, callee: CallableType, name: str, arg_type: Type, context: Context
     ) -> None:
-        msg = f'Unexpected keyword argument "{name}"' + for_function(callee)
         # Suggest intended keyword, look for type match else fallback on any match.
         matching_type_args = []
         not_matching_type_args = []
@@ -1008,9 +1015,9 @@ def unexpected_keyword_argument(
         matches = best_matches(name, matching_type_args, n=3)
         if not matches:
             matches = best_matches(name, not_matching_type_args, n=3)
-        if matches:
-            msg += f"; did you mean {pretty_seq(matches, 'or')}?"
-        self.fail(msg, context, code=codes.CALL_ARG)
+        self.unexpected_keyword_argument_for_function(
+            for_function(callee), name, context, matches=matches
+        )
         module = find_defining_module(self.modules, callee)
         if module:
             assert callee.definition is not None
diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py
index a013cc040e89..f399d8872a32 100644
--- a/mypy/semanal_typeddict.py
+++ b/mypy/semanal_typeddict.py
@@ -323,6 +323,12 @@ def analyze_typeddict_classdef_fields(
         total: bool | None = True
         if "total" in defn.keywords:
             total = require_bool_literal_argument(self.api, defn.keywords["total"], "total", True)
+        if defn.keywords and defn.keywords.keys() != {"total"}:
+            for_function = ' for "__init_subclass__" of "TypedDict"'
+            for key in defn.keywords.keys():
+                if key == "total":
+                    continue
+                self.msg.unexpected_keyword_argument_for_function(for_function, key, defn)
         required_keys = {
             field
             for (field, t) in zip(fields, types)
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index c584906dd965..dc808390021a 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -3408,3 +3408,33 @@ B = TypedDict("B", {  # E: Type of a TypedDict key becomes "Any" due to an unfol
 })
 [builtins fixtures/dict.pyi]
 [typing fixtures/typing-typeddict.pyi]
+
+[case testTypedDictWithClassLevelKeywords]
+from typing import TypedDict, Generic, TypeVar
+
+T = TypeVar('T')
+
+class Meta(type): ...
+
+class WithMetaKeyword(TypedDict, metaclass=Meta):  # E: Unexpected keyword argument "metaclass" for "__init_subclass__" of "TypedDict"
+    ...
+
+class GenericWithMetaKeyword(TypedDict, Generic[T], metaclass=Meta):  # E: Unexpected keyword argument "metaclass" for "__init_subclass__" of "TypedDict"
+    ...
+
+# We still don't allow this, because the implementation is much easier
+# and it does not make any practical sense to do it:
+class WithTypeMeta(TypedDict, metaclass=type):  # E: Unexpected keyword argument "metaclass" for "__init_subclass__" of "TypedDict"
+    ...
+
+class OtherKeywords(TypedDict, a=1, b=2, c=3, total=True):  # E: Unexpected keyword argument "a" for "__init_subclass__" of "TypedDict" \
+                                                            # E: Unexpected keyword argument "b" for "__init_subclass__" of "TypedDict" \
+                                                            # E: Unexpected keyword argument "c" for "__init_subclass__" of "TypedDict"
+    ...
+
+class TotalInTheMiddle(TypedDict, a=1, total=True, b=2, c=3):  # E: Unexpected keyword argument "a" for "__init_subclass__" of "TypedDict" \
+                                                            # E: Unexpected keyword argument "b" for "__init_subclass__" of "TypedDict" \
+                                                            # E: Unexpected keyword argument "c" for "__init_subclass__" of "TypedDict"
+    ...
+[builtins fixtures/dict.pyi]
+[typing fixtures/typing-typeddict.pyi]

From 6cbdab8317f40e146058fd46ea96d5bbbea086c4 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Sat, 18 Nov 2023 20:22:54 +0000
Subject: [PATCH 261/288] Stubtest: more helpful errors if a function is
 missing from stub (#16517)

---
 mypy/stubtest.py | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index 46468e8e18e0..98ab8a043aaf 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -102,7 +102,17 @@ def __init__(
         self.stub_object = stub_object
         self.runtime_object = runtime_object
         self.stub_desc = stub_desc or str(getattr(stub_object, "type", stub_object))
-        self.runtime_desc = runtime_desc or _truncate(repr(runtime_object), 100)
+
+        if runtime_desc is None:
+            runtime_sig = safe_inspect_signature(runtime_object)
+            if runtime_sig is None:
+                self.runtime_desc = _truncate(repr(runtime_object), 100)
+            else:
+                runtime_is_async = inspect.iscoroutinefunction(runtime_object)
+                description = describe_runtime_callable(runtime_sig, is_async=runtime_is_async)
+                self.runtime_desc = _truncate(description, 100)
+        else:
+            self.runtime_desc = runtime_desc
 
     def is_missing_stub(self) -> bool:
         """Whether or not the error is for something missing from the stub."""
@@ -1000,7 +1010,7 @@ def verify_funcitem(
     if signature:
         stub_sig = Signature.from_funcitem(stub)
         runtime_sig = Signature.from_inspect_signature(signature)
-        runtime_sig_desc = f'{"async " if runtime_is_coroutine else ""}def {signature}'
+        runtime_sig_desc = describe_runtime_callable(signature, is_async=runtime_is_coroutine)
         stub_desc = str(stub_sig)
     else:
         runtime_sig_desc, stub_desc = None, None
@@ -1482,6 +1492,10 @@ def safe_inspect_signature(runtime: Any) -> inspect.Signature | None:
         return None
 
 
+def describe_runtime_callable(signature: inspect.Signature, *, is_async: bool) -> str:
+    return f'{"async " if is_async else ""}def {signature}'
+
+
 def is_subtype_helper(left: mypy.types.Type, right: mypy.types.Type) -> bool:
     """Checks whether ``left`` is a subtype of ``right``."""
     left = mypy.types.get_proper_type(left)

From e81309e955b23737a5efe3e24a8f705da60fcb82 Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra <jelle.zijlstra@gmail.com>
Date: Sat, 18 Nov 2023 12:30:36 -0800
Subject: [PATCH 262/288] PEP 702: decorator is in warnings, not
 typing-extensions (#16488)

Followup from
https://github.com/python/mypy/pull/16457/files#r1392715784
---
 mypy/test/teststubtest.py                     | 4 ++--
 mypy/types.py                                 | 2 +-
 test-data/unit/lib-stub/typing_extensions.pyi | 3 ++-
 3 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py
index 58602be3a624..a2e9668a9ac4 100644
--- a/mypy/test/teststubtest.py
+++ b/mypy/test/teststubtest.py
@@ -72,7 +72,6 @@ class Tuple(Sequence[_T_co]): ...
 class NamedTuple(tuple[Any, ...]): ...
 def overload(func: _T) -> _T: ...
 def type_check_only(func: _T) -> _T: ...
-def deprecated(__msg: str) -> Callable[[_T], _T]: ...
 def final(func: _T) -> _T: ...
 """
 
@@ -635,7 +634,8 @@ def f5(__b: str) -> str: ...
         )
         yield Case(
             stub="""
-            from typing import deprecated, final
+            from typing import final
+            from typing_extensions import deprecated
             class Foo:
                 @overload
                 @final
diff --git a/mypy/types.py b/mypy/types.py
index e7738bd7d088..d19766c1de34 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -123,7 +123,7 @@
 ANNOTATED_TYPE_NAMES: Final = ("typing.Annotated", "typing_extensions.Annotated")
 
 # Supported @deprecated type names
-DEPRECATED_TYPE_NAMES: Final = ("typing.deprecated", "typing_extensions.deprecated")
+DEPRECATED_TYPE_NAMES: Final = ("warnings.deprecated", "typing_extensions.deprecated")
 
 # We use this constant in various places when checking `tuple` subtyping:
 TUPLE_LIKE_INSTANCE_NAMES: Final = (
diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi
index 216005e3cf83..c88aa5c815c5 100644
--- a/test-data/unit/lib-stub/typing_extensions.pyi
+++ b/test-data/unit/lib-stub/typing_extensions.pyi
@@ -1,5 +1,5 @@
 import typing
-from typing import Any, Mapping, Iterable, Iterator, NoReturn as NoReturn, Dict, Tuple, Type
+from typing import Any, Callable, Mapping, Iterable, Iterator, NoReturn as NoReturn, Dict, Tuple, Type
 from typing import TYPE_CHECKING as TYPE_CHECKING
 from typing import NewType as NewType, overload as overload
 
@@ -75,5 +75,6 @@ def dataclass_transform(
 ) -> Callable[[T], T]: ...
 
 def override(__arg: _T) -> _T: ...
+def deprecated(__msg: str) -> Callable[[_T], _T]: ...
 
 _FutureFeatureFixture = 0

From 706389d3d551dc8f18ce5e5f48584f351a7a07a1 Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra <jelle.zijlstra@gmail.com>
Date: Sat, 18 Nov 2023 12:31:14 -0800
Subject: [PATCH 263/288] stubtest: hack for "<unrepresentable>" defaults
 (#16433)

See python/cpython#87233
---
 mypy/stubtest.py          | 34 +++++++++++++++++++++++++++++++++-
 mypy/test/teststubtest.py | 18 ++++++++++++++++++
 2 files changed, 51 insertions(+), 1 deletion(-)

diff --git a/mypy/stubtest.py b/mypy/stubtest.py
index 98ab8a043aaf..c02a3efd8dc0 100644
--- a/mypy/stubtest.py
+++ b/mypy/stubtest.py
@@ -55,6 +55,17 @@ def __repr__(self) -> str:
 T = TypeVar("T")
 MaybeMissing: typing_extensions.TypeAlias = Union[T, Missing]
 
+
+class Unrepresentable:
+    """Marker object for unrepresentable parameter defaults."""
+
+    def __repr__(self) -> str:
+        return "<unrepresentable>"
+
+
+UNREPRESENTABLE: typing_extensions.Final = Unrepresentable()
+
+
 _formatter: typing_extensions.Final = FancyFormatter(sys.stdout, sys.stderr, False)
 
 
@@ -681,6 +692,7 @@ def _verify_arg_default_value(
                 if (
                     stub_default is not UNKNOWN
                     and stub_default is not ...
+                    and runtime_arg.default is not UNREPRESENTABLE
                     and (
                         stub_default != runtime_arg.default
                         # We want the types to match exactly, e.g. in case the stub has
@@ -1483,7 +1495,27 @@ def is_read_only_property(runtime: object) -> bool:
 
 def safe_inspect_signature(runtime: Any) -> inspect.Signature | None:
     try:
-        return inspect.signature(runtime)
+        try:
+            return inspect.signature(runtime)
+        except ValueError:
+            if (
+                hasattr(runtime, "__text_signature__")
+                and "<unrepresentable>" in runtime.__text_signature__
+            ):
+                # Try to fix up the signature. Workaround for
+                # https://github.com/python/cpython/issues/87233
+                sig = runtime.__text_signature__.replace("<unrepresentable>", "...")
+                sig = inspect._signature_fromstr(inspect.Signature, runtime, sig)  # type: ignore[attr-defined]
+                assert isinstance(sig, inspect.Signature)
+                new_params = [
+                    parameter.replace(default=UNREPRESENTABLE)
+                    if parameter.default is ...
+                    else parameter
+                    for parameter in sig.parameters.values()
+                ]
+                return sig.replace(parameters=new_params)
+            else:
+                raise
     except Exception:
         # inspect.signature throws ValueError all the time
         # catch RuntimeError because of https://bugs.python.org/issue39504
diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py
index a2e9668a9ac4..34b266115166 100644
--- a/mypy/test/teststubtest.py
+++ b/mypy/test/teststubtest.py
@@ -428,6 +428,16 @@ def test_default_value(self) -> Iterator[Case]:
             error=None,
         )
 
+        # Simulate "<unrepresentable>"
+        yield Case(
+            stub="def f11() -> None: ...",
+            runtime="""
+            def f11(text=None) -> None: pass
+            f11.__text_signature__ = "(text=<unrepresentable>)"
+            """,
+            error="f11",
+        )
+
     @collect_cases
     def test_static_class_method(self) -> Iterator[Case]:
         yield Case(
@@ -2281,6 +2291,14 @@ def f(a: int, b: int, *, c: int, d: int = 0, **kwargs: Any) -> None:
             == "def (a, b, *, c, d = ..., **kwargs)"
         )
 
+    def test_builtin_signature_with_unrepresentable_default(self) -> None:
+        sig = mypy.stubtest.safe_inspect_signature(bytes.hex)
+        assert sig is not None
+        assert (
+            str(mypy.stubtest.Signature.from_inspect_signature(sig))
+            == "def (self, sep = ..., bytes_per_sep = ...)"
+        )
+
     def test_config_file(self) -> None:
         runtime = "temp = 5\n"
         stub = "from decimal import Decimal\ntemp: Decimal\n"

From a3e488d24e25688e74f32ced52ba560f77791b8c Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Sun, 19 Nov 2023 17:33:41 +0000
Subject: [PATCH 264/288] An attempt to fix mypyc tests on MacOS (#16520)

Fixes https://github.com/python/mypy/issues/16420

Although this is not 100% clear yet, after 20 runs on a Mac it no longer
fails (without this patch it failed 20% of the time). Btw, contrary to
the comment, _my_ Linux Mint (which is an Ubuntu derivative) works
perfectly (i.e. the test passed 20 times even after I removed the
`sleep()`). So it is not really a Mac vs Linux issue.
---
 mypyc/test/test_run.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py
index df9d44eab73f..f5c902bf3b3d 100644
--- a/mypyc/test/test_run.py
+++ b/mypyc/test/test_run.py
@@ -172,12 +172,10 @@ def run_case_inner(self, testcase: DataDrivenTestCase) -> None:
             # new by distutils, shift the mtime of all of the
             # generated artifacts back by a second.
             fudge_dir_mtimes(WORKDIR, -1)
-            # On Ubuntu, changing the mtime doesn't work reliably. As
+            # On some OS, changing the mtime doesn't work reliably. As
             # a workaround, sleep.
-            #
             # TODO: Figure out a better approach, since this slows down tests.
-            if sys.platform == "linux":
-                time.sleep(1.0)
+            time.sleep(1.0)
 
             step += 1
             with chdir_manager(".."):

From 3e6b5528e662d0accb6def21d6963361fe894301 Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 22 Nov 2023 11:05:27 +0000
Subject: [PATCH 265/288] Make imprecise constraints handling more robust
 (#16502)

Fixes https://github.com/python/mypy/issues/16485

My initial implementation of imprecise constraints fallback was really
fragile and ad-hoc, and I now see several edge case scenarios where we
may end up using imprecise constraints for a `ParamSpec` while some
precise ones are available. So I re-organized it: now we just infer
everything as normally, and filter out imprecise (if needed) at the very
end, when we have the full picture. I also fix an accidental omission in
`expand_type()`.
---
 mypy/constraints.py                           | 76 +++++++++++--------
 mypy/expandtype.py                            |  1 +
 .../unit/check-parameter-specification.test   | 23 ++++++
 3 files changed, 67 insertions(+), 33 deletions(-)

diff --git a/mypy/constraints.py b/mypy/constraints.py
index 88ede372e011..d6a4b28799e5 100644
--- a/mypy/constraints.py
+++ b/mypy/constraints.py
@@ -226,25 +226,22 @@ def infer_constraints_for_callable(
                 actual_type = mapper.expand_actual_type(
                     actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i]
                 )
-                if (
-                    param_spec
-                    and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2)
-                    and not incomplete_star_mapping
-                ):
+                if param_spec and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2):
                     # If actual arguments are mapped to ParamSpec type, we can't infer individual
                     # constraints, instead store them and infer single constraint at the end.
                     # It is impossible to map actual kind to formal kind, so use some heuristic.
                     # This inference is used as a fallback, so relying on heuristic should be OK.
-                    param_spec_arg_types.append(
-                        mapper.expand_actual_type(
-                            actual_arg_type, arg_kinds[actual], None, arg_kinds[actual]
+                    if not incomplete_star_mapping:
+                        param_spec_arg_types.append(
+                            mapper.expand_actual_type(
+                                actual_arg_type, arg_kinds[actual], None, arg_kinds[actual]
+                            )
                         )
-                    )
-                    actual_kind = arg_kinds[actual]
-                    param_spec_arg_kinds.append(
-                        ARG_POS if actual_kind not in (ARG_STAR, ARG_STAR2) else actual_kind
-                    )
-                    param_spec_arg_names.append(arg_names[actual] if arg_names else None)
+                        actual_kind = arg_kinds[actual]
+                        param_spec_arg_kinds.append(
+                            ARG_POS if actual_kind not in (ARG_STAR, ARG_STAR2) else actual_kind
+                        )
+                        param_spec_arg_names.append(arg_names[actual] if arg_names else None)
                 else:
                     c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF)
                     constraints.extend(c)
@@ -267,6 +264,9 @@ def infer_constraints_for_callable(
                 ),
             )
         )
+    if any(isinstance(v, ParamSpecType) for v in callee.variables):
+        # As a perf optimization filter imprecise constraints only when we can have them.
+        constraints = filter_imprecise_kinds(constraints)
     return constraints
 
 
@@ -1094,29 +1094,18 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]:
                 )
 
                 param_spec_target: Type | None = None
-                skip_imprecise = (
-                    any(c.type_var == param_spec.id for c in res) and cactual.imprecise_arg_kinds
-                )
                 if not cactual_ps:
                     max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)])
                     prefix_len = min(prefix_len, max_prefix_len)
-                    # This logic matches top-level callable constraint exception, if we managed
-                    # to get other constraints for ParamSpec, don't infer one with imprecise kinds
-                    if not skip_imprecise:
-                        param_spec_target = Parameters(
-                            arg_types=cactual.arg_types[prefix_len:],
-                            arg_kinds=cactual.arg_kinds[prefix_len:],
-                            arg_names=cactual.arg_names[prefix_len:],
-                            variables=cactual.variables
-                            if not type_state.infer_polymorphic
-                            else [],
-                            imprecise_arg_kinds=cactual.imprecise_arg_kinds,
-                        )
+                    param_spec_target = Parameters(
+                        arg_types=cactual.arg_types[prefix_len:],
+                        arg_kinds=cactual.arg_kinds[prefix_len:],
+                        arg_names=cactual.arg_names[prefix_len:],
+                        variables=cactual.variables if not type_state.infer_polymorphic else [],
+                        imprecise_arg_kinds=cactual.imprecise_arg_kinds,
+                    )
                 else:
-                    if (
-                        len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types)
-                        and not skip_imprecise
-                    ):
+                    if len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types):
                         param_spec_target = cactual_ps.copy_modified(
                             prefix=Parameters(
                                 arg_types=cactual_ps.prefix.arg_types[prefix_len:],
@@ -1611,3 +1600,24 @@ def infer_callable_arguments_constraints(
                 infer_directed_arg_constraints(left_by_name.typ, right_by_name.typ, direction)
             )
     return res
+
+
+def filter_imprecise_kinds(cs: list[Constraint]) -> list[Constraint]:
+    """For each ParamSpec remove all imprecise constraints, if at least one precise available."""
+    have_precise = set()
+    for c in cs:
+        if not isinstance(c.origin_type_var, ParamSpecType):
+            continue
+        if (
+            isinstance(c.target, ParamSpecType)
+            or isinstance(c.target, Parameters)
+            and not c.target.imprecise_arg_kinds
+        ):
+            have_precise.add(c.type_var)
+    new_cs = []
+    for c in cs:
+        if not isinstance(c.origin_type_var, ParamSpecType) or c.type_var not in have_precise:
+            new_cs.append(c)
+        if not isinstance(c.target, Parameters) or not c.target.imprecise_arg_kinds:
+            new_cs.append(c)
+    return new_cs
diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index cb09a1ee99f5..3acec4b96d06 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -253,6 +253,7 @@ def visit_param_spec(self, t: ParamSpecType) -> Type:
                 t.prefix.arg_kinds + repl.arg_kinds,
                 t.prefix.arg_names + repl.arg_names,
                 variables=[*t.prefix.variables, *repl.variables],
+                imprecise_arg_kinds=repl.imprecise_arg_kinds,
             )
         else:
             # We could encode Any as trivial parameters etc., but it would be too verbose.
diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test
index d65a0214b599..af2be84f5412 100644
--- a/test-data/unit/check-parameter-specification.test
+++ b/test-data/unit/check-parameter-specification.test
@@ -2163,3 +2163,26 @@ def func2(arg: T) -> List[Union[T, str]]:
 reveal_type(func2)  # N: Revealed type is "def [T] (arg: T`-1) -> Union[T`-1, builtins.str]"
 reveal_type(func2(42))  # N: Revealed type is "Union[builtins.int, builtins.str]"
 [builtins fixtures/paramspec.pyi]
+
+[case testParamSpecPreciseKindsUsedIfPossible]
+from typing import Callable, Generic
+from typing_extensions import ParamSpec
+
+P = ParamSpec('P')
+
+class Case(Generic[P]):
+    def __init__(self, *args: P.args, **kwargs: P.kwargs) -> None:
+        pass
+
+def _test(a: int, b: int = 0) -> None: ...
+
+def parametrize(
+    func: Callable[P, None], *cases: Case[P], **named_cases: Case[P]
+) -> Callable[[], None]:
+    ...
+
+parametrize(_test, Case(1, 2), Case(3, 4))
+parametrize(_test, Case(1, b=2), Case(3, b=4))
+parametrize(_test, Case(1, 2), Case(3))
+parametrize(_test, Case(1, 2), Case(3, b=4))
+[builtins fixtures/paramspec.pyi]

From fc811aedbf696c54da144851ccaeeceb19ec9a5e Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 22 Nov 2023 11:10:17 +0000
Subject: [PATCH 266/288] Fix polymorphic application for callback protocols
 (#16514)

Fixes https://github.com/python/mypy/issues/16512

The problems were caused when the same callback protocol appeared multiple
times in a signature. The previous logic confused this with a recursive
callback protocol.
---
 mypy/checkexpr.py                   | 16 +++++++++++-----
 test-data/unit/check-inference.test | 25 +++++++++++++++++++++++++
 2 files changed, 36 insertions(+), 5 deletions(-)

diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py
index da61833bbe5b..626584bc3a20 100644
--- a/mypy/checkexpr.py
+++ b/mypy/checkexpr.py
@@ -6209,11 +6209,16 @@ class PolyTranslator(TypeTranslator):
     See docstring for apply_poly() for details.
     """
 
-    def __init__(self, poly_tvars: Sequence[TypeVarLikeType]) -> None:
+    def __init__(
+        self,
+        poly_tvars: Iterable[TypeVarLikeType],
+        bound_tvars: frozenset[TypeVarLikeType] = frozenset(),
+        seen_aliases: frozenset[TypeInfo] = frozenset(),
+    ) -> None:
         self.poly_tvars = set(poly_tvars)
         # This is a simplified version of TypeVarScope used during semantic analysis.
-        self.bound_tvars: set[TypeVarLikeType] = set()
-        self.seen_aliases: set[TypeInfo] = set()
+        self.bound_tvars = bound_tvars
+        self.seen_aliases = seen_aliases
 
     def collect_vars(self, t: CallableType | Parameters) -> list[TypeVarLikeType]:
         found_vars = []
@@ -6289,10 +6294,11 @@ def visit_instance(self, t: Instance) -> Type:
         if t.args and t.type.is_protocol and t.type.protocol_members == ["__call__"]:
             if t.type in self.seen_aliases:
                 raise PolyTranslationError()
-            self.seen_aliases.add(t.type)
             call = find_member("__call__", t, t, is_operator=True)
             assert call is not None
-            return call.accept(self)
+            return call.accept(
+                PolyTranslator(self.poly_tvars, self.bound_tvars, self.seen_aliases | {t.type})
+            )
         return super().visit_instance(t)
 
 
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 6c98ba2088b1..953855e502d6 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -3788,3 +3788,28 @@ def func2(arg: T) -> List[Union[T, str]]:
 reveal_type(func2)  # N: Revealed type is "def [S] (S`4) -> Union[S`4, builtins.str]"
 reveal_type(func2(42))  # N: Revealed type is "Union[builtins.int, builtins.str]"
 [builtins fixtures/list.pyi]
+
+[case testInferenceAgainstGenericCallbackProtoMultiple]
+from typing import Callable, Protocol, TypeVar
+from typing_extensions import Concatenate, ParamSpec
+
+V_co = TypeVar("V_co", covariant=True)
+class Metric(Protocol[V_co]):
+    def __call__(self) -> V_co: ...
+
+T = TypeVar("T")
+P = ParamSpec("P")
+def simple_metric(func: Callable[Concatenate[int, P], T]) -> Callable[P, T]: ...
+
+@simple_metric
+def Negate(count: int, /, metric: Metric[float]) -> float: ...
+@simple_metric
+def Combine(count: int, m1: Metric[T], m2: Metric[T], /, *more: Metric[T]) -> T: ...
+
+reveal_type(Negate)  # N: Revealed type is "def (metric: __main__.Metric[builtins.float]) -> builtins.float"
+reveal_type(Combine)  # N: Revealed type is "def [T] (def () -> T`4, def () -> T`4, *more: def () -> T`4) -> T`4"
+
+def m1() -> float: ...
+def m2() -> float: ...
+reveal_type(Combine(m1, m2))  # N: Revealed type is "builtins.float"
+[builtins fixtures/list.pyi]

From 242ad2ac4dec105fbed37c177d4cff5944a00f1d Mon Sep 17 00:00:00 2001
From: Ivan Levkivskyi <levkivskyi@gmail.com>
Date: Wed, 22 Nov 2023 11:12:57 +0000
Subject: [PATCH 267/288] Fix crash on TypeGuard in __call__ (#16516)

Fixes https://github.com/python/mypy/issues/16187

Note there may be some more similar crashes; I don't handle them all
properly. For now I leave a TODO and replace the `assert` with `if`, so
at least we should not crash on an unhandled case.
---
 mypy/checker.py                     | 27 +++++++++++++++++----------
 test-data/unit/check-typeguard.test | 15 +++++++++++++++
 2 files changed, 32 insertions(+), 10 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index b9a9d3affb90..7c6f59fafdc8 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -5670,22 +5670,29 @@ def find_isinstance_check_helper(self, node: Expression) -> tuple[TypeMap, TypeM
                     if node.arg_kinds[0] != nodes.ARG_POS:
                         # the first argument might be used as a kwarg
                         called_type = get_proper_type(self.lookup_type(node.callee))
-                        assert isinstance(called_type, (CallableType, Overloaded))
+
+                        # TODO: there are some more cases in check_call() to handle.
+                        if isinstance(called_type, Instance):
+                            call = find_member(
+                                "__call__", called_type, called_type, is_operator=True
+                            )
+                            if call is not None:
+                                called_type = get_proper_type(call)
 
                         # *assuming* the overloaded function is correct, there's a couple cases:
                         #  1) The first argument has different names, but is pos-only. We don't
                         #     care about this case, the argument must be passed positionally.
                         #  2) The first argument allows keyword reference, therefore must be the
                         #     same between overloads.
-                        name = called_type.items[0].arg_names[0]
-
-                        if name in node.arg_names:
-                            idx = node.arg_names.index(name)
-                            # we want the idx-th variable to be narrowed
-                            expr = collapse_walrus(node.args[idx])
-                        else:
-                            self.fail(message_registry.TYPE_GUARD_POS_ARG_REQUIRED, node)
-                            return {}, {}
+                        if isinstance(called_type, (CallableType, Overloaded)):
+                            name = called_type.items[0].arg_names[0]
+                            if name in node.arg_names:
+                                idx = node.arg_names.index(name)
+                                # we want the idx-th variable to be narrowed
+                                expr = collapse_walrus(node.args[idx])
+                            else:
+                                self.fail(message_registry.TYPE_GUARD_POS_ARG_REQUIRED, node)
+                                return {}, {}
                     if literal(expr) == LITERAL_TYPE:
                         # Note: we wrap the target type, so that we can special case later.
                         # Namely, for isinstance() we use a normal meet, while TypeGuard is
diff --git a/test-data/unit/check-typeguard.test b/test-data/unit/check-typeguard.test
index b3b168e5c7c6..c48887bb016a 100644
--- a/test-data/unit/check-typeguard.test
+++ b/test-data/unit/check-typeguard.test
@@ -694,3 +694,18 @@ def foo(x: object) -> TypeGuard[List[str]]: ...
 def test(f: A[T]) -> T: ...
 reveal_type(test(foo))  # N: Revealed type is "builtins.str"
 [builtins fixtures/list.pyi]
+
+[case testNoCrashOnDunderCallTypeGuard]
+from typing_extensions import TypeGuard
+
+class A:
+    def __call__(self, x) -> TypeGuard[int]:
+        return True
+
+a: A
+assert a(x=1)
+
+x: object
+assert a(x=x)
+reveal_type(x)  # N: Revealed type is "builtins.int"
+[builtins fixtures/tuple.pyi]

From 5b1a231425ac807b7118aac6a68b633949412a36 Mon Sep 17 00:00:00 2001
From: Sveinung Gundersen <sveinugu@ifi.uio.no>
Date: Thu, 23 Nov 2023 22:08:09 +0100
Subject: [PATCH 268/288] Docs: update soft-error-limit default value to -1
 (#16542)

The default value of `MANY_ERRORS_THRESHOLD` was set to `-1` in
https://github.com/python/mypy/pull/15138, which is also the default
value of the `--soft-error-limit` CLI option. However, the CLI docs were
not updated accordingly.
---
 docs/source/command_line.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst
index a810c35cb77f..09836e2ffd20 100644
--- a/docs/source/command_line.rst
+++ b/docs/source/command_line.rst
@@ -787,7 +787,7 @@ in error messages.
     disable reporting most additional errors. The limit only applies
     if it seems likely that most of the remaining errors will not be
     useful or they may be overly noisy. If ``N`` is negative, there is
-    no limit. The default limit is 200.
+    no limit. The default limit is -1.
 
 .. option:: --force-uppercase-builtins
 

From 50d6d0b145f3c4be2d7633d61c37244280217d76 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev <mail@sobolevn.me>
Date: Sat, 25 Nov 2023 19:46:10 +0300
Subject: [PATCH 269/288] Do not allow class-level keywords for `NamedTuple`
 (#16526)

Refs #16521
---
 mypy/semanal.py                      |  2 +-
 mypy/semanal_namedtuple.py           | 10 +++++++++-
 mypy/semanal_typeddict.py            |  2 +-
 test-data/unit/check-namedtuple.test | 17 +++++++++++++++++
 4 files changed, 28 insertions(+), 3 deletions(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index 6714e8c56de9..3e3056a9adf7 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -775,7 +775,7 @@ def file_context(
             self.globals = file_node.names
             self.tvar_scope = TypeVarLikeScope()
 
-            self.named_tuple_analyzer = NamedTupleAnalyzer(options, self)
+            self.named_tuple_analyzer = NamedTupleAnalyzer(options, self, self.msg)
             self.typed_dict_analyzer = TypedDictAnalyzer(options, self, self.msg)
             self.enum_call_analyzer = EnumCallAnalyzer(options, self)
             self.newtype_analyzer = NewTypeAnalyzer(options, self, self.msg)
diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py
index 80cf1c4e184a..bc3c5dd61894 100644
--- a/mypy/semanal_namedtuple.py
+++ b/mypy/semanal_namedtuple.py
@@ -9,6 +9,7 @@
 from typing import Final, Iterator, List, Mapping, cast
 
 from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type
+from mypy.messages import MessageBuilder
 from mypy.nodes import (
     ARG_NAMED_OPT,
     ARG_OPT,
@@ -91,9 +92,12 @@
 
 
 class NamedTupleAnalyzer:
-    def __init__(self, options: Options, api: SemanticAnalyzerInterface) -> None:
+    def __init__(
+        self, options: Options, api: SemanticAnalyzerInterface, msg: MessageBuilder
+    ) -> None:
         self.options = options
         self.api = api
+        self.msg = msg
 
     def analyze_namedtuple_classdef(
         self, defn: ClassDef, is_stub_file: bool, is_func_scope: bool
@@ -204,6 +208,10 @@ def check_namedtuple_classdef(
                         )
                 else:
                     default_items[name] = stmt.rvalue
+        if defn.keywords:
+            for_function = ' for "__init_subclass__" of "NamedTuple"'
+            for key in defn.keywords:
+                self.msg.unexpected_keyword_argument_for_function(for_function, key, defn)
         return items, types, default_items, statements
 
     def check_namedtuple(
diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py
index f399d8872a32..13aab4de65e4 100644
--- a/mypy/semanal_typeddict.py
+++ b/mypy/semanal_typeddict.py
@@ -325,7 +325,7 @@ def analyze_typeddict_classdef_fields(
             total = require_bool_literal_argument(self.api, defn.keywords["total"], "total", True)
         if defn.keywords and defn.keywords.keys() != {"total"}:
             for_function = ' for "__init_subclass__" of "TypedDict"'
-            for key in defn.keywords.keys():
+            for key in defn.keywords:
                 if key == "total":
                     continue
                 self.msg.unexpected_keyword_argument_for_function(for_function, key, defn)
diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test
index 14e075339572..51b02b500bd1 100644
--- a/test-data/unit/check-namedtuple.test
+++ b/test-data/unit/check-namedtuple.test
@@ -1354,3 +1354,20 @@ class Test:
         self.item: self.Item  # E: Name "self.Item" is not defined
 [builtins fixtures/tuple.pyi]
 [typing fixtures/typing-namedtuple.pyi]
+
+[case testNoClassKeywordsForNamedTuple]
+from typing import NamedTuple
+class Test1(NamedTuple, x=1, y=2):  # E: Unexpected keyword argument "x" for "__init_subclass__" of "NamedTuple" \
+                                    # E: Unexpected keyword argument "y" for "__init_subclass__" of "NamedTuple"
+    ...
+
+class Meta(type): ...
+
+class Test2(NamedTuple, metaclass=Meta):  # E: Unexpected keyword argument "metaclass" for "__init_subclass__" of "NamedTuple"
+    ...
+
+# Technically this would work, but it is just easier for the implementation:
+class Test3(NamedTuple, metaclass=type):  # E: Unexpected keyword argument "metaclass" for "__init_subclass__" of "NamedTuple"
+    ...
+[builtins fixtures/tuple.pyi]
+[typing fixtures/typing-namedtuple.pyi]

From 9289a336f7e292e33790520b9fe1bdf7ed266124 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Sat, 25 Nov 2023 22:32:09 +0000
Subject: [PATCH 270/288] Speed up tests by simplifying test fixtures (#16560)

Move some definitions away from commonly used fixtures that are only
needed in one or two test cases, as they will slow down many test cases.

This speeds up `mypy/test/testcheck.py` by about 5% on my Linux desktop.
---
 test-data/unit/check-class-namedtuple.test |  2 +-
 test-data/unit/check-expressions.test      |  3 +-
 test-data/unit/check-python310.test        |  2 +-
 test-data/unit/check-tuples.test           |  6 ++
 test-data/unit/check-type-aliases.test     |  2 +-
 test-data/unit/check-typeddict.test        |  4 +-
 test-data/unit/fixtures/dict-full.pyi      | 83 ++++++++++++++++++++++
 test-data/unit/fixtures/dict.pyi           | 33 ++-------
 test-data/unit/fixtures/tuple.pyi          |  2 -
 9 files changed, 102 insertions(+), 35 deletions(-)
 create mode 100644 test-data/unit/fixtures/dict-full.pyi

diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test
index a095f212b900..f334b9011645 100644
--- a/test-data/unit/check-class-namedtuple.test
+++ b/test-data/unit/check-class-namedtuple.test
@@ -301,7 +301,7 @@ reveal_type(X._field_defaults)  # N: Revealed type is "builtins.dict[builtins.st
 # but it's inferred as `Mapping[str, object]` here due to the fixture we're using
 reveal_type(X.__annotations__)  # N: Revealed type is "typing.Mapping[builtins.str, builtins.object]"
 
-[builtins fixtures/dict.pyi]
+[builtins fixtures/dict-full.pyi]
 
 [case testNewNamedTupleUnit]
 from typing import NamedTuple
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test
index 8fe68365e5ac..04b3f7a131cc 100644
--- a/test-data/unit/check-expressions.test
+++ b/test-data/unit/check-expressions.test
@@ -1589,8 +1589,7 @@ if str():
 ....a  # E: "ellipsis" has no attribute "a"
 
 class A: pass
-[builtins fixtures/dict.pyi]
-[out]
+[builtins fixtures/dict-full.pyi]
 
 
 -- Yield expression
diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test
index d3cdf3af849d..cbb26a130738 100644
--- a/test-data/unit/check-python310.test
+++ b/test-data/unit/check-python310.test
@@ -931,7 +931,7 @@ match x:
 reveal_type(x)  # N: Revealed type is "builtins.list[builtins.list[builtins.dict[builtins.int, builtins.int]]]"
 reveal_type(y)  # N: Revealed type is "builtins.int"
 reveal_type(z)  # N: Revealed type is "builtins.int"
-[builtins fixtures/dict.pyi]
+[builtins fixtures/dict-full.pyi]
 
 [case testMatchNonFinalMatchArgs]
 class A:
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test
index 4f468b59fc3f..66115ca0c30d 100644
--- a/test-data/unit/check-tuples.test
+++ b/test-data/unit/check-tuples.test
@@ -957,6 +957,12 @@ for x in B(), A():
 [builtins fixtures/for.pyi]
 
 [case testTupleIterable]
+from typing import Iterable, Optional, TypeVar
+
+T = TypeVar("T")
+
+def sum(iterable: Iterable[T], start: Optional[T] = None) -> T: pass
+
 y = 'a'
 x = sum((1,2))
 if int():
diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test
index 46f5ff07f1ac..4364a9bfa9dc 100644
--- a/test-data/unit/check-type-aliases.test
+++ b/test-data/unit/check-type-aliases.test
@@ -1065,4 +1065,4 @@ def eval(e: Expr) -> int:
         return e[1]
     elif e[0] == 456:
         return -eval(e[1])
-[builtins fixtures/dict.pyi]
+[builtins fixtures/dict-full.pyi]
diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test
index dc808390021a..d8022f85574c 100644
--- a/test-data/unit/check-typeddict.test
+++ b/test-data/unit/check-typeddict.test
@@ -2708,7 +2708,7 @@ class TD(TypedDict):
 reveal_type(TD.__iter__)  # N: Revealed type is "def (typing._TypedDict) -> typing.Iterator[builtins.str]"
 reveal_type(TD.__annotations__)  # N: Revealed type is "typing.Mapping[builtins.str, builtins.object]"
 reveal_type(TD.values)  # N: Revealed type is "def (self: typing.Mapping[T`1, T_co`2]) -> typing.Iterable[T_co`2]"
-[builtins fixtures/dict.pyi]
+[builtins fixtures/dict-full.pyi]
 [typing fixtures/typing-typeddict.pyi]
 
 [case testGenericTypedDictAlias]
@@ -3299,7 +3299,7 @@ main:10: error: No overload variant of "__ror__" of "dict" matches argument type
 main:10: note: Possible overload variants:
 main:10: note:     def __ror__(self, Dict[Any, Any], /) -> Dict[Any, Any]
 main:10: note:     def [T, T2] __ror__(self, Dict[T, T2], /) -> Dict[Union[Any, T], Union[Any, T2]]
-[builtins fixtures/dict.pyi]
+[builtins fixtures/dict-full.pyi]
 [typing fixtures/typing-typeddict-iror.pyi]
 
 [case testTypedDictWith__ror__method]
diff --git a/test-data/unit/fixtures/dict-full.pyi b/test-data/unit/fixtures/dict-full.pyi
new file mode 100644
index 000000000000..f20369ce9332
--- /dev/null
+++ b/test-data/unit/fixtures/dict-full.pyi
@@ -0,0 +1,83 @@
+# Builtins stub used in dictionary-related test cases (more complete).
+
+from _typeshed import SupportsKeysAndGetItem
+import _typeshed
+from typing import (
+    TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union, Sequence,
+    Self,
+)
+
+T = TypeVar('T')
+T2 = TypeVar('T2')
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
+class object:
+    def __init__(self) -> None: pass
+    def __init_subclass__(cls) -> None: pass
+    def __eq__(self, other: object) -> bool: pass
+
+class type:
+    __annotations__: Mapping[str, object]
+
+class dict(Mapping[KT, VT]):
+    @overload
+    def __init__(self, **kwargs: VT) -> None: pass
+    @overload
+    def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass
+    def __getitem__(self, key: KT) -> VT: pass
+    def __setitem__(self, k: KT, v: VT) -> None: pass
+    def __iter__(self) -> Iterator[KT]: pass
+    def __contains__(self, item: object) -> int: pass
+    def update(self, a: SupportsKeysAndGetItem[KT, VT]) -> None: pass
+    @overload
+    def get(self, k: KT) -> Optional[VT]: pass
+    @overload
+    def get(self, k: KT, default: Union[VT, T]) -> Union[VT, T]: pass
+    def __len__(self) -> int: ...
+
+    # This was actually added in 3.9:
+    @overload
+    def __or__(self, __value: dict[KT, VT]) -> dict[KT, VT]: ...
+    @overload
+    def __or__(self, __value: dict[T, T2]) -> dict[Union[KT, T], Union[VT, T2]]: ...
+    @overload
+    def __ror__(self, __value: dict[KT, VT]) -> dict[KT, VT]: ...
+    @overload
+    def __ror__(self, __value: dict[T, T2]) -> dict[Union[KT, T], Union[VT, T2]]: ...
+    # dict.__ior__ should be kept roughly in line with MutableMapping.update()
+    @overload  # type: ignore[misc]
+    def __ior__(self, __value: _typeshed.SupportsKeysAndGetItem[KT, VT]) -> Self: ...
+    @overload
+    def __ior__(self, __value: Iterable[Tuple[KT, VT]]) -> Self: ...
+
+class int: # for convenience
+    def __add__(self, x: Union[int, complex]) -> int: pass
+    def __radd__(self, x: int) -> int: pass
+    def __sub__(self, x: Union[int, complex]) -> int: pass
+    def __neg__(self) -> int: pass
+    real: int
+    imag: int
+
+class str: pass # for keyword argument key type
+class bytes: pass
+
+class list(Sequence[T]): # needed by some test cases
+    def __getitem__(self, x: int) -> T: pass
+    def __iter__(self) -> Iterator[T]: pass
+    def __mul__(self, x: int) -> list[T]: pass
+    def __contains__(self, item: object) -> bool: pass
+    def append(self, item: T) -> None: pass
+
+class tuple(Generic[T]): pass
+class function: pass
+class float: pass
+class complex: pass
+class bool(int): pass
+
+class ellipsis:
+    __class__: object
+def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
+class BaseException: pass
+
+def iter(__iterable: Iterable[T]) -> Iterator[T]: pass
diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi
index 7c0c8767f7d7..ed2287511161 100644
--- a/test-data/unit/fixtures/dict.pyi
+++ b/test-data/unit/fixtures/dict.pyi
@@ -1,4 +1,7 @@
-# Builtins stub used in dictionary-related test cases.
+# Builtins stub used in dictionary-related test cases (stripped down).
+#
+# NOTE: Use dict-full.pyi if you need more builtins instead of adding here,
+#       if feasible.
 
 from _typeshed import SupportsKeysAndGetItem
 import _typeshed
@@ -14,11 +17,9 @@ VT = TypeVar('VT')
 
 class object:
     def __init__(self) -> None: pass
-    def __init_subclass__(cls) -> None: pass
     def __eq__(self, other: object) -> bool: pass
 
-class type:
-    __annotations__: Mapping[str, object]
+class type: pass
 
 class dict(Mapping[KT, VT]):
     @overload
@@ -36,28 +37,10 @@ class dict(Mapping[KT, VT]):
     def get(self, k: KT, default: Union[VT, T]) -> Union[VT, T]: pass
     def __len__(self) -> int: ...
 
-    # This was actually added in 3.9:
-    @overload
-    def __or__(self, __value: dict[KT, VT]) -> dict[KT, VT]: ...
-    @overload
-    def __or__(self, __value: dict[T, T2]) -> dict[Union[KT, T], Union[VT, T2]]: ...
-    @overload
-    def __ror__(self, __value: dict[KT, VT]) -> dict[KT, VT]: ...
-    @overload
-    def __ror__(self, __value: dict[T, T2]) -> dict[Union[KT, T], Union[VT, T2]]: ...
-    # dict.__ior__ should be kept roughly in line with MutableMapping.update()
-    @overload  # type: ignore[misc]
-    def __ior__(self, __value: _typeshed.SupportsKeysAndGetItem[KT, VT]) -> Self: ...
-    @overload
-    def __ior__(self, __value: Iterable[Tuple[KT, VT]]) -> Self: ...
-
 class int: # for convenience
     def __add__(self, x: Union[int, complex]) -> int: pass
     def __radd__(self, x: int) -> int: pass
     def __sub__(self, x: Union[int, complex]) -> int: pass
-    def __neg__(self) -> int: pass
-    real: int
-    imag: int
 
 class str: pass # for keyword argument key type
 class bytes: pass
@@ -74,10 +57,8 @@ class function: pass
 class float: pass
 class complex: pass
 class bool(int): pass
-
-class ellipsis:
-    __class__: object
-def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
+class ellipsis: pass
 class BaseException: pass
 
+def isinstance(x: object, t: Union[type, Tuple[type, ...]]) -> bool: pass
 def iter(__iterable: Iterable[T]) -> Iterator[T]: pass
diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi
index e270f3d79d3e..cb6347e9f2fd 100644
--- a/test-data/unit/fixtures/tuple.pyi
+++ b/test-data/unit/fixtures/tuple.pyi
@@ -49,8 +49,6 @@ class list(Sequence[T], Generic[T]):
 
 def isinstance(x: object, t: type) -> bool: pass
 
-def sum(iterable: Iterable[T], start: Optional[T] = None) -> T: pass
-
 class BaseException: pass
 
 class dict: pass

From 1200d1d956e589a0a33c86ef8a7cb3f5a9b64f1f Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Sat, 25 Nov 2023 22:32:20 +0000
Subject: [PATCH 271/288] Add fast path to analyzing special form assignments
 (#16561)

This showed up as hot spot in a CPU profile collected when running
tests.

This makes `mypy/test/testcheck.py` about 2% faster on my Linux desktop.
---
 mypy/semanal.py | 33 +++++++++++++++++----------------
 1 file changed, 17 insertions(+), 16 deletions(-)

diff --git a/mypy/semanal.py b/mypy/semanal.py
index 3e3056a9adf7..4128369ace5d 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -2855,22 +2855,23 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None:
         if self.check_and_set_up_type_alias(s):
             s.is_alias_def = True
             special_form = True
-        # * type variable definition
-        elif self.process_typevar_declaration(s):
-            special_form = True
-        elif self.process_paramspec_declaration(s):
-            special_form = True
-        elif self.process_typevartuple_declaration(s):
-            special_form = True
-        # * type constructors
-        elif self.analyze_namedtuple_assign(s):
-            special_form = True
-        elif self.analyze_typeddict_assign(s):
-            special_form = True
-        elif self.newtype_analyzer.process_newtype_declaration(s):
-            special_form = True
-        elif self.analyze_enum_assign(s):
-            special_form = True
+        elif isinstance(s.rvalue, CallExpr):
+            # * type variable definition
+            if self.process_typevar_declaration(s):
+                special_form = True
+            elif self.process_paramspec_declaration(s):
+                special_form = True
+            elif self.process_typevartuple_declaration(s):
+                special_form = True
+            # * type constructors
+            elif self.analyze_namedtuple_assign(s):
+                special_form = True
+            elif self.analyze_typeddict_assign(s):
+                special_form = True
+            elif self.newtype_analyzer.process_newtype_declaration(s):
+                special_form = True
+            elif self.analyze_enum_assign(s):
+                special_form = True
 
         if special_form:
             self.record_special_form_lvalue(s)

From e69c5cde8643e04a54a644cc27814ab98181541d Mon Sep 17 00:00:00 2001
From: Ali Hamdan <ali.hamdan.dev@gmail.com>
Date: Mon, 27 Nov 2023 03:27:08 +0200
Subject: [PATCH 272/288] stubgen: Preserve simple defaults in function
 signatures (#15355)

Fixes #13238

See also https://github.com/python/typeshed/issues/8988
---
 mypy/stubdoc.py             | 15 +++++--
 mypy/stubgen.py             | 73 +++++++++++++++++++++++++++++-
 test-data/unit/stubgen.test | 89 ++++++++++++++++++++++++++++---------
 3 files changed, 153 insertions(+), 24 deletions(-)

diff --git a/mypy/stubdoc.py b/mypy/stubdoc.py
index c277573f0b59..126ac44e142e 100644
--- a/mypy/stubdoc.py
+++ b/mypy/stubdoc.py
@@ -36,11 +36,19 @@ def is_valid_type(s: str) -> bool:
 class ArgSig:
     """Signature info for a single argument."""
 
-    def __init__(self, name: str, type: str | None = None, default: bool = False):
+    def __init__(
+        self,
+        name: str,
+        type: str | None = None,
+        *,
+        default: bool = False,
+        default_value: str = "...",
+    ) -> None:
         self.name = name
         self.type = type
         # Does this argument have a default value?
         self.default = default
+        self.default_value = default_value
 
     def is_star_arg(self) -> bool:
         return self.name.startswith("*") and not self.name.startswith("**")
@@ -59,6 +67,7 @@ def __eq__(self, other: Any) -> bool:
                 self.name == other.name
                 and self.type == other.type
                 and self.default == other.default
+                and self.default_value == other.default_value
             )
         return False
 
@@ -119,10 +128,10 @@ def format_sig(
             if arg_type:
                 arg_def += ": " + arg_type
                 if arg.default:
-                    arg_def += " = ..."
+                    arg_def += f" = {arg.default_value}"
 
             elif arg.default:
-                arg_def += "=..."
+                arg_def += f"={arg.default_value}"
 
             args.append(arg_def)
 
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index 837cd723c410..fff6ab058459 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -99,6 +99,7 @@
     NameExpr,
     OpExpr,
     OverloadedFuncDef,
+    SetExpr,
     Statement,
     StrExpr,
     TempNode,
@@ -491,15 +492,21 @@ def _get_func_args(self, o: FuncDef, ctx: FunctionContext) -> list[ArgSig]:
             if kind.is_named() and not any(arg.name.startswith("*") for arg in args):
                 args.append(ArgSig("*"))
 
+            default = "..."
             if arg_.initializer:
                 if not typename:
                     typename = self.get_str_type_of_node(arg_.initializer, True, False)
+                potential_default, valid = self.get_str_default_of_node(arg_.initializer)
+                if valid and len(potential_default) <= 200:
+                    default = potential_default
             elif kind == ARG_STAR:
                 name = f"*{name}"
             elif kind == ARG_STAR2:
                 name = f"**{name}"
 
-            args.append(ArgSig(name, typename, default=bool(arg_.initializer)))
+            args.append(
+                ArgSig(name, typename, default=bool(arg_.initializer), default_value=default)
+            )
 
         if ctx.class_info is not None and all(
             arg.type is None and arg.default is False for arg in args
@@ -1234,6 +1241,70 @@ def maybe_unwrap_unary_expr(self, expr: Expression) -> Expression:
         # This is some other unary expr, we cannot do anything with it (yet?).
         return expr
 
+    def get_str_default_of_node(self, rvalue: Expression) -> tuple[str, bool]:
+        """Get a string representation of the default value of a node.
+
+        Returns a 2-tuple of the default and whether or not it is valid.
+        """
+        if isinstance(rvalue, NameExpr):
+            if rvalue.name in ("None", "True", "False"):
+                return rvalue.name, True
+        elif isinstance(rvalue, (IntExpr, FloatExpr)):
+            return f"{rvalue.value}", True
+        elif isinstance(rvalue, UnaryExpr):
+            if isinstance(rvalue.expr, (IntExpr, FloatExpr)):
+                return f"{rvalue.op}{rvalue.expr.value}", True
+        elif isinstance(rvalue, StrExpr):
+            return repr(rvalue.value), True
+        elif isinstance(rvalue, BytesExpr):
+            return "b" + repr(rvalue.value).replace("\\\\", "\\"), True
+        elif isinstance(rvalue, TupleExpr):
+            items_defaults = []
+            for e in rvalue.items:
+                e_default, valid = self.get_str_default_of_node(e)
+                if not valid:
+                    break
+                items_defaults.append(e_default)
+            else:
+                closing = ",)" if len(items_defaults) == 1 else ")"
+                default = "(" + ", ".join(items_defaults) + closing
+                return default, True
+        elif isinstance(rvalue, ListExpr):
+            items_defaults = []
+            for e in rvalue.items:
+                e_default, valid = self.get_str_default_of_node(e)
+                if not valid:
+                    break
+                items_defaults.append(e_default)
+            else:
+                default = "[" + ", ".join(items_defaults) + "]"
+                return default, True
+        elif isinstance(rvalue, SetExpr):
+            items_defaults = []
+            for e in rvalue.items:
+                e_default, valid = self.get_str_default_of_node(e)
+                if not valid:
+                    break
+                items_defaults.append(e_default)
+            else:
+                if items_defaults:
+                    default = "{" + ", ".join(items_defaults) + "}"
+                    return default, True
+        elif isinstance(rvalue, DictExpr):
+            items_defaults = []
+            for k, v in rvalue.items:
+                if k is None:
+                    break
+                k_default, k_valid = self.get_str_default_of_node(k)
+                v_default, v_valid = self.get_str_default_of_node(v)
+                if not (k_valid and v_valid):
+                    break
+                items_defaults.append(f"{k_default}: {v_default}")
+            else:
+                default = "{" + ", ".join(items_defaults) + "}"
+                return default, True
+        return "...", False
+
     def should_reexport(self, name: str, full_module: str, name_is_alias: bool) -> bool:
         is_private = self.is_private_name(name, full_module + "." + name)
         if (
diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test
index 2a43ce16383d..cd38242ce031 100644
--- a/test-data/unit/stubgen.test
+++ b/test-data/unit/stubgen.test
@@ -27,20 +27,20 @@ def g(arg) -> None: ...
 def f(a, b=2): ...
 def g(b=-1, c=0): ...
 [out]
-def f(a, b: int = ...) -> None: ...
-def g(b: int = ..., c: int = ...) -> None: ...
+def f(a, b: int = 2) -> None: ...
+def g(b: int = -1, c: int = 0) -> None: ...
 
 [case testDefaultArgNone]
 def f(x=None): ...
 [out]
 from _typeshed import Incomplete
 
-def f(x: Incomplete | None = ...) -> None: ...
+def f(x: Incomplete | None = None) -> None: ...
 
 [case testDefaultArgBool]
 def f(x=True, y=False): ...
 [out]
-def f(x: bool = ..., y: bool = ...) -> None: ...
+def f(x: bool = True, y: bool = False) -> None: ...
 
 [case testDefaultArgBool_inspect]
 def f(x=True, y=False): ...
@@ -48,9 +48,9 @@ def f(x=True, y=False): ...
 def f(x: bool = ..., y: bool = ...): ...
 
 [case testDefaultArgStr]
-def f(x='foo'): ...
+def f(x='foo',y="how's quotes"): ...
 [out]
-def f(x: str = ...) -> None: ...
+def f(x: str = 'foo', y: str = "how's quotes") -> None: ...
 
 [case testDefaultArgStr_inspect]
 def f(x='foo'): ...
@@ -58,14 +58,16 @@ def f(x='foo'): ...
 def f(x: str = ...): ...
 
 [case testDefaultArgBytes]
-def f(x=b'foo'): ...
+def f(x=b'foo',y=b"what's up",z=b'\xc3\xa0 la une'): ...
 [out]
-def f(x: bytes = ...) -> None: ...
+def f(x: bytes = b'foo', y: bytes = b"what's up", z: bytes = b'\xc3\xa0 la une') -> None: ...
 
 [case testDefaultArgFloat]
-def f(x=1.2): ...
+def f(x=1.2,y=1e-6,z=0.0,w=-0.0,v=+1.0): ...
+def g(x=float("nan"), y=float("inf"), z=float("-inf")): ...
 [out]
-def f(x: float = ...) -> None: ...
+def f(x: float = 1.2, y: float = 1e-06, z: float = 0.0, w: float = -0.0, v: float = +1.0) -> None: ...
+def g(x=..., y=..., z=...) -> None: ...
 
 [case testDefaultArgOther]
 def f(x=ord): ...
@@ -126,10 +128,10 @@ def i(a, *, b=1): ...
 def j(a, *, b=1, **c): ...
 [out]
 def f(a, *b, **c) -> None: ...
-def g(a, *b, c: int = ...) -> None: ...
-def h(a, *b, c: int = ..., **d) -> None: ...
-def i(a, *, b: int = ...) -> None: ...
-def j(a, *, b: int = ..., **c) -> None: ...
+def g(a, *b, c: int = 1) -> None: ...
+def h(a, *b, c: int = 1, **d) -> None: ...
+def i(a, *, b: int = 1) -> None: ...
+def j(a, *, b: int = 1, **c) -> None: ...
 
 [case testClass]
 class A:
@@ -356,8 +358,8 @@ y: Incomplete
 def f(x, *, y=1): ...
 def g(x, *, y=1, z=2): ...
 [out]
-def f(x, *, y: int = ...) -> None: ...
-def g(x, *, y: int = ..., z: int = ...) -> None: ...
+def f(x, *, y: int = 1) -> None: ...
+def g(x, *, y: int = 1, z: int = 2) -> None: ...
 
 [case testProperty]
 class A:
@@ -1285,8 +1287,8 @@ from _typeshed import Incomplete
 
 class A:
     x: Incomplete
-    def __init__(self, a: Incomplete | None = ...) -> None: ...
-    def method(self, a: Incomplete | None = ...) -> None: ...
+    def __init__(self, a: Incomplete | None = None) -> None: ...
+    def method(self, a: Incomplete | None = None) -> None: ...
 
 [case testAnnotationImportsFrom]
 import foo
@@ -2514,7 +2516,7 @@ from _typeshed import Incomplete as _Incomplete
 
 Y: _Incomplete
 
-def g(x: _Incomplete | None = ...) -> None: ...
+def g(x: _Incomplete | None = None) -> None: ...
 
 x: _Incomplete
 
@@ -3503,7 +3505,7 @@ class P(Protocol):
 [case testNonDefaultKeywordOnlyArgAfterAsterisk]
 def func(*, non_default_kwarg: bool, default_kwarg: bool = True): ...
 [out]
-def func(*, non_default_kwarg: bool, default_kwarg: bool = ...): ...
+def func(*, non_default_kwarg: bool, default_kwarg: bool = True): ...
 
 [case testNestedGenerator]
 def f1():
@@ -3909,6 +3911,53 @@ def gen2() -> _Generator[_Incomplete, _Incomplete, _Incomplete]: ...
 class X(_Incomplete): ...
 class Y(_Incomplete): ...
 
+[case testIgnoreLongDefaults]
+def f(x='abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\
+abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\
+abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\
+abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz'): ...
+
+def g(x=b'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\
+abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\
+abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\
+abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz'): ...
+
+def h(x=123456789012345678901234567890123456789012345678901234567890\
+123456789012345678901234567890123456789012345678901234567890\
+123456789012345678901234567890123456789012345678901234567890\
+123456789012345678901234567890123456789012345678901234567890): ...
+
+[out]
+def f(x: str = ...) -> None: ...
+def g(x: bytes = ...) -> None: ...
+def h(x: int = ...) -> None: ...
+
+[case testDefaultsOfBuiltinContainers]
+def f(x=(), y=(1,), z=(1, 2)): ...
+def g(x=[], y=[1, 2]): ...
+def h(x={}, y={1: 2, 3: 4}): ...
+def i(x={1, 2, 3}): ...
+def j(x=[(1,"a"), (2,"b")]): ...
+
+[out]
+def f(x=(), y=(1,), z=(1, 2)) -> None: ...
+def g(x=[], y=[1, 2]) -> None: ...
+def h(x={}, y={1: 2, 3: 4}) -> None: ...
+def i(x={1, 2, 3}) -> None: ...
+def j(x=[(1, 'a'), (2, 'b')]) -> None: ...
+
+[case testDefaultsOfBuiltinContainersWithNonTrivialContent]
+def f(x=(1, u.v), y=(k(),), z=(w,)): ...
+def g(x=[1, u.v], y=[k()], z=[w]): ...
+def h(x={1: u.v}, y={k(): 2}, z={m: m}, w={**n}): ...
+def i(x={u.v, 2}, y={3, k()}, z={w}): ...
+
+[out]
+def f(x=..., y=..., z=...) -> None: ...
+def g(x=..., y=..., z=...) -> None: ...
+def h(x=..., y=..., z=..., w=...) -> None: ...
+def i(x=..., y=..., z=...) -> None: ...
+
 [case testDataclass]
 import dataclasses
 import dataclasses as dcs

From 379d59e1a7e121d5b7f75aed26944620d4ccff37 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo <jukka.lehtosalo@iki.fi>
Date: Mon, 27 Nov 2023 16:41:36 +0000
Subject: [PATCH 273/288] Fix multiprocessing warnings when running tests on
 Python 3.12 (#16564)

I saw a bunch of warnings when running tests in parallel using pytest.
When running tests sequentially using `-n0` I didn't see warnings. This
only seems to happen on Linux.

The warnings were like these, which can be fixed by avoiding the use of
fork, and using forkserver instead:
```
mypy/test/teststubgen.py::StubgenPythonSuite::stubgen.test::testNestedClass_inspect
  /usr/local/lib/python3.12/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=84587) is multi-threaded, use of fork() may lead to deadlocks in the child.
    self.pid = os.fork()
```

Relevant discussion:

https://discuss.python.org/t/concerns-regarding-deprecation-of-fork-with-alive-threads/33555
---
 mypy/moduleinspect.py | 12 ++++++++----
 mypy/test/testipc.py  | 21 ++++++++++++++-------
 2 files changed, 22 insertions(+), 11 deletions(-)

diff --git a/mypy/moduleinspect.py b/mypy/moduleinspect.py
index 580b31fb4107..35db2132f66c 100644
--- a/mypy/moduleinspect.py
+++ b/mypy/moduleinspect.py
@@ -8,7 +8,7 @@
 import pkgutil
 import queue
 import sys
-from multiprocessing import Process, Queue
+from multiprocessing import Queue, get_context
 from types import ModuleType
 
 
@@ -123,9 +123,13 @@ def __init__(self) -> None:
         self._start()
 
     def _start(self) -> None:
-        self.tasks: Queue[str] = Queue()
-        self.results: Queue[ModuleProperties | str] = Queue()
-        self.proc = Process(target=worker, args=(self.tasks, self.results, sys.path))
+        if sys.platform == "linux":
+            ctx = get_context("forkserver")
+        else:
+            ctx = get_context("spawn")
+        self.tasks: Queue[str] = ctx.Queue()
+        self.results: Queue[ModuleProperties | str] = ctx.Queue()
+        self.proc = ctx.Process(target=worker, args=(self.tasks, self.results, sys.path))
         self.proc.start()
         self.counter = 0  # Number of successful roundtrips
 
diff --git a/mypy/test/testipc.py b/mypy/test/testipc.py
index 8ef656dc4579..0224035a7b61 100644
--- a/mypy/test/testipc.py
+++ b/mypy/test/testipc.py
@@ -2,7 +2,7 @@
 
 import sys
 import time
-from multiprocessing import Process, Queue
+from multiprocessing import Queue, get_context
 from unittest import TestCase, main
 
 import pytest
@@ -35,10 +35,17 @@ def server_multi_message_echo(q: Queue[str]) -> None:
 
 
 class IPCTests(TestCase):
+    def setUp(self) -> None:
+        if sys.platform == "linux":
+            # The default "fork" start method is potentially unsafe
+            self.ctx = get_context("forkserver")
+        else:
+            self.ctx = get_context("spawn")
+
     def test_transaction_large(self) -> None:
-        queue: Queue[str] = Queue()
+        queue: Queue[str] = self.ctx.Queue()
         msg = "t" * 200000  # longer than the max read size of 100_000
-        p = Process(target=server, args=(msg, queue), daemon=True)
+        p = self.ctx.Process(target=server, args=(msg, queue), daemon=True)
         p.start()
         connection_name = queue.get()
         with IPCClient(connection_name, timeout=1) as client:
@@ -49,9 +56,9 @@ def test_transaction_large(self) -> None:
         p.join()
 
     def test_connect_twice(self) -> None:
-        queue: Queue[str] = Queue()
+        queue: Queue[str] = self.ctx.Queue()
         msg = "this is a test message"
-        p = Process(target=server, args=(msg, queue), daemon=True)
+        p = self.ctx.Process(target=server, args=(msg, queue), daemon=True)
         p.start()
         connection_name = queue.get()
         with IPCClient(connection_name, timeout=1) as client:
@@ -67,8 +74,8 @@ def test_connect_twice(self) -> None:
         assert p.exitcode == 0
 
     def test_multiple_messages(self) -> None:
-        queue: Queue[str] = Queue()
-        p = Process(target=server_multi_message_echo, args=(queue,), daemon=True)
+        queue: Queue[str] = self.ctx.Queue()
+        p = self.ctx.Process(target=server_multi_message_echo, args=(queue,), daemon=True)
         p.start()
         connection_name = queue.get()
         with IPCClient(connection_name, timeout=1) as client:

From 69b31445280d7c495fa0268b24fc558bcbe74505 Mon Sep 17 00:00:00 2001
From: Chad Dombrova <chadrik@gmail.com>
Date: Wed, 29 Nov 2023 00:14:38 -0500
Subject: [PATCH 274/288] Fix stubgen regressions with pybind11 and mypy 1.7
 (#16504)

This addresses several regressions identified in
https://github.com/python/mypy/issues/16486

The primary regression from https://github.com/python/mypy/pull/15770 is
that pybind11 properties with docstrings were erroneously assigned
`typeshed.Incomplete`.

The reason for the regression is that as of the introduction of the
`--include-docstring` feature
(https://github.com/python/mypy/pull/13284, not my PR, ftr),
`./misc/test-stubgenc.sh` began always reporting success. That has been
fixed.

It was also pointed out that `--include-docstring` does not work for
C-extensions. This was not actually a regression as it turns out this
feature was never implemented for C-extensions (though the tests
suggested it had been), but luckily my efforts to unify the pure-python
and C-extension code-paths made fixing this super easy (barely an
inconvenience)! So that is working now.

I added back the extended list of `typing` objects that generate
implicit imports for the inspection-based stub generator. I originally
removed these because I encountered an issue generating stubs for
`PySide2` (and another internal library) where there was an object with
the same name as one of the `typing` objects and the auto-import created
broken stubs. I felt somewhat justified in this decision as there was a
straightforward solution -- e.g. use `list` or `typing.List` instead of
`List`. That said, I recognize that the problem that I encountered is
more niche than the general desire to add import statements for typing
objects, so I've changed the behavior back for now, with the intention
to eventually add a flag to control this behavior.
---
 misc/test-stubgenc.sh                         |  2 +-
 mypy/stubdoc.py                               |  3 +-
 mypy/stubgen.py                               |  1 +
 mypy/stubgenc.py                              | 52 +++++++++++++++++--
 mypy/stubutil.py                              | 18 +++----
 test-data/pybind11_mypy_demo/src/main.cpp     | 11 +++-
 .../pybind11_mypy_demo/__init__.pyi           |  1 +
 .../pybind11_mypy_demo/basics.pyi             | 32 ++++++------
 .../stubgen/pybind11_mypy_demo/basics.pyi     |  3 +-
 9 files changed, 88 insertions(+), 35 deletions(-)

diff --git a/misc/test-stubgenc.sh b/misc/test-stubgenc.sh
index 7713e1b04e43..5cb5140eba76 100755
--- a/misc/test-stubgenc.sh
+++ b/misc/test-stubgenc.sh
@@ -24,7 +24,7 @@ function stubgenc_test() {
     # Compare generated stubs to expected ones
     if ! git diff --exit-code "$STUBGEN_OUTPUT_FOLDER";
     then
-        EXIT=$?
+        EXIT=1
     fi
 }
 
diff --git a/mypy/stubdoc.py b/mypy/stubdoc.py
index 126ac44e142e..86ff6e2bb540 100644
--- a/mypy/stubdoc.py
+++ b/mypy/stubdoc.py
@@ -383,7 +383,8 @@ def infer_ret_type_sig_from_docstring(docstr: str, name: str) -> str | None:
 
 def infer_ret_type_sig_from_anon_docstring(docstr: str) -> str | None:
     """Convert signature in form of "(self: TestClass, arg0) -> int" to their return type."""
-    return infer_ret_type_sig_from_docstring("stub" + docstr.strip(), "stub")
+    lines = ["stub" + line.strip() for line in docstr.splitlines() if line.strip().startswith("(")]
+    return infer_ret_type_sig_from_docstring("".join(lines), "stub")
 
 
 def parse_signature(sig: str) -> tuple[str, list[str], list[str]] | None:
diff --git a/mypy/stubgen.py b/mypy/stubgen.py
index fff6ab058459..23b5fde9dff2 100755
--- a/mypy/stubgen.py
+++ b/mypy/stubgen.py
@@ -1700,6 +1700,7 @@ def generate_stubs(options: Options) -> None:
                 doc_dir=options.doc_dir,
                 include_private=options.include_private,
                 export_less=options.export_less,
+                include_docstrings=options.include_docstrings,
             )
     num_modules = len(all_modules)
     if not options.quiet and num_modules > 0:
diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py
index 0ad79a4265b3..39288197f477 100755
--- a/mypy/stubgenc.py
+++ b/mypy/stubgenc.py
@@ -126,10 +126,12 @@ def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> s
         """Infer property type from docstring or docstring signature."""
         if ctx.docstring is not None:
             inferred = infer_ret_type_sig_from_anon_docstring(ctx.docstring)
-            if not inferred:
-                inferred = infer_ret_type_sig_from_docstring(ctx.docstring, ctx.name)
-            if not inferred:
-                inferred = infer_prop_type_from_docstring(ctx.docstring)
+            if inferred:
+                return inferred
+            inferred = infer_ret_type_sig_from_docstring(ctx.docstring, ctx.name)
+            if inferred:
+                return inferred
+            inferred = infer_prop_type_from_docstring(ctx.docstring)
             return inferred
         else:
             return None
@@ -237,6 +239,26 @@ def __init__(
         self.resort_members = self.is_c_module
         super().__init__(_all_, include_private, export_less, include_docstrings)
         self.module_name = module_name
+        if self.is_c_module:
+            # Add additional implicit imports.
+            # C-extensions are given more lattitude since they do not import the typing module.
+            self.known_imports.update(
+                {
+                    "typing": [
+                        "Any",
+                        "Callable",
+                        "ClassVar",
+                        "Dict",
+                        "Iterable",
+                        "Iterator",
+                        "List",
+                        "NamedTuple",
+                        "Optional",
+                        "Tuple",
+                        "Union",
+                    ]
+                }
+            )
 
     def get_default_function_sig(self, func: object, ctx: FunctionContext) -> FunctionSig:
         argspec = None
@@ -590,9 +612,29 @@ def generate_function_stub(
                 if inferred[0].args and inferred[0].args[0].name == "cls":
                     decorators.append("@classmethod")
 
+        if docstring:
+            docstring = self._indent_docstring(docstring)
         output.extend(self.format_func_def(inferred, decorators=decorators, docstring=docstring))
         self._fix_iter(ctx, inferred, output)
 
+    def _indent_docstring(self, docstring: str) -> str:
+        """Fix indentation of docstring extracted from pybind11 or other binding generators."""
+        lines = docstring.splitlines(keepends=True)
+        indent = self._indent + "    "
+        if len(lines) > 1:
+            if not all(line.startswith(indent) or not line.strip() for line in lines):
+                # if the docstring is not indented, then indent all but the first line
+                for i, line in enumerate(lines[1:]):
+                    if line.strip():
+                        lines[i + 1] = indent + line
+        # if there's a trailing newline, add a final line to visually indent the quoted docstring
+        if lines[-1].endswith("\n"):
+            if len(lines) > 1:
+                lines.append(indent)
+            else:
+                lines[-1] = lines[-1][:-1]
+        return "".join(lines)
+
     def _fix_iter(
         self, ctx: FunctionContext, inferred: list[FunctionSig], output: list[str]
     ) -> None:
@@ -640,7 +682,7 @@ def generate_property_stub(
         if fget:
             alt_docstr = getattr(fget, "__doc__", None)
             if alt_docstr and docstring:
-                docstring += alt_docstr
+                docstring += "\n" + alt_docstr
             elif alt_docstr:
                 docstring = alt_docstr
 
diff --git a/mypy/stubutil.py b/mypy/stubutil.py
index 5ec240087145..b8d601ed3c6b 100644
--- a/mypy/stubutil.py
+++ b/mypy/stubutil.py
@@ -576,6 +576,14 @@ def __init__(
         self.sig_generators = self.get_sig_generators()
         # populated by visit_mypy_file
         self.module_name: str = ""
+        # These are "soft" imports for objects which might appear in annotations but not have
+        # a corresponding import statement.
+        self.known_imports = {
+            "_typeshed": ["Incomplete"],
+            "typing": ["Any", "TypeVar", "NamedTuple"],
+            "collections.abc": ["Generator"],
+            "typing_extensions": ["TypedDict", "ParamSpec", "TypeVarTuple"],
+        }
 
     def get_sig_generators(self) -> list[SignatureGenerator]:
         return []
@@ -667,15 +675,7 @@ def set_defined_names(self, defined_names: set[str]) -> None:
         for name in self._all_ or ():
             self.import_tracker.reexport(name)
 
-        # These are "soft" imports for objects which might appear in annotations but not have
-        # a corresponding import statement.
-        known_imports = {
-            "_typeshed": ["Incomplete"],
-            "typing": ["Any", "TypeVar", "NamedTuple"],
-            "collections.abc": ["Generator"],
-            "typing_extensions": ["TypedDict", "ParamSpec", "TypeVarTuple"],
-        }
-        for pkg, imports in known_imports.items():
+        for pkg, imports in self.known_imports.items():
             for t in imports:
                 # require=False means that the import won't be added unless require_name() is called
                 # for the object during generation.
diff --git a/test-data/pybind11_mypy_demo/src/main.cpp b/test-data/pybind11_mypy_demo/src/main.cpp
index 00e5b2f4e871..192a90cf8e30 100644
--- a/test-data/pybind11_mypy_demo/src/main.cpp
+++ b/test-data/pybind11_mypy_demo/src/main.cpp
@@ -44,6 +44,7 @@
 
 #include <cmath>
 #include <pybind11/pybind11.h>
+#include <pybind11/stl.h>
 
 namespace py = pybind11;
 
@@ -102,6 +103,11 @@ struct Point {
     return distance_to(other.x, other.y);
   }
 
+  std::vector<double> as_vector()
+  {
+    return std::vector<double>{x, y};
+  }
+
   double x, y;
 };
 
@@ -134,14 +140,15 @@ void bind_basics(py::module& basics) {
     .def(py::init<double, double>(), py::arg("x"), py::arg("y"))
     .def("distance_to", py::overload_cast<double, double>(&Point::distance_to, py::const_), py::arg("x"), py::arg("y"))
     .def("distance_to", py::overload_cast<const Point&>(&Point::distance_to, py::const_), py::arg("other"))
-    .def_readwrite("x", &Point::x)
+    .def("as_list", &Point::as_vector)
+    .def_readwrite("x", &Point::x, "some docstring")
     .def_property("y",
         [](Point& self){ return self.y; },
         [](Point& self, double value){ self.y = value; }
     )
     .def_property_readonly("length", &Point::length)
     .def_property_readonly_static("x_axis", [](py::object cls){return Point::x_axis;})
-    .def_property_readonly_static("y_axis", [](py::object cls){return Point::y_axis;})
+    .def_property_readonly_static("y_axis", [](py::object cls){return Point::y_axis;}, "another docstring")
     .def_readwrite_static("length_unit", &Point::length_unit)
     .def_property_static("angle_unit",
         [](py::object& /*cls*/){ return Point::angle_unit; },
diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi
index e69de29bb2d1..0cb252f00259 100644
--- a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi
+++ b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi
@@ -0,0 +1 @@
+from . import basics as basics
diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi
index 676d7f6d3f15..b761291e11f3 100644
--- a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi
+++ b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi
@@ -1,7 +1,7 @@
-from typing import ClassVar
+from typing import ClassVar, List, overload
 
-from typing import overload
 PI: float
+__version__: str
 
 class Point:
     class AngleUnit:
@@ -13,8 +13,6 @@ class Point:
             """__init__(self: pybind11_mypy_demo.basics.Point.AngleUnit, value: int) -> None"""
         def __eq__(self, other: object) -> bool:
             """__eq__(self: object, other: object) -> bool"""
-        def __getstate__(self) -> int:
-            """__getstate__(self: object) -> int"""
         def __hash__(self) -> int:
             """__hash__(self: object) -> int"""
         def __index__(self) -> int:
@@ -23,8 +21,6 @@ class Point:
             """__int__(self: pybind11_mypy_demo.basics.Point.AngleUnit) -> int"""
         def __ne__(self, other: object) -> bool:
             """__ne__(self: object, other: object) -> bool"""
-        def __setstate__(self, state: int) -> None:
-            """__setstate__(self: pybind11_mypy_demo.basics.Point.AngleUnit, state: int) -> None"""
         @property
         def name(self) -> str: ...
         @property
@@ -40,8 +36,6 @@ class Point:
             """__init__(self: pybind11_mypy_demo.basics.Point.LengthUnit, value: int) -> None"""
         def __eq__(self, other: object) -> bool:
             """__eq__(self: object, other: object) -> bool"""
-        def __getstate__(self) -> int:
-            """__getstate__(self: object) -> int"""
         def __hash__(self) -> int:
             """__hash__(self: object) -> int"""
         def __index__(self) -> int:
@@ -50,8 +44,6 @@ class Point:
             """__int__(self: pybind11_mypy_demo.basics.Point.LengthUnit) -> int"""
         def __ne__(self, other: object) -> bool:
             """__ne__(self: object, other: object) -> bool"""
-        def __setstate__(self, state: int) -> None:
-            """__setstate__(self: pybind11_mypy_demo.basics.Point.LengthUnit, state: int) -> None"""
         @property
         def name(self) -> str: ...
         @property
@@ -70,7 +62,8 @@ class Point:
 
         1. __init__(self: pybind11_mypy_demo.basics.Point) -> None
 
-        2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None"""
+        2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None
+        """
     @overload
     def __init__(self, x: float, y: float) -> None:
         """__init__(*args, **kwargs)
@@ -78,7 +71,10 @@ class Point:
 
         1. __init__(self: pybind11_mypy_demo.basics.Point) -> None
 
-        2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None"""
+        2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None
+        """
+    def as_list(self) -> List[float]:
+        """as_list(self: pybind11_mypy_demo.basics.Point) -> List[float]"""
     @overload
     def distance_to(self, x: float, y: float) -> float:
         """distance_to(*args, **kwargs)
@@ -86,7 +82,8 @@ class Point:
 
         1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float
 
-        2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float"""
+        2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float
+        """
     @overload
     def distance_to(self, other: Point) -> float:
         """distance_to(*args, **kwargs)
@@ -94,19 +91,22 @@ class Point:
 
         1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float
 
-        2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float"""
+        2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float
+        """
     @property
     def length(self) -> float: ...
 
 def answer() -> int:
     '''answer() -> int
 
-    answer docstring, with end quote"'''
+    answer docstring, with end quote"
+    '''
 def midpoint(left: float, right: float) -> float:
     """midpoint(left: float, right: float) -> float"""
 def sum(arg0: int, arg1: int) -> int:
     '''sum(arg0: int, arg1: int) -> int
 
-    multiline docstring test, edge case quotes """\'\'\''''
+    multiline docstring test, edge case quotes """\'\'\'
+    '''
 def weighted_midpoint(left: float, right: float, alpha: float = ...) -> float:
     """weighted_midpoint(left: float, right: float, alpha: float = 0.5) -> float"""
diff --git a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi
index 6527f5733eaf..6f164a03edcc 100644
--- a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi
+++ b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi
@@ -1,4 +1,4 @@
-from typing import ClassVar, overload
+from typing import ClassVar, List, overload
 
 PI: float
 __version__: str
@@ -47,6 +47,7 @@ class Point:
     def __init__(self) -> None: ...
     @overload
     def __init__(self, x: float, y: float) -> None: ...
+    def as_list(self) -> List[float]: ...
     @overload
     def distance_to(self, x: float, y: float) -> float: ...
     @overload

From 95e7fcbe9a4c1d366e90c23f44459032cf0740de Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Thu, 30 Nov 2023 17:46:42 +0000
Subject: [PATCH 275/288] Fix stubtest's tests to work with the latest
 typing_extensions release (#16588)

Stubtest's tests will start failing when `typing_extensions==4.9.0`
comes out, due to some new `ClassVar`s on `typing_extensions.TypedDict`.
This PR fixes that.

Fixes https://github.com/python/typing_extensions/issues/309
---
 test-data/unit/lib-stub/typing_extensions.pyi | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi
index c88aa5c815c5..7aca6fad1b42 100644
--- a/test-data/unit/lib-stub/typing_extensions.pyi
+++ b/test-data/unit/lib-stub/typing_extensions.pyi
@@ -59,6 +59,8 @@ class _TypedDict(Mapping[str, object]):
     # Stubtest's tests need the following items:
     __required_keys__: frozenset[str]
     __optional_keys__: frozenset[str]
+    __readonly_keys__: frozenset[str]
+    __mutable_keys__: frozenset[str]
     __total__: bool
 
 def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ...

From c0fce06da362e9660f4aec3b58fb72a9a2dfb7a6 Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Fri, 1 Dec 2023 15:47:57 +0000
Subject: [PATCH 276/288] Sync typeshed

Source commit:
https://github.com/python/typeshed/commit/5f12eebda4bfddb247c05fb06c6762bd262a9420
---
 mypy/typeshed/stdlib/_codecs.pyi              |   4 +-
 mypy/typeshed/stdlib/_ctypes.pyi              |   6 +-
 mypy/typeshed/stdlib/argparse.pyi             |  10 +-
 mypy/typeshed/stdlib/asyncio/base_events.pyi  |   2 +
 .../stdlib/asyncio/base_subprocess.pyi        |   1 +
 mypy/typeshed/stdlib/asyncio/events.pyi       |  15 ++-
 mypy/typeshed/stdlib/asyncio/sslproto.pyi     |   2 +
 mypy/typeshed/stdlib/asyncio/streams.pyi      |   3 +
 mypy/typeshed/stdlib/asyncio/subprocess.pyi   |  18 +--
 mypy/typeshed/stdlib/asyncio/tasks.pyi        |  67 +++++-----
 mypy/typeshed/stdlib/asyncio/unix_events.pyi  | 116 +++++++++++++-----
 mypy/typeshed/stdlib/builtins.pyi             | 105 +++++++++++++++-
 mypy/typeshed/stdlib/cgi.pyi                  |   3 +-
 mypy/typeshed/stdlib/collections/__init__.pyi |   4 +-
 mypy/typeshed/stdlib/contextvars.pyi          |   2 +-
 mypy/typeshed/stdlib/functools.pyi            |  44 ++++---
 mypy/typeshed/stdlib/http/client.pyi          |   2 +-
 mypy/typeshed/stdlib/importlib/abc.pyi        |   2 +-
 mypy/typeshed/stdlib/importlib/machinery.pyi  |  23 +++-
 .../stdlib/importlib/metadata/__init__.pyi    |   2 +-
 mypy/typeshed/stdlib/io.pyi                   |   2 +-
 mypy/typeshed/stdlib/itertools.pyi            |  26 ++--
 mypy/typeshed/stdlib/logging/config.pyi       |  10 +-
 mypy/typeshed/stdlib/math.pyi                 |   2 +-
 .../stdlib/multiprocessing/connection.pyi     |   1 +
 .../stdlib/multiprocessing/managers.pyi       |   1 +
 mypy/typeshed/stdlib/multiprocessing/pool.pyi |   1 +
 .../stdlib/multiprocessing/shared_memory.pyi  |   1 +
 mypy/typeshed/stdlib/os/__init__.pyi          |   2 +-
 mypy/typeshed/stdlib/pkgutil.pyi              |   3 +
 mypy/typeshed/stdlib/re.pyi                   |  14 +--
 mypy/typeshed/stdlib/shelve.pyi               |   3 +-
 mypy/typeshed/stdlib/shutil.pyi               |   8 +-
 mypy/typeshed/stdlib/subprocess.pyi           |   1 +
 mypy/typeshed/stdlib/sunau.pyi                |   2 +
 mypy/typeshed/stdlib/telnetlib.pyi            |   1 +
 mypy/typeshed/stdlib/tempfile.pyi             |   4 +-
 mypy/typeshed/stdlib/tkinter/__init__.pyi     |   1 +
 mypy/typeshed/stdlib/tkinter/dnd.pyi          |   1 +
 mypy/typeshed/stdlib/tkinter/font.pyi         |   1 +
 mypy/typeshed/stdlib/tkinter/ttk.pyi          |   4 +-
 mypy/typeshed/stdlib/turtle.pyi               |  12 +-
 mypy/typeshed/stdlib/types.pyi                |   3 +-
 mypy/typeshed/stdlib/typing.pyi               |  12 +-
 mypy/typeshed/stdlib/typing_extensions.pyi    |  30 +++--
 mypy/typeshed/stdlib/unittest/async_case.pyi  |   2 +
 mypy/typeshed/stdlib/unittest/mock.pyi        |   4 +-
 mypy/typeshed/stdlib/urllib/request.pyi       |   1 +
 mypy/typeshed/stdlib/wave.pyi                 |   9 +-
 mypy/typeshed/stdlib/weakref.pyi              |   2 +-
 mypy/typeshed/stdlib/xml/sax/xmlreader.pyi    |   2 +-
 mypy/typeshed/stdlib/zipfile.pyi              |   8 +-
 52 files changed, 436 insertions(+), 169 deletions(-)

diff --git a/mypy/typeshed/stdlib/_codecs.pyi b/mypy/typeshed/stdlib/_codecs.pyi
index 51f17f01ca71..f8141d8bad4b 100644
--- a/mypy/typeshed/stdlib/_codecs.pyi
+++ b/mypy/typeshed/stdlib/_codecs.pyi
@@ -47,11 +47,11 @@ _StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"]
 @overload
 def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ...
 @overload
-def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ...  # type: ignore[misc]
+def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ...  # type: ignore[overload-overlap]
 @overload
 def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
 @overload
-def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ...  # type: ignore[misc]
+def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ...  # type: ignore[overload-overlap]
 @overload
 def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ...
 
diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi
index 8a891971e9f1..495e29dfd8ce 100644
--- a/mypy/typeshed/stdlib/_ctypes.pyi
+++ b/mypy/typeshed/stdlib/_ctypes.pyi
@@ -167,7 +167,11 @@ class Array(_CData, Generic[_CT]):
     def _type_(self) -> type[_CT]: ...
     @_type_.setter
     def _type_(self, value: type[_CT]) -> None: ...
-    raw: bytes  # Note: only available if _CT == c_char
+    # Note: only available if _CT == c_char
+    @property
+    def raw(self) -> bytes: ...
+    @raw.setter
+    def raw(self, value: ReadableBuffer) -> None: ...
     value: Any  # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
     # TODO These methods cannot be annotated correctly at the moment.
     # All of these "Any"s stand for the array's element type, but it's not possible to use _CT
diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi
index e947f67edd55..0cbbcd242195 100644
--- a/mypy/typeshed/stdlib/argparse.pyi
+++ b/mypy/typeshed/stdlib/argparse.pyi
@@ -172,7 +172,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
         ) -> None: ...
 
     @overload
-    def parse_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ...  # type: ignore[misc]
+    def parse_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ...
     @overload
     def parse_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ...
     @overload
@@ -211,7 +211,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
     def format_usage(self) -> str: ...
     def format_help(self) -> str: ...
     @overload
-    def parse_known_args(self, args: Sequence[str] | None = None, namespace: None = None) -> tuple[Namespace, list[str]]: ...  # type: ignore[misc]
+    def parse_known_args(self, args: Sequence[str] | None = None, namespace: None = None) -> tuple[Namespace, list[str]]: ...
     @overload
     def parse_known_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ...
     @overload
@@ -220,13 +220,15 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
     def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ...
     def error(self, message: str) -> NoReturn: ...
     @overload
-    def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ...  # type: ignore[misc]
+    def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ...
     @overload
     def parse_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ...
     @overload
     def parse_intermixed_args(self, *, namespace: _N) -> _N: ...
     @overload
-    def parse_known_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> tuple[Namespace, list[str]]: ...  # type: ignore[misc]
+    def parse_known_intermixed_args(
+        self, args: Sequence[str] | None = None, namespace: None = None
+    ) -> tuple[Namespace, list[str]]: ...
     @overload
     def parse_known_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ...
     @overload
diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi
index cdf295d510d4..afddcd918584 100644
--- a/mypy/typeshed/stdlib/asyncio/base_events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi
@@ -471,3 +471,5 @@ class BaseEventLoop(AbstractEventLoop):
         async def shutdown_default_executor(self, timeout: float | None = None) -> None: ...
     elif sys.version_info >= (3, 9):
         async def shutdown_default_executor(self) -> None: ...
+
+    def __del__(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi b/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi
index 8f262cd5c760..a5fe24e8768b 100644
--- a/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi
+++ b/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi
@@ -55,6 +55,7 @@ class BaseSubprocessTransport(transports.SubprocessTransport):
     async def _wait(self) -> int: ...  # undocumented
     def _try_finish(self) -> None: ...  # undocumented
     def _call_connection_lost(self, exc: BaseException | None) -> None: ...  # undocumented
+    def __del__(self) -> None: ...
 
 class WriteSubprocessPipeProto(protocols.BaseProtocol):  # undocumented
     def __init__(self, proc: BaseSubprocessTransport, fd: int) -> None: ...
diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi
index 4c62043875ba..87e7edb461ac 100644
--- a/mypy/typeshed/stdlib/asyncio/events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/events.pyi
@@ -6,7 +6,7 @@ from collections.abc import Callable, Coroutine, Generator, Sequence
 from contextvars import Context
 from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket
 from typing import IO, Any, Protocol, TypeVar, overload
-from typing_extensions import Literal, Self, TypeAlias
+from typing_extensions import Literal, Self, TypeAlias, deprecated
 
 from . import _AwaitableLike, _CoroutineLike
 from .base_events import Server
@@ -613,8 +613,17 @@ def set_event_loop_policy(policy: AbstractEventLoopPolicy | None) -> None: ...
 def get_event_loop() -> AbstractEventLoop: ...
 def set_event_loop(loop: AbstractEventLoop | None) -> None: ...
 def new_event_loop() -> AbstractEventLoop: ...
-def get_child_watcher() -> AbstractChildWatcher: ...
-def set_child_watcher(watcher: AbstractChildWatcher) -> None: ...
+
+if sys.version_info >= (3, 12):
+    @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
+    def get_child_watcher() -> AbstractChildWatcher: ...
+    @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
+    def set_child_watcher(watcher: AbstractChildWatcher) -> None: ...
+
+else:
+    def get_child_watcher() -> AbstractChildWatcher: ...
+    def set_child_watcher(watcher: AbstractChildWatcher) -> None: ...
+
 def _set_running_loop(__loop: AbstractEventLoop | None) -> None: ...
 def _get_running_loop() -> AbstractEventLoop: ...
 def get_running_loop() -> AbstractEventLoop: ...
diff --git a/mypy/typeshed/stdlib/asyncio/sslproto.pyi b/mypy/typeshed/stdlib/asyncio/sslproto.pyi
index 09733e5f9a01..393a1fbdc468 100644
--- a/mypy/typeshed/stdlib/asyncio/sslproto.pyi
+++ b/mypy/typeshed/stdlib/asyncio/sslproto.pyi
@@ -83,6 +83,8 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport):
         def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ...
         def get_read_buffer_size(self) -> int: ...
 
+    def __del__(self) -> None: ...
+
 if sys.version_info >= (3, 11):
     _SSLProtocolBase: TypeAlias = protocols.BufferedProtocol
 else:
diff --git a/mypy/typeshed/stdlib/asyncio/streams.pyi b/mypy/typeshed/stdlib/asyncio/streams.pyi
index 804be1ca5065..81a94425f8de 100644
--- a/mypy/typeshed/stdlib/asyncio/streams.pyi
+++ b/mypy/typeshed/stdlib/asyncio/streams.pyi
@@ -128,6 +128,7 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
         client_connected_cb: _ClientConnectedCallback | None = None,
         loop: events.AbstractEventLoop | None = None,
     ) -> None: ...
+    def __del__(self) -> None: ...
 
 class StreamWriter:
     def __init__(
@@ -161,6 +162,8 @@ class StreamWriter:
         async def start_tls(
             self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None
         ) -> None: ...
+    if sys.version_info >= (3, 11):
+        def __del__(self) -> None: ...
 
 class StreamReader(AsyncIterator[bytes]):
     def __init__(self, limit: int = 65536, loop: events.AbstractEventLoop | None = None) -> None: ...
diff --git a/mypy/typeshed/stdlib/asyncio/subprocess.pyi b/mypy/typeshed/stdlib/asyncio/subprocess.pyi
index 9b7c82e689bf..03aea65f6d54 100644
--- a/mypy/typeshed/stdlib/asyncio/subprocess.pyi
+++ b/mypy/typeshed/stdlib/asyncio/subprocess.pyi
@@ -80,14 +80,14 @@ if sys.version_info >= (3, 11):
         stdout: int | IO[Any] | None = None,
         stderr: int | IO[Any] | None = None,
         limit: int = 65536,
-        # These parameters are forced to these values by BaseEventLoop.subprocess_shell
+        # These parameters are forced to these values by BaseEventLoop.subprocess_exec
         universal_newlines: Literal[False] = False,
-        shell: Literal[True] = True,
+        shell: Literal[False] = False,
         bufsize: Literal[0] = 0,
         encoding: None = None,
         errors: None = None,
+        text: Literal[False] | None = None,
         # These parameters are taken by subprocess.Popen, which this ultimately delegates to
-        text: bool | None = None,
         executable: StrOrBytesPath | None = None,
         preexec_fn: Callable[[], Any] | None = None,
         close_fds: bool = True,
@@ -145,14 +145,14 @@ elif sys.version_info >= (3, 10):
         stdout: int | IO[Any] | None = None,
         stderr: int | IO[Any] | None = None,
         limit: int = 65536,
-        # These parameters are forced to these values by BaseEventLoop.subprocess_shell
+        # These parameters are forced to these values by BaseEventLoop.subprocess_exec
         universal_newlines: Literal[False] = False,
-        shell: Literal[True] = True,
+        shell: Literal[False] = False,
         bufsize: Literal[0] = 0,
         encoding: None = None,
         errors: None = None,
+        text: Literal[False] | None = None,
         # These parameters are taken by subprocess.Popen, which this ultimately delegates to
-        text: bool | None = None,
         executable: StrOrBytesPath | None = None,
         preexec_fn: Callable[[], Any] | None = None,
         close_fds: bool = True,
@@ -210,14 +210,14 @@ else:  # >= 3.9
         stderr: int | IO[Any] | None = None,
         loop: events.AbstractEventLoop | None = None,
         limit: int = 65536,
-        # These parameters are forced to these values by BaseEventLoop.subprocess_shell
+        # These parameters are forced to these values by BaseEventLoop.subprocess_exec
         universal_newlines: Literal[False] = False,
-        shell: Literal[True] = True,
+        shell: Literal[False] = False,
         bufsize: Literal[0] = 0,
         encoding: None = None,
         errors: None = None,
+        text: Literal[False] | None = None,
         # These parameters are taken by subprocess.Popen, which this ultimately delegates to
-        text: bool | None = None,
         executable: StrOrBytesPath | None = None,
         preexec_fn: Callable[[], Any] | None = None,
         close_fds: bool = True,
diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi
index cdac7d359781..7c76abaf1dca 100644
--- a/mypy/typeshed/stdlib/asyncio/tasks.pyi
+++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi
@@ -86,7 +86,7 @@ else:
     ) -> Iterator[Future[_T]]: ...
 
 @overload
-def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ...  # type: ignore[misc]
+def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ...  # type: ignore[overload-overlap]
 @overload
 def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ...
 
@@ -95,17 +95,16 @@ def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | No
 # zip() because typing does not support variadic type variables.  See
 # typing PR #1550 for discussion.
 #
-# The many type: ignores here are because the overloads overlap,
-# but having overlapping overloads is the only way to get acceptable type inference in all edge cases.
+# N.B. Having overlapping overloads is the only way to get acceptable type inference in all edge cases.
 if sys.version_info >= (3, 10):
     @overload
-    def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ...  # type: ignore[misc]
+    def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ...  # type: ignore[overload-overlap]
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: Literal[False] = False
     ) -> Future[tuple[_T1, _T2]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -113,7 +112,7 @@ if sys.version_info >= (3, 10):
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -122,7 +121,7 @@ if sys.version_info >= (3, 10):
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -132,7 +131,7 @@ if sys.version_info >= (3, 10):
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -143,15 +142,15 @@ if sys.version_info >= (3, 10):
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ...
     @overload
-    def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: Literal[False] = False) -> Future[list[_T]]: ...  # type: ignore[misc]
+    def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: Literal[False] = False) -> Future[list[_T]]: ...  # type: ignore[overload-overlap]
     @overload
-    def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ...  # type: ignore[misc]
+    def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ...  # type: ignore[overload-overlap]
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: bool
     ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -159,7 +158,7 @@ if sys.version_info >= (3, 10):
         return_exceptions: bool,
     ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -168,7 +167,7 @@ if sys.version_info >= (3, 10):
         return_exceptions: bool,
     ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -180,7 +179,7 @@ if sys.version_info >= (3, 10):
         tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException]
     ]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -204,11 +203,11 @@ if sys.version_info >= (3, 10):
 
 else:
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False
     ) -> Future[tuple[_T1]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         *,
@@ -216,7 +215,7 @@ else:
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -225,7 +224,7 @@ else:
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -235,7 +234,7 @@ else:
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -246,7 +245,7 @@ else:
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -258,15 +257,15 @@ else:
         return_exceptions: Literal[False] = False,
     ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         *coros_or_futures: _FutureLike[_T], loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False
     ) -> Future[list[_T]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: bool
     ) -> Future[tuple[_T1 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         *,
@@ -274,7 +273,7 @@ else:
         return_exceptions: bool,
     ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -283,7 +282,7 @@ else:
         return_exceptions: bool,
     ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -293,7 +292,7 @@ else:
         return_exceptions: bool,
     ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(  # type: ignore[overload-overlap]
         __coro_or_future1: _FutureLike[_T1],
         __coro_or_future2: _FutureLike[_T2],
         __coro_or_future3: _FutureLike[_T3],
@@ -314,7 +313,7 @@ else:
         ]
     ]: ...
     @overload
-    def gather(  # type: ignore[misc]
+    def gather(
         *coros_or_futures: _FutureLike[_T], loop: AbstractEventLoop | None = None, return_exceptions: bool
     ) -> Future[list[_T | BaseException]]: ...
 
@@ -338,7 +337,9 @@ else:
 
 if sys.version_info >= (3, 11):
     @overload
-    async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ...  # type: ignore[misc]
+    async def wait(
+        fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED"
+    ) -> tuple[set[_FT], set[_FT]]: ...
     @overload
     async def wait(
         fs: Iterable[Task[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED"
@@ -346,7 +347,9 @@ if sys.version_info >= (3, 11):
 
 elif sys.version_info >= (3, 10):
     @overload
-    async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ...  # type: ignore[misc]
+    async def wait(  # type: ignore[overload-overlap]
+        fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED"
+    ) -> tuple[set[_FT], set[_FT]]: ...
     @overload
     async def wait(
         fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED"
@@ -354,7 +357,7 @@ elif sys.version_info >= (3, 10):
 
 else:
     @overload
-    async def wait(  # type: ignore[misc]
+    async def wait(  # type: ignore[overload-overlap]
         fs: Iterable[_FT],
         *,
         loop: AbstractEventLoop | None = None,
diff --git a/mypy/typeshed/stdlib/asyncio/unix_events.pyi b/mypy/typeshed/stdlib/asyncio/unix_events.pyi
index dc3d3496ae55..d440206aa0b9 100644
--- a/mypy/typeshed/stdlib/asyncio/unix_events.pyi
+++ b/mypy/typeshed/stdlib/asyncio/unix_events.pyi
@@ -3,7 +3,7 @@ import types
 from abc import ABCMeta, abstractmethod
 from collections.abc import Callable
 from typing import Any
-from typing_extensions import Literal, Self
+from typing_extensions import Literal, Self, deprecated
 
 from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy
 from .selector_events import BaseSelectorEventLoop
@@ -11,22 +11,46 @@ from .selector_events import BaseSelectorEventLoop
 # This is also technically not available on Win,
 # but other parts of typeshed need this definition.
 # So, it is special cased.
-class AbstractChildWatcher:
-    @abstractmethod
-    def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
-    @abstractmethod
-    def remove_child_handler(self, pid: int) -> bool: ...
-    @abstractmethod
-    def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
-    @abstractmethod
-    def close(self) -> None: ...
-    @abstractmethod
-    def __enter__(self) -> Self: ...
-    @abstractmethod
-    def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ...
-    if sys.version_info >= (3, 8):
+if sys.version_info >= (3, 12):
+    @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
+    class AbstractChildWatcher:
+        @abstractmethod
+        def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
+        @abstractmethod
+        def remove_child_handler(self, pid: int) -> bool: ...
+        @abstractmethod
+        def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
+        @abstractmethod
+        def close(self) -> None: ...
+        @abstractmethod
+        def __enter__(self) -> Self: ...
+        @abstractmethod
+        def __exit__(
+            self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None
+        ) -> None: ...
+        if sys.version_info >= (3, 8):
+            @abstractmethod
+            def is_active(self) -> bool: ...
+
+else:
+    class AbstractChildWatcher:
         @abstractmethod
-        def is_active(self) -> bool: ...
+        def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
+        @abstractmethod
+        def remove_child_handler(self, pid: int) -> bool: ...
+        @abstractmethod
+        def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
+        @abstractmethod
+        def close(self) -> None: ...
+        @abstractmethod
+        def __enter__(self) -> Self: ...
+        @abstractmethod
+        def __exit__(
+            self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None
+        ) -> None: ...
+        if sys.version_info >= (3, 8):
+            @abstractmethod
+            def is_active(self) -> bool: ...
 
 if sys.platform != "win32":
     if sys.version_info >= (3, 9):
@@ -62,28 +86,61 @@ if sys.platform != "win32":
 
         def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
 
-    class SafeChildWatcher(BaseChildWatcher):
-        def __enter__(self) -> Self: ...
-        def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
-        def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
-        def remove_child_handler(self, pid: int) -> bool: ...
+    if sys.version_info >= (3, 12):
+        @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
+        class SafeChildWatcher(BaseChildWatcher):
+            def __enter__(self) -> Self: ...
+            def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
+            def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
+            def remove_child_handler(self, pid: int) -> bool: ...
 
-    class FastChildWatcher(BaseChildWatcher):
-        def __enter__(self) -> Self: ...
-        def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
-        def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
-        def remove_child_handler(self, pid: int) -> bool: ...
+        @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
+        class FastChildWatcher(BaseChildWatcher):
+            def __enter__(self) -> Self: ...
+            def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
+            def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
+            def remove_child_handler(self, pid: int) -> bool: ...
+    else:
+        class SafeChildWatcher(BaseChildWatcher):
+            def __enter__(self) -> Self: ...
+            def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
+            def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
+            def remove_child_handler(self, pid: int) -> bool: ...
+
+        class FastChildWatcher(BaseChildWatcher):
+            def __enter__(self) -> Self: ...
+            def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ...
+            def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
+            def remove_child_handler(self, pid: int) -> bool: ...
 
     class _UnixSelectorEventLoop(BaseSelectorEventLoop): ...
 
     class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy):
-        def get_child_watcher(self) -> AbstractChildWatcher: ...
-        def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ...
+        if sys.version_info >= (3, 12):
+            @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
+            def get_child_watcher(self) -> AbstractChildWatcher: ...
+            @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
+            def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ...
+        else:
+            def get_child_watcher(self) -> AbstractChildWatcher: ...
+            def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ...
     SelectorEventLoop = _UnixSelectorEventLoop
 
     DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy
 
-    if sys.version_info >= (3, 8):
+    if sys.version_info >= (3, 12):
+        @deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
+        class MultiLoopChildWatcher(AbstractChildWatcher):
+            def is_active(self) -> bool: ...
+            def close(self) -> None: ...
+            def __enter__(self) -> Self: ...
+            def __exit__(
+                self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
+            ) -> None: ...
+            def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ...
+            def remove_child_handler(self, pid: int) -> bool: ...
+            def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
+    elif sys.version_info >= (3, 8):
         class MultiLoopChildWatcher(AbstractChildWatcher):
             def is_active(self) -> bool: ...
             def close(self) -> None: ...
@@ -95,6 +152,7 @@ if sys.platform != "win32":
             def remove_child_handler(self, pid: int) -> bool: ...
             def attach_loop(self, loop: AbstractEventLoop | None) -> None: ...
 
+    if sys.version_info >= (3, 8):
         class ThreadedChildWatcher(AbstractChildWatcher):
             def is_active(self) -> Literal[True]: ...
             def close(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index 4f04b6286258..92b5279bcfcd 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -57,12 +57,14 @@ from typing import (  # noqa: Y022
 from typing_extensions import (
     Concatenate,
     Literal,
+    LiteralString,
     ParamSpec,
     Self,
     SupportsIndex,
     TypeAlias,
     TypeGuard,
     TypeVarTuple,
+    deprecated,
     final,
 )
 
@@ -160,8 +162,9 @@ class classmethod(Generic[_T, _P, _R_co]):
         def __wrapped__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ...
 
 class type:
+    # object.__base__ is None. Otherwise, it would be a type.
     @property
-    def __base__(self) -> type: ...
+    def __base__(self) -> type | None: ...
     __bases__: tuple[type, ...]
     @property
     def __basicsize__(self) -> int: ...
@@ -442,8 +445,17 @@ class str(Sequence[str]):
     def __new__(cls, object: object = ...) -> Self: ...
     @overload
     def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ...
+    @overload
+    def capitalize(self: LiteralString) -> LiteralString: ...
+    @overload
     def capitalize(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def casefold(self: LiteralString) -> LiteralString: ...
+    @overload
     def casefold(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
+    @overload
     def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
     def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
@@ -451,11 +463,20 @@ class str(Sequence[str]):
         self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
     if sys.version_info >= (3, 8):
+        @overload
+        def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ...
+        @overload
         def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ...  # type: ignore[misc]
     else:
+        @overload
+        def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ...
+        @overload
         def expandtabs(self, tabsize: int = 8) -> str: ...  # type: ignore[misc]
 
     def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
+    @overload
+    def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ...
+    @overload
     def format(self, *args: object, **kwargs: object) -> str: ...
     def format_map(self, map: _FormatMapMapping) -> str: ...
     def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
@@ -471,32 +492,91 @@ class str(Sequence[str]):
     def isspace(self) -> bool: ...
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
+    @overload
+    def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ...
+    @overload
     def join(self, __iterable: Iterable[str]) -> str: ...  # type: ignore[misc]
+    @overload
+    def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
+    @overload
     def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
+    @overload
+    def lower(self: LiteralString) -> LiteralString: ...
+    @overload
     def lower(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
+    @overload
     def lstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
+    @overload
+    def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
+    @overload
     def partition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
+    @overload
+    def replace(
+        self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1
+    ) -> LiteralString: ...
+    @overload
     def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ...  # type: ignore[misc]
     if sys.version_info >= (3, 9):
+        @overload
+        def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ...
+        @overload
         def removeprefix(self, __prefix: str) -> str: ...  # type: ignore[misc]
+        @overload
+        def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ...
+        @overload
         def removesuffix(self, __suffix: str) -> str: ...  # type: ignore[misc]
 
     def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
+    @overload
+    def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
+    @overload
     def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
+    @overload
+    def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
+    @overload
     def rpartition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
+    @overload
+    def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
+    @overload
     def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
+    @overload
+    def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
+    @overload
     def rstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
+    @overload
+    def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
+    @overload
     def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
+    @overload
+    def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ...
+    @overload
     def splitlines(self, keepends: bool = False) -> list[str]: ...  # type: ignore[misc]
     def startswith(
         self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
+    @overload
+    def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
+    @overload
     def strip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
+    @overload
+    def swapcase(self: LiteralString) -> LiteralString: ...
+    @overload
     def swapcase(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def title(self: LiteralString) -> LiteralString: ...
+    @overload
     def title(self) -> str: ...  # type: ignore[misc]
     def translate(self, __table: _TranslateTable) -> str: ...
+    @overload
+    def upper(self: LiteralString) -> LiteralString: ...
+    @overload
     def upper(self) -> str: ...  # type: ignore[misc]
+    @overload
+    def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ...
+    @overload
     def zfill(self, __width: SupportsIndex) -> str: ...  # type: ignore[misc]
     @staticmethod
     @overload
@@ -507,6 +587,9 @@ class str(Sequence[str]):
     @staticmethod
     @overload
     def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ...
+    @overload
+    def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ...
+    @overload
     def __add__(self, __value: str) -> str: ...  # type: ignore[misc]
     # Incompatible with Sequence.__contains__
     def __contains__(self, __key: str) -> bool: ...  # type: ignore[override]
@@ -515,13 +598,25 @@ class str(Sequence[str]):
     def __getitem__(self, __key: SupportsIndex | slice) -> str: ...
     def __gt__(self, __value: str) -> bool: ...
     def __hash__(self) -> int: ...
+    @overload
+    def __iter__(self: LiteralString) -> Iterator[LiteralString]: ...
+    @overload
     def __iter__(self) -> Iterator[str]: ...  # type: ignore[misc]
     def __le__(self, __value: str) -> bool: ...
     def __len__(self) -> int: ...
     def __lt__(self, __value: str) -> bool: ...
+    @overload
+    def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ...
+    @overload
     def __mod__(self, __value: Any) -> str: ...
+    @overload
+    def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
+    @overload
     def __mul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __ne__(self, __value: object) -> bool: ...
+    @overload
+    def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
+    @overload
     def __rmul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __getnewargs__(self) -> tuple[str]: ...
 
@@ -844,6 +939,8 @@ class bool(int):
     @overload
     def __rxor__(self, __value: int) -> int: ...
     def __getnewargs__(self) -> tuple[int]: ...
+    @deprecated("Will throw an error in Python 3.14. Use `not` for logical negation of bools instead.")
+    def __invert__(self) -> int: ...
 
 @final
 class slice:
@@ -1698,11 +1795,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit
 # Instead, we special-case the most common examples of this: bool and literal integers.
 if sys.version_info >= (3, 8):
     @overload
-    def sum(__iterable: Iterable[bool], start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ...  # type: ignore[overload-overlap]
 
 else:
     @overload
-    def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ...  # type: ignore[misc]
+    def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ...  # type: ignore[overload-overlap]
 
 @overload
 def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ...
@@ -1719,7 +1816,7 @@ else:
 # (A "SupportsDunderDict" protocol doesn't work)
 # Use a type: ignore to make complaints about overlapping overloads go away
 @overload
-def vars(__object: type) -> types.MappingProxyType[str, Any]: ...  # type: ignore[misc]
+def vars(__object: type) -> types.MappingProxyType[str, Any]: ...  # type: ignore[overload-overlap]
 @overload
 def vars(__object: Any = ...) -> dict[str, Any]: ...
 
diff --git a/mypy/typeshed/stdlib/cgi.pyi b/mypy/typeshed/stdlib/cgi.pyi
index a2acfa92d463..21bf8ca25394 100644
--- a/mypy/typeshed/stdlib/cgi.pyi
+++ b/mypy/typeshed/stdlib/cgi.pyi
@@ -117,7 +117,8 @@ class FieldStorage:
     def __contains__(self, key: str) -> bool: ...
     def __len__(self) -> int: ...
     def __bool__(self) -> bool: ...
-    # In Python 3 it returns bytes or str IO depending on an internal flag
+    def __del__(self) -> None: ...
+    # Returns bytes or str IO depending on an internal flag
     def make_file(self) -> IO[Any]: ...
 
 def print_exception(
diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi
index bb51dec50cab..955681c6ac0c 100644
--- a/mypy/typeshed/stdlib/collections/__init__.pyi
+++ b/mypy/typeshed/stdlib/collections/__init__.pyi
@@ -87,9 +87,9 @@ class UserDict(MutableMapping[_KT, _VT]):
         def __or__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ...
         @overload
         def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ...
-        @overload  # type: ignore[misc]
+        @overload
         def __ror__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ...
-        @overload  # type: ignore[misc]
+        @overload
         def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ...
         # UserDict.__ior__ should be kept roughly in line with MutableMapping.update()
         @overload  # type: ignore[misc]
diff --git a/mypy/typeshed/stdlib/contextvars.pyi b/mypy/typeshed/stdlib/contextvars.pyi
index a67d0349b46a..825c018d580f 100644
--- a/mypy/typeshed/stdlib/contextvars.pyi
+++ b/mypy/typeshed/stdlib/contextvars.pyi
@@ -50,7 +50,7 @@ def copy_context() -> Context: ...
 class Context(Mapping[ContextVar[Any], Any]):
     def __init__(self) -> None: ...
     @overload
-    def get(self, __key: ContextVar[_T], __default: None = None) -> _T | None: ...  # type: ignore[misc]  # overlapping overloads
+    def get(self, __key: ContextVar[_T], __default: None = None) -> _T | None: ...
     @overload
     def get(self, __key: ContextVar[_T], __default: _T) -> _T: ...
     @overload
diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi
index 1b4e59b7c120..451896bed72a 100644
--- a/mypy/typeshed/stdlib/functools.pyi
+++ b/mypy/typeshed/stdlib/functools.pyi
@@ -1,9 +1,9 @@
 import sys
 import types
-from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems
+from _typeshed import SupportsAllComparisons, SupportsItems
 from collections.abc import Callable, Hashable, Iterable, Sequence, Sized
 from typing import Any, Generic, NamedTuple, TypeVar, overload
-from typing_extensions import Literal, Self, TypeAlias, TypedDict, final
+from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final
 
 if sys.version_info >= (3, 9):
     from types import GenericAlias
@@ -28,15 +28,17 @@ if sys.version_info >= (3, 8):
 if sys.version_info >= (3, 9):
     __all__ += ["cache"]
 
-_AnyCallable: TypeAlias = Callable[..., object]
-
 _T = TypeVar("_T")
 _S = TypeVar("_S")
+_PWrapped = ParamSpec("_PWrapped")
+_RWrapped = TypeVar("_RWrapped")
+_PWrapper = ParamSpec("_PWrapper")
+_RWrapper = TypeVar("_RWrapper")
 
 @overload
-def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ...
+def reduce(__function: Callable[[_T, _S], _T], __sequence: Iterable[_S], __initial: _T) -> _T: ...
 @overload
-def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ...
+def reduce(__function: Callable[[_T, _T], _T], __sequence: Iterable[_T]) -> _T: ...
 
 class _CacheInfo(NamedTuple):
     hits: int
@@ -85,31 +87,41 @@ else:
     ]
 WRAPPER_UPDATES: tuple[Literal["__dict__"]]
 
+class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWrapper]):
+    __wrapped__: Callable[_PWrapped, _RWrapped]
+    def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWrapper: ...
+    # as with ``Callable``, we'll assume that these attributes exist
+    __name__: str
+    __qualname__: str
+
+class _Wrapper(Generic[_PWrapped, _RWrapped]):
+    def __call__(self, f: Callable[_PWrapper, _RWrapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
+
 if sys.version_info >= (3, 12):
     def update_wrapper(
-        wrapper: _T,
-        wrapped: _AnyCallable,
+        wrapper: Callable[_PWrapper, _RWrapper],
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _T: ...
+    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
     def wraps(
-        wrapped: _AnyCallable,
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> IdentityFunction: ...
+    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
 
 else:
     def update_wrapper(
-        wrapper: _T,
-        wrapped: _AnyCallable,
+        wrapper: Callable[_PWrapper, _RWrapper],
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _T: ...
+    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
     def wraps(
-        wrapped: _AnyCallable,
+        wrapped: Callable[_PWrapped, _RWrapped],
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> IdentityFunction: ...
+    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
 
 def total_ordering(cls: type[_T]) -> type[_T]: ...
 def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ...
diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi
index 305568fce6cf..20223695a1a8 100644
--- a/mypy/typeshed/stdlib/http/client.pyi
+++ b/mypy/typeshed/stdlib/http/client.pyi
@@ -187,7 +187,7 @@ class HTTPSConnection(HTTPConnection):
         def __init__(
             self,
             host: str,
-            port: str | None = None,
+            port: int | None = None,
             *,
             timeout: float | None = ...,
             source_address: tuple[str, int] | None = None,
diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi
index 8c395f8a18af..148e12ec7e3f 100644
--- a/mypy/typeshed/stdlib/importlib/abc.pyi
+++ b/mypy/typeshed/stdlib/importlib/abc.pyi
@@ -134,7 +134,7 @@ if sys.version_info >= (3, 9):
             def joinpath(self, __child: str) -> Traversable: ...
 
         # The documentation and runtime protocol allows *args, **kwargs arguments,
-        # but this would mean that all implementors would have to support them,
+        # but this would mean that all implementers would have to support them,
         # which is not the case.
         @overload
         @abstractmethod
diff --git a/mypy/typeshed/stdlib/importlib/machinery.pyi b/mypy/typeshed/stdlib/importlib/machinery.pyi
index 1a9680ab3c46..a0431905a828 100644
--- a/mypy/typeshed/stdlib/importlib/machinery.pyi
+++ b/mypy/typeshed/stdlib/importlib/machinery.pyi
@@ -2,8 +2,9 @@ import importlib.abc
 import sys
 import types
 from _typeshed import ReadableBuffer
-from collections.abc import Callable, Iterable, Sequence
+from collections.abc import Callable, Iterable, MutableSequence, Sequence
 from typing import Any
+from typing_extensions import Literal, deprecated
 
 if sys.version_info >= (3, 8):
     from importlib.metadata import DistributionFinder, PathDistribution
@@ -158,3 +159,23 @@ class ExtensionFileLoader(importlib.abc.ExecutionLoader):
     def get_code(self, fullname: str) -> None: ...
     def __eq__(self, other: object) -> bool: ...
     def __hash__(self) -> int: ...
+
+if sys.version_info >= (3, 11):
+    import importlib.readers
+
+    class NamespaceLoader(importlib.abc.InspectLoader):
+        def __init__(
+            self, name: str, path: MutableSequence[str], path_finder: Callable[[str, tuple[str, ...]], ModuleSpec]
+        ) -> None: ...
+        def is_package(self, fullname: str) -> Literal[True]: ...
+        def get_source(self, fullname: str) -> Literal[""]: ...
+        def get_code(self, fullname: str) -> types.CodeType: ...
+        def create_module(self, spec: ModuleSpec) -> None: ...
+        def exec_module(self, module: types.ModuleType) -> None: ...
+        @deprecated("load_module() is deprecated; use exec_module() instead")
+        def load_module(self, fullname: str) -> types.ModuleType: ...
+        def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.NamespaceReader: ...
+        if sys.version_info < (3, 12):
+            @staticmethod
+            @deprecated("module_repr() is deprecated, and has been removed in Python 3.12")
+            def module_repr(module: types.ModuleType) -> str: ...
diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
index e52756544e9a..fd470b8f061d 100644
--- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
+++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi
@@ -207,7 +207,7 @@ if sys.version_info >= (3, 12):
 
 elif sys.version_info >= (3, 10):
     @overload
-    def entry_points() -> SelectableGroups: ...  # type: ignore[misc]
+    def entry_points() -> SelectableGroups: ...  # type: ignore[overload-overlap]
     @overload
     def entry_points(
         *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ...
diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi
index b54e0a9fd05b..16270b948f35 100644
--- a/mypy/typeshed/stdlib/io.pyi
+++ b/mypy/typeshed/stdlib/io.pyi
@@ -94,7 +94,7 @@ class BufferedIOBase(IOBase):
 
 class FileIO(RawIOBase, BinaryIO):  # type: ignore[misc]  # incompatible definitions of writelines in the base classes
     mode: str
-    name: FileDescriptorOrPath  # type: ignore[assignment]
+    name: FileDescriptorOrPath
     def __init__(
         self, file: FileDescriptorOrPath, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ...
     ) -> None: ...
diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi
index 55f9c92d8cac..1bc0b2ec7390 100644
--- a/mypy/typeshed/stdlib/itertools.pyi
+++ b/mypy/typeshed/stdlib/itertools.pyi
@@ -10,6 +10,7 @@ _T = TypeVar("_T")
 _S = TypeVar("_S")
 _N = TypeVar("_N", int, float, SupportsFloat, SupportsInt, SupportsIndex, SupportsComplex)
 _T_co = TypeVar("_T_co", covariant=True)
+_S_co = TypeVar("_S_co", covariant=True)
 _T1 = TypeVar("_T1")
 _T2 = TypeVar("_T2")
 _T3 = TypeVar("_T3")
@@ -84,13 +85,13 @@ class filterfalse(Iterator[_T]):
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
 
-class groupby(Iterator[tuple[_T, Iterator[_S]]], Generic[_T, _S]):
+class groupby(Iterator[tuple[_T_co, Iterator[_S_co]]], Generic[_T_co, _S_co]):
     @overload
     def __new__(cls, iterable: Iterable[_T1], key: None = None) -> groupby[_T1, _T1]: ...
     @overload
     def __new__(cls, iterable: Iterable[_T1], key: Callable[[_T1], _T2]) -> groupby[_T2, _T1]: ...
     def __iter__(self) -> Self: ...
-    def __next__(self) -> tuple[_T, Iterator[_S]]: ...
+    def __next__(self) -> tuple[_T_co, Iterator[_S_co]]: ...
 
 class islice(Iterator[_T]):
     @overload
@@ -100,10 +101,10 @@ class islice(Iterator[_T]):
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T: ...
 
-class starmap(Iterator[_T]):
-    def __init__(self, __function: Callable[..., _T], __iterable: Iterable[Iterable[Any]]) -> None: ...
+class starmap(Iterator[_T_co]):
+    def __new__(cls, __function: Callable[..., _T], __iterable: Iterable[Iterable[Any]]) -> starmap[_T]: ...
     def __iter__(self) -> Self: ...
-    def __next__(self) -> _T: ...
+    def __next__(self) -> _T_co: ...
 
 class takewhile(Iterator[_T]):
     def __init__(self, __predicate: _Predicate[_T], __iterable: Iterable[_T]) -> None: ...
@@ -269,10 +270,19 @@ class combinations(Iterator[_T_co]):
     def __iter__(self) -> Self: ...
     def __next__(self) -> _T_co: ...
 
-class combinations_with_replacement(Iterator[tuple[_T, ...]], Generic[_T]):
-    def __init__(self, iterable: Iterable[_T], r: int) -> None: ...
+class combinations_with_replacement(Iterator[_T_co]):
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations_with_replacement[tuple[_T, _T]]: ...
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: Literal[3]) -> combinations_with_replacement[tuple[_T, _T, _T]]: ...
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: Literal[4]) -> combinations_with_replacement[tuple[_T, _T, _T, _T]]: ...
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations_with_replacement[tuple[_T, _T, _T, _T, _T]]: ...
+    @overload
+    def __new__(cls, iterable: Iterable[_T], r: int) -> combinations_with_replacement[tuple[_T, ...]]: ...
     def __iter__(self) -> Self: ...
-    def __next__(self) -> tuple[_T, ...]: ...
+    def __next__(self) -> _T_co: ...
 
 if sys.version_info >= (3, 10):
     class pairwise(Iterator[_T_co]):
diff --git a/mypy/typeshed/stdlib/logging/config.pyi b/mypy/typeshed/stdlib/logging/config.pyi
index e92658f7f1b3..0a61e5b16870 100644
--- a/mypy/typeshed/stdlib/logging/config.pyi
+++ b/mypy/typeshed/stdlib/logging/config.pyi
@@ -5,7 +5,7 @@ from configparser import RawConfigParser
 from re import Pattern
 from threading import Thread
 from typing import IO, Any, overload
-from typing_extensions import Literal, SupportsIndex, TypeAlias, TypedDict
+from typing_extensions import Literal, Required, SupportsIndex, TypeAlias, TypedDict
 
 from . import Filter, Filterer, Formatter, Handler, Logger, _FilterType, _FormatStyle, _Level
 
@@ -50,18 +50,16 @@ _FilterConfiguration: TypeAlias = _FilterConfigurationTypedDict | dict[str, Any]
 # Handler config can have additional keys even when not providing a custom factory so we just use `dict`.
 _HandlerConfiguration: TypeAlias = dict[str, Any]
 
-class _OptionalDictConfigArgs(TypedDict, total=False):
+class _DictConfigArgs(TypedDict, total=False):
+    version: Required[Literal[1]]
     formatters: dict[str, _FormatterConfiguration]
     filters: dict[str, _FilterConfiguration]
     handlers: dict[str, _HandlerConfiguration]
     loggers: dict[str, _LoggerConfiguration]
-    root: _RootLoggerConfiguration | None
+    root: _RootLoggerConfiguration
     incremental: bool
     disable_existing_loggers: bool
 
-class _DictConfigArgs(_OptionalDictConfigArgs, TypedDict):
-    version: Literal[1]
-
 # Accept dict[str, Any] to avoid false positives if called with a dict
 # type, since dict types are not compatible with TypedDicts.
 #
diff --git a/mypy/typeshed/stdlib/math.pyi b/mypy/typeshed/stdlib/math.pyi
index 4a4d592b860d..73b53a713301 100644
--- a/mypy/typeshed/stdlib/math.pyi
+++ b/mypy/typeshed/stdlib/math.pyi
@@ -125,7 +125,7 @@ def pow(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ...
 
 if sys.version_info >= (3, 8):
     @overload
-    def prod(__iterable: Iterable[SupportsIndex], *, start: SupportsIndex = 1) -> int: ...  # type: ignore[misc]
+    def prod(__iterable: Iterable[SupportsIndex], *, start: SupportsIndex = 1) -> int: ...  # type: ignore[overload-overlap]
     @overload
     def prod(__iterable: Iterable[_SupportsFloatOrIndex], *, start: _SupportsFloatOrIndex = 1) -> float: ...
 
diff --git a/mypy/typeshed/stdlib/multiprocessing/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/connection.pyi
index 28696fe6a3a3..333b8820d84d 100644
--- a/mypy/typeshed/stdlib/multiprocessing/connection.pyi
+++ b/mypy/typeshed/stdlib/multiprocessing/connection.pyi
@@ -31,6 +31,7 @@ class _ConnectionBase:
     def __exit__(
         self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None
     ) -> None: ...
+    def __del__(self) -> None: ...
 
 class Connection(_ConnectionBase): ...
 
diff --git a/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/mypy/typeshed/stdlib/multiprocessing/managers.pyi
index 9cfc1ebbdd5e..c0ef0a3609d0 100644
--- a/mypy/typeshed/stdlib/multiprocessing/managers.pyi
+++ b/mypy/typeshed/stdlib/multiprocessing/managers.pyi
@@ -216,3 +216,4 @@ if sys.version_info >= (3, 8):
         def get_server(self) -> SharedMemoryServer: ...
         def SharedMemory(self, size: int) -> _SharedMemory: ...
         def ShareableList(self, sequence: Iterable[_SLT] | None) -> _ShareableList[_SLT]: ...
+        def __del__(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/multiprocessing/pool.pyi b/mypy/typeshed/stdlib/multiprocessing/pool.pyi
index c52f1c1f5453..5ad4bfe93fe9 100644
--- a/mypy/typeshed/stdlib/multiprocessing/pool.pyi
+++ b/mypy/typeshed/stdlib/multiprocessing/pool.pyi
@@ -112,6 +112,7 @@ class Pool:
     def __exit__(
         self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
     ) -> None: ...
+    def __del__(self) -> None: ...
 
 class ThreadPool(Pool):
     def __init__(
diff --git a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi
index ae6e2a0ed19f..adbe8b943de6 100644
--- a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi
+++ b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi
@@ -20,6 +20,7 @@ class SharedMemory:
     def size(self) -> int: ...
     def close(self) -> None: ...
     def unlink(self) -> None: ...
+    def __del__(self) -> None: ...
 
 class ShareableList(Generic[_SLT]):
     shm: SharedMemory
diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi
index 2810d086ae49..45eaf2a66e80 100644
--- a/mypy/typeshed/stdlib/os/__init__.pyi
+++ b/mypy/typeshed/stdlib/os/__init__.pyi
@@ -248,7 +248,7 @@ class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]):
             unsetenv: Callable[[AnyStr, AnyStr], object],
         ) -> None: ...
 
-    def setdefault(self, key: AnyStr, value: AnyStr) -> AnyStr: ...  # type: ignore[override]
+    def setdefault(self, key: AnyStr, value: AnyStr) -> AnyStr: ...
     def copy(self) -> dict[AnyStr, AnyStr]: ...
     def __delitem__(self, key: AnyStr) -> None: ...
     def __getitem__(self, key: AnyStr) -> AnyStr: ...
diff --git a/mypy/typeshed/stdlib/pkgutil.pyi b/mypy/typeshed/stdlib/pkgutil.pyi
index 59f1f734cf90..4a0c8d101b7a 100644
--- a/mypy/typeshed/stdlib/pkgutil.pyi
+++ b/mypy/typeshed/stdlib/pkgutil.pyi
@@ -3,6 +3,7 @@ from _typeshed import SupportsRead
 from collections.abc import Callable, Iterable, Iterator
 from importlib.abc import Loader, MetaPathFinder, PathEntryFinder
 from typing import IO, Any, NamedTuple, TypeVar
+from typing_extensions import deprecated
 
 __all__ = [
     "get_importer",
@@ -35,8 +36,10 @@ if sys.version_info < (3, 12):
     class ImpLoader:
         def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ...
 
+@deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.")
 def find_loader(fullname: str) -> Loader | None: ...
 def get_importer(path_item: str) -> PathEntryFinder | None: ...
+@deprecated("Use importlib.util.find_spec() instead. Will be removed in Python 3.14.")
 def get_loader(module_or_name: str) -> Loader | None: ...
 def iter_importers(fullname: str = "") -> Iterator[MetaPathFinder | PathEntryFinder]: ...
 def iter_modules(path: Iterable[str] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ...
diff --git a/mypy/typeshed/stdlib/re.pyi b/mypy/typeshed/stdlib/re.pyi
index 29ee8b66815e..ec532ca3cffe 100644
--- a/mypy/typeshed/stdlib/re.pyi
+++ b/mypy/typeshed/stdlib/re.pyi
@@ -67,7 +67,7 @@ class Match(Generic[AnyStr]):
     @overload
     def expand(self: Match[str], template: str) -> str: ...
     @overload
-    def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ...  # type: ignore[misc]
+    def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ...  # type: ignore[overload-overlap]
     @overload
     def expand(self, template: AnyStr) -> AnyStr: ...
     # group() returns "AnyStr" or "AnyStr | None", depending on the pattern.
@@ -117,19 +117,19 @@ class Pattern(Generic[AnyStr]):
     @overload
     def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
     @overload
-    def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[misc]
+    def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[overload-overlap]
     @overload
     def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
     @overload
     def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
     @overload
-    def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[misc]
+    def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[overload-overlap]
     @overload
     def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
     @overload
     def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
     @overload
-    def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[misc]
+    def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[overload-overlap]
     @overload
     def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
     @overload
@@ -148,13 +148,13 @@ class Pattern(Generic[AnyStr]):
     @overload
     def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ...
     @overload
-    def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[bytes]]: ...  # type: ignore[misc]
+    def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[bytes]]: ...  # type: ignore[overload-overlap]
     @overload
     def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ...
     @overload
     def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ...
     @overload
-    def sub(  # type: ignore[misc]
+    def sub(  # type: ignore[overload-overlap]
         self: Pattern[bytes],
         repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
         string: ReadableBuffer,
@@ -165,7 +165,7 @@ class Pattern(Generic[AnyStr]):
     @overload
     def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ...
     @overload
-    def subn(  # type: ignore[misc]
+    def subn(  # type: ignore[overload-overlap]
         self: Pattern[bytes],
         repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
         string: ReadableBuffer,
diff --git a/mypy/typeshed/stdlib/shelve.pyi b/mypy/typeshed/stdlib/shelve.pyi
index b162b3a85766..59abeafe6fca 100644
--- a/mypy/typeshed/stdlib/shelve.pyi
+++ b/mypy/typeshed/stdlib/shelve.pyi
@@ -16,7 +16,7 @@ class Shelf(MutableMapping[str, _VT]):
     def __iter__(self) -> Iterator[str]: ...
     def __len__(self) -> int: ...
     @overload  # type: ignore[override]
-    def get(self, key: str, default: None = None) -> _VT | None: ...  # type: ignore[misc]  # overlapping overloads
+    def get(self, key: str, default: None = None) -> _VT | None: ...
     @overload
     def get(self, key: str, default: _VT) -> _VT: ...
     @overload
@@ -29,6 +29,7 @@ class Shelf(MutableMapping[str, _VT]):
     def __exit__(
         self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
     ) -> None: ...
+    def __del__(self) -> None: ...
     def close(self) -> None: ...
     def sync(self) -> None: ...
 
diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi
index 38c50d51b129..78e930920073 100644
--- a/mypy/typeshed/stdlib/shutil.pyi
+++ b/mypy/typeshed/stdlib/shutil.pyi
@@ -154,13 +154,13 @@ def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: ...
 # see https://bugs.python.org/issue33140. We keep it here because it's
 # in __all__.
 @overload
-def chown(path: StrOrBytesPath, user: str | int, group: None = None) -> None: ...
+def chown(path: FileDescriptorOrPath, user: str | int, group: None = None) -> None: ...
 @overload
-def chown(path: StrOrBytesPath, user: None = None, *, group: str | int) -> None: ...
+def chown(path: FileDescriptorOrPath, user: None = None, *, group: str | int) -> None: ...
 @overload
-def chown(path: StrOrBytesPath, user: None, group: str | int) -> None: ...
+def chown(path: FileDescriptorOrPath, user: None, group: str | int) -> None: ...
 @overload
-def chown(path: StrOrBytesPath, user: str | int, group: str | int) -> None: ...
+def chown(path: FileDescriptorOrPath, user: str | int, group: str | int) -> None: ...
 
 if sys.version_info >= (3, 8):
     @overload
diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi
index b6cc23651ade..b89623f05c99 100644
--- a/mypy/typeshed/stdlib/subprocess.pyi
+++ b/mypy/typeshed/stdlib/subprocess.pyi
@@ -2564,6 +2564,7 @@ class Popen(Generic[AnyStr]):
     def __exit__(
         self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
     ) -> None: ...
+    def __del__(self) -> None: ...
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, item: Any) -> GenericAlias: ...
 
diff --git a/mypy/typeshed/stdlib/sunau.pyi b/mypy/typeshed/stdlib/sunau.pyi
index 6109b368c01a..b508a1ea8e20 100644
--- a/mypy/typeshed/stdlib/sunau.pyi
+++ b/mypy/typeshed/stdlib/sunau.pyi
@@ -34,6 +34,7 @@ class Au_read:
     def __init__(self, f: _File) -> None: ...
     def __enter__(self) -> Self: ...
     def __exit__(self, *args: Unused) -> None: ...
+    def __del__(self) -> None: ...
     def getfp(self) -> IO[bytes] | None: ...
     def rewind(self) -> None: ...
     def close(self) -> None: ...
@@ -54,6 +55,7 @@ class Au_write:
     def __init__(self, f: _File) -> None: ...
     def __enter__(self) -> Self: ...
     def __exit__(self, *args: Unused) -> None: ...
+    def __del__(self) -> None: ...
     def setnchannels(self, nchannels: int) -> None: ...
     def getnchannels(self) -> int: ...
     def setsampwidth(self, sampwidth: int) -> None: ...
diff --git a/mypy/typeshed/stdlib/telnetlib.pyi b/mypy/typeshed/stdlib/telnetlib.pyi
index 10f6e4930f75..d244d54f2fbf 100644
--- a/mypy/typeshed/stdlib/telnetlib.pyi
+++ b/mypy/typeshed/stdlib/telnetlib.pyi
@@ -119,3 +119,4 @@ class Telnet:
     def __exit__(
         self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
     ) -> None: ...
+    def __del__(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi
index f8dcb24c1daf..628f99410732 100644
--- a/mypy/typeshed/stdlib/tempfile.pyi
+++ b/mypy/typeshed/stdlib/tempfile.pyi
@@ -629,7 +629,7 @@ class TemporaryDirectory(Generic[AnyStr]):
 
 # The overloads overlap, but they should still work fine.
 @overload
-def mkstemp(  # type: ignore[misc]
+def mkstemp(  # type: ignore[overload-overlap]
     suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False
 ) -> tuple[int, str]: ...
 @overload
@@ -639,7 +639,7 @@ def mkstemp(
 
 # The overloads overlap, but they should still work fine.
 @overload
-def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ...  # type: ignore[misc]
+def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ...  # type: ignore[overload-overlap]
 @overload
 def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ...
 def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: ...
diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi
index d0eb97aa5ebd..a73b1e275f11 100644
--- a/mypy/typeshed/stdlib/tkinter/__init__.pyi
+++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi
@@ -282,6 +282,7 @@ class Variable:
     @deprecated("use trace_info() instead of trace_vinfo()")
     def trace_vinfo(self): ...
     def __eq__(self, other: object) -> bool: ...
+    def __del__(self) -> None: ...
 
 class StringVar(Variable):
     def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: ...
diff --git a/mypy/typeshed/stdlib/tkinter/dnd.pyi b/mypy/typeshed/stdlib/tkinter/dnd.pyi
index 8f438537369c..5a83bb56679f 100644
--- a/mypy/typeshed/stdlib/tkinter/dnd.pyi
+++ b/mypy/typeshed/stdlib/tkinter/dnd.pyi
@@ -15,5 +15,6 @@ class DndHandler:
     def finish(self, event: Event[Misc] | None, commit: int = 0) -> None: ...
     def on_motion(self, event: Event[Misc]) -> None: ...
     def on_release(self, event: Event[Misc]) -> None: ...
+    def __del__(self) -> None: ...
 
 def dnd_start(source: _DndSource, event: Event[Misc]) -> DndHandler | None: ...
diff --git a/mypy/typeshed/stdlib/tkinter/font.pyi b/mypy/typeshed/stdlib/tkinter/font.pyi
index 0a557e921914..9dffcd1ba0c6 100644
--- a/mypy/typeshed/stdlib/tkinter/font.pyi
+++ b/mypy/typeshed/stdlib/tkinter/font.pyi
@@ -101,6 +101,7 @@ class Font:
     def metrics(self, *, displayof: tkinter.Misc | None = ...) -> _MetricsDict: ...
     def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: ...
     def __eq__(self, other: object) -> bool: ...
+    def __del__(self) -> None: ...
 
 def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: ...
 def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: ...
diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi
index bb416717a378..2bbbafbcb945 100644
--- a/mypy/typeshed/stdlib/tkinter/ttk.pyi
+++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi
@@ -1039,7 +1039,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
     @overload
     def heading(self, column: str | int, option: str) -> Any: ...
     @overload
-    def heading(self, column: str | int, option: None = None) -> _TreeviewHeaderDict: ...  # type: ignore[misc]
+    def heading(self, column: str | int, option: None = None) -> _TreeviewHeaderDict: ...  # type: ignore[overload-overlap]
     @overload
     def heading(
         self,
@@ -1083,7 +1083,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
     @overload
     def item(self, item: str | int, option: str) -> Any: ...
     @overload
-    def item(self, item: str | int, option: None = None) -> _TreeviewItemDict: ...  # type: ignore[misc]
+    def item(self, item: str | int, option: None = None) -> _TreeviewItemDict: ...  # type: ignore[overload-overlap]
     @overload
     def item(
         self,
diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi
index 36cd5f1f6e9d..fd0723fd73ed 100644
--- a/mypy/typeshed/stdlib/turtle.pyi
+++ b/mypy/typeshed/stdlib/turtle.pyi
@@ -336,7 +336,7 @@ class TPen:
     def isvisible(self) -> bool: ...
     # Note: signatures 1 and 2 overlap unsafely when no arguments are provided
     @overload
-    def pen(self) -> _PenState: ...  # type: ignore[misc]
+    def pen(self) -> _PenState: ...  # type: ignore[overload-overlap]
     @overload
     def pen(
         self,
@@ -382,7 +382,7 @@ class RawTurtle(TPen, TNavigator):
     def shape(self, name: str) -> None: ...
     # Unsafely overlaps when no arguments are provided
     @overload
-    def shapesize(self) -> tuple[float, float, float]: ...  # type: ignore[misc]
+    def shapesize(self) -> tuple[float, float, float]: ...  # type: ignore[overload-overlap]
     @overload
     def shapesize(
         self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None
@@ -393,7 +393,7 @@ class RawTurtle(TPen, TNavigator):
     def shearfactor(self, shear: float) -> None: ...
     # Unsafely overlaps when no arguments are provided
     @overload
-    def shapetransform(self) -> tuple[float, float, float, float]: ...  # type: ignore[misc]
+    def shapetransform(self) -> tuple[float, float, float, float]: ...  # type: ignore[overload-overlap]
     @overload
     def shapetransform(
         self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None
@@ -617,7 +617,7 @@ def isvisible() -> bool: ...
 
 # Note: signatures 1 and 2 overlap unsafely when no arguments are provided
 @overload
-def pen() -> _PenState: ...  # type: ignore[misc]
+def pen() -> _PenState: ...  # type: ignore[overload-overlap]
 @overload
 def pen(
     pen: _PenState | None = None,
@@ -656,7 +656,7 @@ if sys.version_info >= (3, 12):
 
 # Unsafely overlaps when no arguments are provided
 @overload
-def shapesize() -> tuple[float, float, float]: ...  # type: ignore[misc]
+def shapesize() -> tuple[float, float, float]: ...  # type: ignore[overload-overlap]
 @overload
 def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ...
 @overload
@@ -666,7 +666,7 @@ def shearfactor(shear: float) -> None: ...
 
 # Unsafely overlaps when no arguments are provided
 @overload
-def shapetransform() -> tuple[float, float, float, float]: ...  # type: ignore[misc]
+def shapetransform() -> tuple[float, float, float, float]: ...  # type: ignore[overload-overlap]
 @overload
 def shapetransform(
     t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None
diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi
index fcaf5264c5e3..b26a668d273b 100644
--- a/mypy/typeshed/stdlib/types.pyi
+++ b/mypy/typeshed/stdlib/types.pyi
@@ -593,9 +593,8 @@ _R = TypeVar("_R")
 _P = ParamSpec("_P")
 
 # it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable
-# The type: ignore is due to overlapping overloads, not the use of ParamSpec
 @overload
-def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ...  # type: ignore[misc]
+def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ...  # type: ignore[overload-overlap]
 @overload
 def coroutine(func: _Fn) -> _Fn: ...
 
diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi
index 7694157d70fe..555df0ea47c8 100644
--- a/mypy/typeshed/stdlib/typing.pyi
+++ b/mypy/typeshed/stdlib/typing.pyi
@@ -281,7 +281,12 @@ if sys.version_info >= (3, 10):
 
     class NewType:
         def __init__(self, name: str, tp: Any) -> None: ...
-        def __call__(self, __x: _T) -> _T: ...
+        if sys.version_info >= (3, 11):
+            @staticmethod
+            def __call__(__x: _T) -> _T: ...
+        else:
+            def __call__(self, x: _T) -> _T: ...
+
         def __or__(self, other: Any) -> _SpecialForm: ...
         def __ror__(self, other: Any) -> _SpecialForm: ...
         __supertype__: type
@@ -970,9 +975,8 @@ if sys.version_info >= (3, 12):
         @property
         def __module__(self) -> str | None: ...  # type: ignore[override]
         def __getitem__(self, parameters: Any) -> Any: ...
-        if sys.version_info >= (3, 10):
-            def __or__(self, right: Any) -> _SpecialForm: ...
-            def __ror__(self, left: Any) -> _SpecialForm: ...
+        def __or__(self, right: Any) -> _SpecialForm: ...
+        def __ror__(self, left: Any) -> _SpecialForm: ...
 
 if sys.version_info >= (3, 13):
     def is_protocol(__tp: type) -> bool: ...
diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi
index b5e2341cd020..5c5b756f5256 100644
--- a/mypy/typeshed/stdlib/typing_extensions.pyi
+++ b/mypy/typeshed/stdlib/typing_extensions.pyi
@@ -182,6 +182,7 @@ __all__ = [
     "is_protocol",
     "no_type_check",
     "no_type_check_decorator",
+    "ReadOnly",
 ]
 
 _T = typing.TypeVar("_T")
@@ -220,6 +221,8 @@ def IntVar(name: str) -> Any: ...  # returns a new TypeVar
 class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta):
     __required_keys__: ClassVar[frozenset[str]]
     __optional_keys__: ClassVar[frozenset[str]]
+    __readonly_keys__: ClassVar[frozenset[str]]
+    __mutable_keys__: ClassVar[frozenset[str]]
     __total__: ClassVar[bool]
     __orig_bases__: ClassVar[tuple[Any, ...]]
     def copy(self) -> Self: ...
@@ -283,7 +286,6 @@ class SupportsIndex(Protocol, metaclass=abc.ABCMeta):
 if sys.version_info >= (3, 10):
     from typing import (
         Concatenate as Concatenate,
-        NewType as NewType,
         ParamSpecArgs as ParamSpecArgs,
         ParamSpecKwargs as ParamSpecKwargs,
         TypeAlias as TypeAlias,
@@ -308,18 +310,13 @@ else:
     TypeGuard: _SpecialForm
     def is_typeddict(tp: object) -> bool: ...
 
-    class NewType:
-        def __init__(self, name: str, tp: Any) -> None: ...
-        def __call__(self, __x: _T) -> _T: ...
-        __supertype__: type
-
-# New things in 3.11
-# NamedTuples are not new, but the ability to create generic NamedTuples is new in 3.11
+# New and changed things in 3.11
 if sys.version_info >= (3, 11):
     from typing import (
         LiteralString as LiteralString,
         NamedTuple as NamedTuple,
         Never as Never,
+        NewType as NewType,
         NotRequired as NotRequired,
         Required as Required,
         Self as Self,
@@ -376,6 +373,14 @@ else:
 
         def _replace(self, **kwargs: Any) -> Self: ...
 
+    class NewType:
+        def __init__(self, name: str, tp: Any) -> None: ...
+        def __call__(self, __obj: _T) -> _T: ...
+        __supertype__: type
+        if sys.version_info >= (3, 10):
+            def __or__(self, other: Any) -> _SpecialForm: ...
+            def __ror__(self, other: Any) -> _SpecialForm: ...
+
 # New things in 3.xx
 # The `default` parameter was added to TypeVar, ParamSpec, and TypeVarTuple (PEP 696)
 # The `infer_variance` parameter was added to TypeVar in 3.12 (PEP 695)
@@ -449,7 +454,12 @@ class TypeVarTuple:
     def __init__(self, name: str, *, default: Any | None = None) -> None: ...
     def __iter__(self) -> Any: ...  # Unpack[Self]
 
-def deprecated(__msg: str, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> Callable[[_T], _T]: ...
+class deprecated:
+    message: str
+    category: type[Warning] | None
+    stacklevel: int
+    def __init__(self, __message: str, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> None: ...
+    def __call__(self, __arg: _T) -> _T: ...
 
 if sys.version_info >= (3, 12):
     from collections.abc import Buffer as Buffer
@@ -496,3 +506,5 @@ class Doc:
     def __init__(self, __documentation: str) -> None: ...
     def __hash__(self) -> int: ...
     def __eq__(self, other: object) -> bool: ...
+
+ReadOnly: _SpecialForm
diff --git a/mypy/typeshed/stdlib/unittest/async_case.pyi b/mypy/typeshed/stdlib/unittest/async_case.pyi
index c1de205fbd55..b71eec2e0644 100644
--- a/mypy/typeshed/stdlib/unittest/async_case.pyi
+++ b/mypy/typeshed/stdlib/unittest/async_case.pyi
@@ -17,3 +17,5 @@ class IsolatedAsyncioTestCase(TestCase):
     def addAsyncCleanup(self, __func: Callable[_P, Awaitable[object]], *args: _P.args, **kwargs: _P.kwargs) -> None: ...
     if sys.version_info >= (3, 11):
         async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: ...
+    if sys.version_info >= (3, 9):
+        def __del__(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi
index baf025bdeb5a..8e96b23ce959 100644
--- a/mypy/typeshed/stdlib/unittest/mock.pyi
+++ b/mypy/typeshed/stdlib/unittest/mock.pyi
@@ -318,7 +318,7 @@ class _patcher:
     # Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock],
     # but that's impossible with the current type system.
     @overload
-    def __call__(  # type: ignore[misc]
+    def __call__(  # type: ignore[overload-overlap]
         self,
         target: str,
         new: _T,
@@ -343,7 +343,7 @@ class _patcher:
     ) -> _patch_default_new: ...
     @overload
     @staticmethod
-    def object(  # type: ignore[misc]
+    def object(
         target: Any,
         attribute: str,
         new: _T,
diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi
index 237a4d264b51..ca3feaea262a 100644
--- a/mypy/typeshed/stdlib/urllib/request.pyi
+++ b/mypy/typeshed/stdlib/urllib/request.pyi
@@ -337,6 +337,7 @@ class URLopener:
     def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ...  # undocumented
     def open_local_file(self, url: str) -> addinfourl: ...  # undocumented
     def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: ...  # undocumented
+    def __del__(self) -> None: ...
 
 class FancyURLopener(URLopener):
     def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ...
diff --git a/mypy/typeshed/stdlib/wave.pyi b/mypy/typeshed/stdlib/wave.pyi
index 0d004d6b2d8a..6b7af2f79da1 100644
--- a/mypy/typeshed/stdlib/wave.pyi
+++ b/mypy/typeshed/stdlib/wave.pyi
@@ -1,7 +1,7 @@
 import sys
 from _typeshed import ReadableBuffer, Unused
 from typing import IO, Any, BinaryIO, NamedTuple, NoReturn, overload
-from typing_extensions import Literal, Self, TypeAlias
+from typing_extensions import Literal, Self, TypeAlias, deprecated
 
 if sys.version_info >= (3, 9):
     __all__ = ["open", "Error", "Wave_read", "Wave_write"]
@@ -26,6 +26,7 @@ class Wave_read:
     def __init__(self, f: _File) -> None: ...
     def __enter__(self) -> Self: ...
     def __exit__(self, *args: Unused) -> None: ...
+    def __del__(self) -> None: ...
     def getfp(self) -> BinaryIO | None: ...
     def rewind(self) -> None: ...
     def close(self) -> None: ...
@@ -37,7 +38,9 @@ class Wave_read:
     def getcomptype(self) -> str: ...
     def getcompname(self) -> str: ...
     def getparams(self) -> _wave_params: ...
+    @deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.15")
     def getmarkers(self) -> None: ...
+    @deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.15")
     def getmark(self, id: Any) -> NoReturn: ...
     def setpos(self, pos: int) -> None: ...
     def readframes(self, nframes: int) -> bytes: ...
@@ -46,6 +49,7 @@ class Wave_write:
     def __init__(self, f: _File) -> None: ...
     def __enter__(self) -> Self: ...
     def __exit__(self, *args: Unused) -> None: ...
+    def __del__(self) -> None: ...
     def setnchannels(self, nchannels: int) -> None: ...
     def getnchannels(self) -> int: ...
     def setsampwidth(self, sampwidth: int) -> None: ...
@@ -59,8 +63,11 @@ class Wave_write:
     def getcompname(self) -> str: ...
     def setparams(self, params: _wave_params | tuple[int, int, int, int, str, str]) -> None: ...
     def getparams(self) -> _wave_params: ...
+    @deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.15")
     def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ...
+    @deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.15")
     def getmark(self, id: Any) -> NoReturn: ...
+    @deprecated("Deprecated in Python 3.13; removal scheduled for Python 3.15")
     def getmarkers(self) -> None: ...
     def tell(self) -> int: ...
     def writeframesraw(self, data: ReadableBuffer) -> None: ...
diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi
index ae88f3a317c1..1bb2eacfb46a 100644
--- a/mypy/typeshed/stdlib/weakref.pyi
+++ b/mypy/typeshed/stdlib/weakref.pyi
@@ -75,7 +75,7 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]):
     def items(self) -> Iterator[tuple[_KT, _VT]]: ...  # type: ignore[override]
     def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ...
     def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: ...
-    def setdefault(self, key: _KT, default: _VT) -> _VT: ...  # type: ignore[override]
+    def setdefault(self, key: _KT, default: _VT) -> _VT: ...
     @overload
     def pop(self, key: _KT) -> _VT: ...
     @overload
diff --git a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi
index 74d2efb010cd..2ccbc95bbef0 100644
--- a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi
+++ b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi
@@ -82,6 +82,6 @@ class AttributesNSImpl(AttributesImpl):
     def __contains__(self, name: _NSName) -> bool: ...  # type: ignore[override]
     @overload  # type: ignore[override]
     def get(self, name: _NSName, alternative: None = None) -> str | None: ...
-    @overload  # type: ignore[override]
+    @overload
     def get(self, name: _NSName, alternative: str) -> str: ...
     def items(self) -> list[tuple[_NSName, str]]: ...  # type: ignore[override]
diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi
index b7144f3ab528..5483b84fe6f6 100644
--- a/mypy/typeshed/stdlib/zipfile.pyi
+++ b/mypy/typeshed/stdlib/zipfile.pyi
@@ -26,7 +26,9 @@ __all__ = [
 if sys.version_info >= (3, 8):
     __all__ += ["Path"]
 
-_DateTuple: TypeAlias = tuple[int, int, int, int, int, int]
+# TODO: use TypeAlias when mypy bugs are fixed
+# https://github.com/python/mypy/issues/16581
+_DateTuple = tuple[int, int, int, int, int, int]  # noqa: Y026
 _ReadWriteMode: TypeAlias = Literal["r", "w"]
 _ReadWriteBinaryMode: TypeAlias = Literal["r", "w", "rb", "wb"]
 _ZipFileMode: TypeAlias = Literal["r", "w", "x", "a"]
@@ -187,6 +189,8 @@ class ZipFile:
     if sys.version_info >= (3, 11):
         def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: ...
 
+    def __del__(self) -> None: ...
+
 class PyZipFile(ZipFile):
     def __init__(
         self, file: str | IO[bytes], mode: _ZipFileMode = "r", compression: int = 0, allowZip64: bool = True, optimize: int = -1
@@ -231,7 +235,7 @@ if sys.version_info >= (3, 8):
         def make(cls, source: ZipFile) -> CompleteDirs: ...
         @overload
         @classmethod
-        def make(cls: type[Self], source: StrPath | IO[bytes]) -> Self: ...
+        def make(cls, source: StrPath | IO[bytes]) -> Self: ...
 
     class Path:
         root: CompleteDirs

From 588623ff2fd1d842ce3b70d330d8c37a166db8c4 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Mon, 26 Sep 2022 12:55:07 -0700
Subject: [PATCH 277/288] Remove use of LiteralString in builtins (#13743)

---
 mypy/typeshed/stdlib/builtins.pyi | 93 -------------------------------
 1 file changed, 93 deletions(-)

diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index 92b5279bcfcd..04f2f8a89539 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -57,7 +57,6 @@ from typing import (  # noqa: Y022
 from typing_extensions import (
     Concatenate,
     Literal,
-    LiteralString,
     ParamSpec,
     Self,
     SupportsIndex,
@@ -445,17 +444,8 @@ class str(Sequence[str]):
     def __new__(cls, object: object = ...) -> Self: ...
     @overload
     def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ...
-    @overload
-    def capitalize(self: LiteralString) -> LiteralString: ...
-    @overload
     def capitalize(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def casefold(self: LiteralString) -> LiteralString: ...
-    @overload
     def casefold(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
-    @overload
     def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
     def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
@@ -463,20 +453,11 @@ class str(Sequence[str]):
         self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
     if sys.version_info >= (3, 8):
-        @overload
-        def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ...
-        @overload
         def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ...  # type: ignore[misc]
     else:
-        @overload
-        def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ...
-        @overload
         def expandtabs(self, tabsize: int = 8) -> str: ...  # type: ignore[misc]
 
     def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
-    @overload
-    def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ...
-    @overload
     def format(self, *args: object, **kwargs: object) -> str: ...
     def format_map(self, map: _FormatMapMapping) -> str: ...
     def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
@@ -492,91 +473,32 @@ class str(Sequence[str]):
     def isspace(self) -> bool: ...
     def istitle(self) -> bool: ...
     def isupper(self) -> bool: ...
-    @overload
-    def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ...
-    @overload
     def join(self, __iterable: Iterable[str]) -> str: ...  # type: ignore[misc]
-    @overload
-    def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
-    @overload
     def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
-    @overload
-    def lower(self: LiteralString) -> LiteralString: ...
-    @overload
     def lower(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
-    @overload
     def lstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
-    @overload
-    def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
-    @overload
     def partition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
-    @overload
-    def replace(
-        self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1
-    ) -> LiteralString: ...
-    @overload
     def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ...  # type: ignore[misc]
     if sys.version_info >= (3, 9):
-        @overload
-        def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ...
-        @overload
         def removeprefix(self, __prefix: str) -> str: ...  # type: ignore[misc]
-        @overload
-        def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ...
-        @overload
         def removesuffix(self, __suffix: str) -> str: ...  # type: ignore[misc]
 
     def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
     def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ...
-    @overload
-    def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ...
-    @overload
     def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ...  # type: ignore[misc]
-    @overload
-    def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ...
-    @overload
     def rpartition(self, __sep: str) -> tuple[str, str, str]: ...  # type: ignore[misc]
-    @overload
-    def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
-    @overload
     def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
-    @overload
-    def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
-    @overload
     def rstrip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
-    @overload
-    def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ...
-    @overload
     def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ...  # type: ignore[misc]
-    @overload
-    def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ...
-    @overload
     def splitlines(self, keepends: bool = False) -> list[str]: ...  # type: ignore[misc]
     def startswith(
         self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...
     ) -> bool: ...
-    @overload
-    def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ...
-    @overload
     def strip(self, __chars: str | None = None) -> str: ...  # type: ignore[misc]
-    @overload
-    def swapcase(self: LiteralString) -> LiteralString: ...
-    @overload
     def swapcase(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def title(self: LiteralString) -> LiteralString: ...
-    @overload
     def title(self) -> str: ...  # type: ignore[misc]
     def translate(self, __table: _TranslateTable) -> str: ...
-    @overload
-    def upper(self: LiteralString) -> LiteralString: ...
-    @overload
     def upper(self) -> str: ...  # type: ignore[misc]
-    @overload
-    def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ...
-    @overload
     def zfill(self, __width: SupportsIndex) -> str: ...  # type: ignore[misc]
     @staticmethod
     @overload
@@ -587,9 +509,6 @@ class str(Sequence[str]):
     @staticmethod
     @overload
     def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ...
-    @overload
-    def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ...
-    @overload
     def __add__(self, __value: str) -> str: ...  # type: ignore[misc]
     # Incompatible with Sequence.__contains__
     def __contains__(self, __key: str) -> bool: ...  # type: ignore[override]
@@ -598,25 +517,13 @@ class str(Sequence[str]):
     def __getitem__(self, __key: SupportsIndex | slice) -> str: ...
     def __gt__(self, __value: str) -> bool: ...
     def __hash__(self) -> int: ...
-    @overload
-    def __iter__(self: LiteralString) -> Iterator[LiteralString]: ...
-    @overload
     def __iter__(self) -> Iterator[str]: ...  # type: ignore[misc]
     def __le__(self, __value: str) -> bool: ...
     def __len__(self) -> int: ...
     def __lt__(self, __value: str) -> bool: ...
-    @overload
-    def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ...
-    @overload
     def __mod__(self, __value: Any) -> str: ...
-    @overload
-    def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
-    @overload
     def __mul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __ne__(self, __value: object) -> bool: ...
-    @overload
-    def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ...
-    @overload
     def __rmul__(self, __value: SupportsIndex) -> str: ...  # type: ignore[misc]
     def __getnewargs__(self) -> tuple[str]: ...
 

From bdcc90e85be8b18c3a37b4ef83645e1c09f96495 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Sat, 29 Oct 2022 12:47:21 -0700
Subject: [PATCH 278/288] Revert sum literal integer change (#13961)

This is allegedly causing large performance problems, see 13821

typeshed/8231 had zero hits on mypy_primer, so it's not the worst thing
to undo. Patching this in typeshed also feels weird, since there's a
more general soundness issue. If a typevar has a bound or constraint, we
might not want to solve it to a Literal.

If we can confirm the performance regression or fix the unsoundness
within mypy, I might pursue upstreaming this in typeshed.

(Reminder: add this to the sync_typeshed script once merged)
---
 mypy/typeshed/stdlib/builtins.pyi | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi
index 04f2f8a89539..e3d7ee7e5cc1 100644
--- a/mypy/typeshed/stdlib/builtins.pyi
+++ b/mypy/typeshed/stdlib/builtins.pyi
@@ -1702,11 +1702,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit
 # Instead, we special-case the most common examples of this: bool and literal integers.
 if sys.version_info >= (3, 8):
     @overload
-    def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ...  # type: ignore[overload-overlap]
+    def sum(__iterable: Iterable[bool], start: int = 0) -> int: ...  # type: ignore[overload-overlap]
 
 else:
     @overload
-    def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ...  # type: ignore[overload-overlap]
+    def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ...  # type: ignore[overload-overlap]
 
 @overload
 def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ...

From 3e5d813372e4fc1899319f31425bfc11c27fddb3 Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Mon, 1 May 2023 20:34:55 +0100
Subject: [PATCH 279/288] Revert typeshed ctypes change Since the plugin
 provides superior type checking:
 https://github.com/python/mypy/pull/13987#issuecomment-1310863427 A manual
 cherry-pick of e437cdf.

---
 mypy/typeshed/stdlib/_ctypes.pyi | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi
index 495e29dfd8ce..8a891971e9f1 100644
--- a/mypy/typeshed/stdlib/_ctypes.pyi
+++ b/mypy/typeshed/stdlib/_ctypes.pyi
@@ -167,11 +167,7 @@ class Array(_CData, Generic[_CT]):
     def _type_(self) -> type[_CT]: ...
     @_type_.setter
     def _type_(self, value: type[_CT]) -> None: ...
-    # Note: only available if _CT == c_char
-    @property
-    def raw(self) -> bytes: ...
-    @raw.setter
-    def raw(self, value: ReadableBuffer) -> None: ...
+    raw: bytes  # Note: only available if _CT == c_char
     value: Any  # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise
     # TODO These methods cannot be annotated correctly at the moment.
     # All of these "Any"s stand for the array's element type, but it's not possible to use _CT

From 344298e3a7b1a299092c684c11c28e9f4dc44dd9 Mon Sep 17 00:00:00 2001
From: AlexWaygood <alex.waygood@gmail.com>
Date: Sat, 4 Mar 2023 13:14:11 +0000
Subject: [PATCH 280/288] Revert use of `ParamSpec` for `functools.wraps`

---
 mypy/typeshed/stdlib/functools.pyi | 40 +++++++++++-------------------
 1 file changed, 14 insertions(+), 26 deletions(-)

diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi
index 451896bed72a..4d8c45e96103 100644
--- a/mypy/typeshed/stdlib/functools.pyi
+++ b/mypy/typeshed/stdlib/functools.pyi
@@ -1,9 +1,9 @@
 import sys
 import types
-from _typeshed import SupportsAllComparisons, SupportsItems
+from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems
 from collections.abc import Callable, Hashable, Iterable, Sequence, Sized
 from typing import Any, Generic, NamedTuple, TypeVar, overload
-from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final
+from typing_extensions import Literal, Self, TypeAlias, TypedDict, final
 
 if sys.version_info >= (3, 9):
     from types import GenericAlias
@@ -28,12 +28,10 @@ if sys.version_info >= (3, 8):
 if sys.version_info >= (3, 9):
     __all__ += ["cache"]
 
+_AnyCallable: TypeAlias = Callable[..., object]
+
 _T = TypeVar("_T")
 _S = TypeVar("_S")
-_PWrapped = ParamSpec("_PWrapped")
-_RWrapped = TypeVar("_RWrapped")
-_PWrapper = ParamSpec("_PWrapper")
-_RWrapper = TypeVar("_RWrapper")
 
 @overload
 def reduce(__function: Callable[[_T, _S], _T], __sequence: Iterable[_S], __initial: _T) -> _T: ...
@@ -87,41 +85,31 @@ else:
     ]
 WRAPPER_UPDATES: tuple[Literal["__dict__"]]
 
-class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWrapper]):
-    __wrapped__: Callable[_PWrapped, _RWrapped]
-    def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWrapper: ...
-    # as with ``Callable``, we'll assume that these attributes exist
-    __name__: str
-    __qualname__: str
-
-class _Wrapper(Generic[_PWrapped, _RWrapped]):
-    def __call__(self, f: Callable[_PWrapper, _RWrapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
-
 if sys.version_info >= (3, 12):
     def update_wrapper(
-        wrapper: Callable[_PWrapper, _RWrapper],
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapper: _T,
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
+    ) -> _T: ...
     def wraps(
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
+    ) -> IdentityFunction: ...
 
 else:
     def update_wrapper(
-        wrapper: Callable[_PWrapper, _RWrapper],
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapper: _T,
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ...
+    ) -> _T: ...
     def wraps(
-        wrapped: Callable[_PWrapped, _RWrapped],
+        wrapped: _AnyCallable,
         assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"),
         updated: Sequence[str] = ("__dict__",),
-    ) -> _Wrapper[_PWrapped, _RWrapped]: ...
+    ) -> IdentityFunction: ...
 
 def total_ordering(cls: type[_T]) -> type[_T]: ...
 def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ...

From eb1ee973778e3cf719948e1653db9760ea49405d Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Sun, 3 Dec 2023 00:23:54 +0000
Subject: [PATCH 281/288] Update hashes in `sync-typeshed.py` following recent
 typeshed sync (#16600)

Followup to #16598
---
 misc/sync-typeshed.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py
index 77f921a89b1b..9d6fd92270a5 100644
--- a/misc/sync-typeshed.py
+++ b/misc/sync-typeshed.py
@@ -179,10 +179,10 @@ def main() -> None:
     print("Created typeshed sync commit.")
 
     commits_to_cherry_pick = [
-        "9859fe7ba",  # LiteralString reverts
-        "378a866e9",  # sum reverts
-        "2816b97d5",  # ctypes reverts
-        "7d987a105",  # ParamSpec for functools.wraps
+        "588623ff2",  # LiteralString reverts
+        "bdcc90e85",  # sum reverts
+        "3e5d81337",  # ctypes reverts
+        "344298e3a",  # ParamSpec for functools.wraps
     ]
     for commit in commits_to_cherry_pick:
         try:

From d54cc35a93b1f1bda8f837e0f3ae6f964a1c7feb Mon Sep 17 00:00:00 2001
From: Marcel Telka <marcel@telka.sk>
Date: Mon, 4 Dec 2023 08:11:45 +0100
Subject: [PATCH 282/288] Change example in test cases with no stubs available
 (#16513)

Fixes https://github.com/python/mypy/issues/16466
---
 test-data/unit/pythoneval.test | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index 7dd2b2f76f8c..c6ca71f5d56a 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -1568,24 +1568,24 @@ note: A user-defined top-level module with name "typing" is not supported
 # flags: --ignore-missing-imports
 import scribe  # No Python 3 stubs available for scribe
 from scribe import x
-import docutils  # Python 3 stubs available for docutils
+import python2  # Python 3 stubs available for python2
 import foobar_asdf
 import jack  # This has a stubs package but was never bundled with mypy, so ignoring works
 [out]
-_testIgnoreImportIfNoPython3StubAvailable.py:4: error: Library stubs not installed for "docutils"
-_testIgnoreImportIfNoPython3StubAvailable.py:4: note: Hint: "python3 -m pip install types-docutils"
+_testIgnoreImportIfNoPython3StubAvailable.py:4: error: Library stubs not installed for "python2"
+_testIgnoreImportIfNoPython3StubAvailable.py:4: note: Hint: "python3 -m pip install types-six"
 _testIgnoreImportIfNoPython3StubAvailable.py:4: note: (or run "mypy --install-types" to install all missing stub packages)
 _testIgnoreImportIfNoPython3StubAvailable.py:4: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
 
 [case testNoPython3StubAvailable]
 import scribe
 from scribe import x
-import docutils
+import python2
 [out]
 _testNoPython3StubAvailable.py:1: error: Cannot find implementation or library stub for module named "scribe"
 _testNoPython3StubAvailable.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
-_testNoPython3StubAvailable.py:3: error: Library stubs not installed for "docutils"
-_testNoPython3StubAvailable.py:3: note: Hint: "python3 -m pip install types-docutils"
+_testNoPython3StubAvailable.py:3: error: Library stubs not installed for "python2"
+_testNoPython3StubAvailable.py:3: note: Hint: "python3 -m pip install types-six"
 _testNoPython3StubAvailable.py:3: note: (or run "mypy --install-types" to install all missing stub packages)
 
 

From c224da5c7c414f92ded4b7816d16d5dd4ed32193 Mon Sep 17 00:00:00 2001
From: Christoph Tyralla <c.tyralla@bjoernsen.de>
Date: Mon, 4 Dec 2023 08:13:58 +0100
Subject: [PATCH 283/288] Do not intersect types in isinstance checks if at
 least one is final (#16330)

Fixes #15148

I think it also fixes the [initial
bug](https://github.com/python/mypy/issues/12163#issue-1131262225)
reported in #12163 (this is why I added a TypeVar test case) but not
[this
bug](https://github.com/python/mypy/issues/12163#issuecomment-1035865305)
reported later in the same issue.
---
 mypy/checker.py                     | 13 +++-
 mypy/messages.py                    |  2 +-
 test-data/unit/check-narrowing.test | 99 +++++++++++++++++++++++++++++
 3 files changed, 111 insertions(+), 3 deletions(-)

diff --git a/mypy/checker.py b/mypy/checker.py
index 7c6f59fafdc8..979a55b223c9 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -5254,6 +5254,15 @@ def _make_fake_typeinfo_and_full_name(
         pretty_names_list = pretty_seq(
             format_type_distinctly(*base_classes, options=self.options, bare=True), "and"
         )
+
+        new_errors = []
+        for base in base_classes:
+            if base.type.is_final:
+                new_errors.append((pretty_names_list, f'"{base.type.name}" is final'))
+        if new_errors:
+            errors.extend(new_errors)
+            return None
+
         try:
             info, full_name = _make_fake_typeinfo_and_full_name(base_classes, curr_module)
             with self.msg.filter_errors() as local_errors:
@@ -5266,10 +5275,10 @@ def _make_fake_typeinfo_and_full_name(
                     self.check_multiple_inheritance(info)
             info.is_intersection = True
         except MroError:
-            errors.append((pretty_names_list, "inconsistent method resolution order"))
+            errors.append((pretty_names_list, "would have inconsistent method resolution order"))
             return None
         if local_errors.has_new_errors():
-            errors.append((pretty_names_list, "incompatible method signatures"))
+            errors.append((pretty_names_list, "would have incompatible method signatures"))
             return None
 
         curr_module.names[full_name] = SymbolTableNode(GDEF, info)
diff --git a/mypy/messages.py b/mypy/messages.py
index ddb048444695..069c4d51e281 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -2051,7 +2051,7 @@ def redundant_expr(self, description: str, truthiness: bool, context: Context) -
     def impossible_intersection(
         self, formatted_base_class_list: str, reason: str, context: Context
     ) -> None:
-        template = "Subclass of {} cannot exist: would have {}"
+        template = "Subclass of {} cannot exist: {}"
         self.fail(
             template.format(formatted_base_class_list, reason), context, code=codes.UNREACHABLE
         )
diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test
index d0ad1367aca0..a2859dfffa3a 100644
--- a/test-data/unit/check-narrowing.test
+++ b/test-data/unit/check-narrowing.test
@@ -1020,6 +1020,105 @@ else:
     reveal_type(true_or_false)  # N: Revealed type is "Literal[False]"
 [builtins fixtures/primitives.pyi]
 
+
+[case testNarrowingIsInstanceFinalSubclass]
+# flags: --warn-unreachable
+
+from typing import final
+
+class N: ...
+@final
+class F1: ...
+@final
+class F2: ...
+
+n: N
+f1: F1
+
+if isinstance(f1, F1):
+    reveal_type(f1)  # N: Revealed type is "__main__.F1"
+else:
+    reveal_type(f1)  # E: Statement is unreachable
+
+if isinstance(n, F1):  # E: Subclass of "N" and "F1" cannot exist: "F1" is final
+    reveal_type(n)  # E: Statement is unreachable
+else:
+    reveal_type(n)  # N: Revealed type is "__main__.N"
+
+if isinstance(f1, N):  # E: Subclass of "F1" and "N" cannot exist: "F1" is final
+    reveal_type(f1)  # E: Statement is unreachable
+else:
+    reveal_type(f1)  # N: Revealed type is "__main__.F1"
+
+if isinstance(f1, F2):  # E: Subclass of "F1" and "F2" cannot exist: "F1" is final \
+                        # E: Subclass of "F1" and "F2" cannot exist: "F2" is final
+    reveal_type(f1)  # E: Statement is unreachable
+else:
+    reveal_type(f1)  # N: Revealed type is "__main__.F1"
+[builtins fixtures/isinstance.pyi]
+
+
+[case testNarrowingIsInstanceFinalSubclassWithUnions]
+# flags: --warn-unreachable
+
+from typing import final, Union
+
+class N: ...
+@final
+class F1: ...
+@final
+class F2: ...
+
+n_f1: Union[N, F1]
+n_f2: Union[N, F2]
+f1_f2: Union[F1, F2]
+
+if isinstance(n_f1, F1):
+    reveal_type(n_f1)  # N: Revealed type is "__main__.F1"
+else:
+    reveal_type(n_f1)  # N: Revealed type is "__main__.N"
+
+if isinstance(n_f2, F1):  # E: Subclass of "N" and "F1" cannot exist: "F1" is final \
+                          # E: Subclass of "F2" and "F1" cannot exist: "F2" is final \
+                          # E: Subclass of "F2" and "F1" cannot exist: "F1" is final
+    reveal_type(n_f2)  # E: Statement is unreachable
+else:
+    reveal_type(n_f2)  # N: Revealed type is "Union[__main__.N, __main__.F2]"
+
+if isinstance(f1_f2, F1):
+    reveal_type(f1_f2)  # N: Revealed type is "__main__.F1"
+else:
+    reveal_type(f1_f2)  # N: Revealed type is "__main__.F2"
+[builtins fixtures/isinstance.pyi]
+
+
+[case testNarrowingIsSubclassFinalSubclassWithTypeVar]
+# flags: --warn-unreachable
+
+from typing import final, Type, TypeVar
+
+@final
+class A: ...
+@final
+class B: ...
+
+T = TypeVar("T", A, B)
+
+def f(cls: Type[T]) -> T:
+    if issubclass(cls, A):
+        reveal_type(cls)  # N: Revealed type is "Type[__main__.A]"
+        x: bool
+        if x:
+            return A()
+        else:
+            return B()  # E: Incompatible return value type (got "B", expected "A")
+    assert False
+
+reveal_type(f(A))  # N: Revealed type is "__main__.A"
+reveal_type(f(B))  # N: Revealed type is "__main__.B"
+[builtins fixtures/isinstance.pyi]
+
+
 [case testNarrowingLiteralIdentityCheck]
 from typing import Union
 from typing_extensions import Literal

From 7c33e7c03444ae748b82163e7b4e1666dfaf94c7 Mon Sep 17 00:00:00 2001
From: Ilya Priven <ilya.konstantinov@gmail.com>
Date: Mon, 4 Dec 2023 04:33:25 -0500
Subject: [PATCH 284/288] @final class without __bool__ cannot have falsey
 instances (#16566)

Once class C is final, we know that a derived class won't add a
`__bool__` or a `__len__` so if they're missing, we can assume every
instance of C to be truthy.

Relates to #16565
---
 mypy/typeops.py                 | 25 ++++++++++++-------
 test-data/unit/check-final.test | 44 +++++++++++++++++++++++++++++++++
 2 files changed, 60 insertions(+), 9 deletions(-)

diff --git a/mypy/typeops.py b/mypy/typeops.py
index e92fad0e872c..2bf8ffbf47ab 100644
--- a/mypy/typeops.py
+++ b/mypy/typeops.py
@@ -569,15 +569,15 @@ def _remove_redundant_union_items(items: list[Type], keep_erased: bool) -> list[
     return items
 
 
-def _get_type_special_method_bool_ret_type(t: Type) -> Type | None:
+def _get_type_method_ret_type(t: Type, *, name: str) -> Type | None:
     t = get_proper_type(t)
 
     if isinstance(t, Instance):
-        bool_method = t.type.get("__bool__")
-        if bool_method:
-            callee = get_proper_type(bool_method.type)
-            if isinstance(callee, CallableType):
-                return callee.ret_type
+        sym = t.type.get(name)
+        if sym:
+            sym_type = get_proper_type(sym.type)
+            if isinstance(sym_type, CallableType):
+                return sym_type.ret_type
 
     return None
 
@@ -600,7 +600,9 @@ def true_only(t: Type) -> ProperType:
         can_be_true_items = [item for item in new_items if item.can_be_true]
         return make_simplified_union(can_be_true_items, line=t.line, column=t.column)
     else:
-        ret_type = _get_type_special_method_bool_ret_type(t)
+        ret_type = _get_type_method_ret_type(t, name="__bool__") or _get_type_method_ret_type(
+            t, name="__len__"
+        )
 
         if ret_type and not ret_type.can_be_true:
             return UninhabitedType(line=t.line, column=t.column)
@@ -633,9 +635,14 @@ def false_only(t: Type) -> ProperType:
         can_be_false_items = [item for item in new_items if item.can_be_false]
         return make_simplified_union(can_be_false_items, line=t.line, column=t.column)
     else:
-        ret_type = _get_type_special_method_bool_ret_type(t)
+        ret_type = _get_type_method_ret_type(t, name="__bool__") or _get_type_method_ret_type(
+            t, name="__len__"
+        )
 
-        if ret_type and not ret_type.can_be_false:
+        if ret_type:
+            if not ret_type.can_be_false:
+                return UninhabitedType(line=t.line)
+        elif isinstance(t, Instance) and t.type.is_final:
             return UninhabitedType(line=t.line)
 
         new_t = copy_type(t)
diff --git a/test-data/unit/check-final.test b/test-data/unit/check-final.test
index a2fd64386707..b1378a47b1b1 100644
--- a/test-data/unit/check-final.test
+++ b/test-data/unit/check-final.test
@@ -1130,3 +1130,47 @@ class Child(Parent):
     __foo: Final[int] = 1
     @final
     def __bar(self) -> None: ...
+
+[case testFinalWithoutBool]
+from typing_extensions import final, Literal
+
+class A:
+    pass
+
+@final
+class B:
+    pass
+
+@final
+class C:
+    def __len__(self) -> Literal[1]: return 1
+
+reveal_type(A() and 42)  # N: Revealed type is "Union[__main__.A, Literal[42]?]"
+reveal_type(B() and 42)  # N: Revealed type is "Literal[42]?"
+reveal_type(C() and 42)  # N: Revealed type is "Literal[42]?"
+
+[builtins fixtures/bool.pyi]
+
+[case testFinalWithoutBoolButWithLen]
+from typing_extensions import final, Literal
+
+# Per Python data model, __len__ is called if __bool__ does not exist.
+# In a @final class, __bool__ would not exist.
+
+@final
+class A:
+    def __len__(self) -> int: ...
+
+@final
+class B:
+    def __len__(self) -> Literal[1]: return 1
+
+@final
+class C:
+    def __len__(self) -> Literal[0]: return 0
+
+reveal_type(A() and 42)  # N: Revealed type is "Union[__main__.A, Literal[42]?]"
+reveal_type(B() and 42)  # N: Revealed type is "Literal[42]?"
+reveal_type(C() and 42)  # N: Revealed type is "__main__.C"
+
+[builtins fixtures/bool.pyi]

From f53f4222bbb12d49612657a48b4f2b77e15402fd Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Thu, 7 Dec 2023 10:02:57 -0800
Subject: [PATCH 285/288] Allow type ignores of PEP 695 constructs (#16608)

This is basically a pre-existing bug and affects other errors that
ASTConverter might raise, like merging overloads.

It could vaguely be nice to move all the set_file_ignored_lines into
fastparse, instead of BuildManager.parse_file. Could also clean up the
ignore_errors logic a little bit more.

Fixes #16607
---
 misc/dump-ast.py                     |  4 ++--
 mypy/build.py                        |  4 ++--
 mypy/fastparse.py                    | 31 ++++++++++++++--------------
 mypy/parse.py                        | 12 +++++++++--
 mypy/test/testparse.py               | 16 +++++++++++---
 test-data/unit/check-errorcodes.test |  6 ++++--
 test-data/unit/check-python312.test  |  5 +++++
 7 files changed, 52 insertions(+), 26 deletions(-)

diff --git a/misc/dump-ast.py b/misc/dump-ast.py
index 6f70bbc8c9ed..7fdf905bae0b 100755
--- a/misc/dump-ast.py
+++ b/misc/dump-ast.py
@@ -9,7 +9,7 @@
 import sys
 
 from mypy import defaults
-from mypy.errors import CompileError
+from mypy.errors import CompileError, Errors
 from mypy.options import Options
 from mypy.parse import parse
 
@@ -19,7 +19,7 @@ def dump(fname: str, python_version: tuple[int, int], quiet: bool = False) -> No
     options.python_version = python_version
     with open(fname, "rb") as f:
         s = f.read()
-        tree = parse(s, fname, None, errors=None, options=options)
+        tree = parse(s, fname, None, errors=Errors(options), options=options)
         if not quiet:
             print(tree)
 
diff --git a/mypy/build.py b/mypy/build.py
index 961198fc2fa4..b3ca8d06916d 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -2174,8 +2174,8 @@ def parse_file(self, *, temporary: bool = False) -> None:
                     self.id,
                     self.xpath,
                     source,
-                    self.ignore_all or self.options.ignore_errors,
-                    self.options,
+                    ignore_errors=self.ignore_all or self.options.ignore_errors,
+                    options=self.options,
                 )
 
             else:
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 95d99db84a15..cba01eab2e4e 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -190,7 +190,7 @@ def parse(
     source: str | bytes,
     fnam: str,
     module: str | None,
-    errors: Errors | None = None,
+    errors: Errors,
     options: Options | None = None,
 ) -> MypyFile:
     """Parse a source file, without doing any semantic analysis.
@@ -199,16 +199,13 @@ def parse(
     on failure. Otherwise, use the errors object to report parse errors.
     """
     ignore_errors = (options is not None and options.ignore_errors) or (
-        errors is not None and fnam in errors.ignored_files
+        fnam in errors.ignored_files
     )
     # If errors are ignored, we can drop many function bodies to speed up type checking.
     strip_function_bodies = ignore_errors and (options is None or not options.preserve_asts)
-    raise_on_error = False
+
     if options is None:
         options = Options()
-    if errors is None:
-        errors = Errors(options)
-        raise_on_error = True
     errors.set_file(fnam, module, options=options)
     is_stub_file = fnam.endswith(".pyi")
     if is_stub_file:
@@ -228,11 +225,9 @@ def parse(
             options=options,
             is_stub=is_stub_file,
             errors=errors,
-            ignore_errors=ignore_errors,
             strip_function_bodies=strip_function_bodies,
+            path=fnam,
         ).visit(ast)
-        tree.path = fnam
-        tree.is_stub = is_stub_file
     except SyntaxError as e:
         # alias to please mypyc
         is_py38_or_earlier = sys.version_info < (3, 9)
@@ -254,9 +249,6 @@ def parse(
         )
         tree = MypyFile([], [], False, {})
 
-    if raise_on_error and errors.is_errors():
-        errors.raise_error()
-
     assert isinstance(tree, MypyFile)
     return tree
 
@@ -357,8 +349,8 @@ def __init__(
         is_stub: bool,
         errors: Errors,
         *,
-        ignore_errors: bool,
         strip_function_bodies: bool,
+        path: str,
     ) -> None:
         # 'C' for class, 'D' for function signature, 'F' for function, 'L' for lambda
         self.class_and_function_stack: list[Literal["C", "D", "F", "L"]] = []
@@ -367,8 +359,8 @@ def __init__(
         self.options = options
         self.is_stub = is_stub
         self.errors = errors
-        self.ignore_errors = ignore_errors
         self.strip_function_bodies = strip_function_bodies
+        self.path = path
 
         self.type_ignores: dict[int, list[str]] = {}
 
@@ -380,6 +372,10 @@ def note(self, msg: str, line: int, column: int) -> None:
 
     def fail(self, msg: ErrorMessage, line: int, column: int, blocker: bool = True) -> None:
         if blocker or not self.options.ignore_errors:
+            # Make sure self.errors reflects any type ignores that we have parsed
+            self.errors.set_file_ignored_lines(
+                self.path, self.type_ignores, self.options.ignore_errors
+            )
             self.errors.report(line, column, msg.value, blocker=blocker, code=msg.code)
 
     def fail_merge_overload(self, node: IfStmt) -> None:
@@ -858,8 +854,13 @@ def visit_Module(self, mod: ast3.Module) -> MypyFile:
                 self.type_ignores[ti.lineno] = parsed
             else:
                 self.fail(message_registry.INVALID_TYPE_IGNORE, ti.lineno, -1, blocker=False)
+
         body = self.fix_function_overloads(self.translate_stmt_list(mod.body, ismodule=True))
-        return MypyFile(body, self.imports, False, self.type_ignores)
+
+        ret = MypyFile(body, self.imports, False, ignored_lines=self.type_ignores)
+        ret.is_stub = self.is_stub
+        ret.path = self.path
+        return ret
 
     # --- stmt ---
     # FunctionDef(identifier name, arguments args,
diff --git a/mypy/parse.py b/mypy/parse.py
index 8bf9983967ba..ee61760c0ac0 100644
--- a/mypy/parse.py
+++ b/mypy/parse.py
@@ -6,7 +6,12 @@
 
 
 def parse(
-    source: str | bytes, fnam: str, module: str | None, errors: Errors | None, options: Options
+    source: str | bytes,
+    fnam: str,
+    module: str | None,
+    errors: Errors,
+    options: Options,
+    raise_on_error: bool = False,
 ) -> MypyFile:
     """Parse a source file, without doing any semantic analysis.
 
@@ -19,4 +24,7 @@ def parse(
         source = options.transform_source(source)
     import mypy.fastparse
 
-    return mypy.fastparse.parse(source, fnam=fnam, module=module, errors=errors, options=options)
+    tree = mypy.fastparse.parse(source, fnam=fnam, module=module, errors=errors, options=options)
+    if raise_on_error and errors.is_errors():
+        errors.raise_error()
+    return tree
diff --git a/mypy/test/testparse.py b/mypy/test/testparse.py
index 0140eb072821..e33fa7e53ff0 100644
--- a/mypy/test/testparse.py
+++ b/mypy/test/testparse.py
@@ -8,7 +8,7 @@
 
 from mypy import defaults
 from mypy.config_parser import parse_mypy_comments
-from mypy.errors import CompileError
+from mypy.errors import CompileError, Errors
 from mypy.options import Options
 from mypy.parse import parse
 from mypy.test.data import DataDrivenTestCase, DataSuite
@@ -51,7 +51,12 @@ def test_parser(testcase: DataDrivenTestCase) -> None:
 
     try:
         n = parse(
-            bytes(source, "ascii"), fnam="main", module="__main__", errors=None, options=options
+            bytes(source, "ascii"),
+            fnam="main",
+            module="__main__",
+            errors=Errors(options),
+            options=options,
+            raise_on_error=True,
         )
         a = n.str_with_options(options).split("\n")
     except CompileError as e:
@@ -82,7 +87,12 @@ def test_parse_error(testcase: DataDrivenTestCase) -> None:
             skip()
         # Compile temporary file. The test file contains non-ASCII characters.
         parse(
-            bytes("\n".join(testcase.input), "utf-8"), INPUT_FILE_NAME, "__main__", None, options
+            bytes("\n".join(testcase.input), "utf-8"),
+            INPUT_FILE_NAME,
+            "__main__",
+            errors=Errors(options),
+            options=options,
+            raise_on_error=True,
         )
         raise AssertionError("No errors reported")
     except CompileError as e:
diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test
index 28487a456156..1dd058730f28 100644
--- a/test-data/unit/check-errorcodes.test
+++ b/test-data/unit/check-errorcodes.test
@@ -975,11 +975,13 @@ def f(d: D, s: str) -> None:
 [typing fixtures/typing-typeddict.pyi]
 
 [case testRecommendErrorCode]
-# type: ignore[whatever]  # E: type ignore with error code is not supported for modules; use `# mypy: disable-error-code="whatever"`  [syntax]
+# type: ignore[whatever]  # E: type ignore with error code is not supported for modules; use `# mypy: disable-error-code="whatever"`  [syntax] \
+                          # N: Error code "syntax" not covered by "type: ignore" comment
 1 + "asdf"
 
 [case testRecommendErrorCode2]
-# type: ignore[whatever, other]  # E: type ignore with error code is not supported for modules; use `# mypy: disable-error-code="whatever, other"`  [syntax]
+# type: ignore[whatever, other]  # E: type ignore with error code is not supported for modules; use `# mypy: disable-error-code="whatever, other"`  [syntax] \
+                                 # N: Error code "syntax" not covered by "type: ignore" comment
 1 + "asdf"
 
 [case testShowErrorCodesInConfig]
diff --git a/test-data/unit/check-python312.test b/test-data/unit/check-python312.test
index cb89eb34880c..285563c19991 100644
--- a/test-data/unit/check-python312.test
+++ b/test-data/unit/check-python312.test
@@ -11,6 +11,11 @@ def g(x: MyList[int]) -> MyList[int]:  # E: Variable "__main__.MyList" is not va
                                        # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
     return reveal_type(x)  # N: Revealed type is "MyList?[builtins.int]"
 
+type MyInt2 = int  # type: ignore[valid-type]
+
+def h(x: MyInt2) -> MyInt2:
+    return reveal_type(x)  # N: Revealed type is "builtins.int"
+
 [case test695Class]
 class MyGen[T]:  # E: PEP 695 generics are not yet supported
     def __init__(self, x: T) -> None:  # E: Name "T" is not defined

From 60d30e36c49a2753de2d71f7dd50f5143bafd307 Mon Sep 17 00:00:00 2001
From: Alex Waygood <Alex.Waygood@Gmail.com>
Date: Tue, 12 Dec 2023 19:12:26 +0000
Subject: [PATCH 286/288] Fix crash with type alias to
 `Callable[[Unpack[Tuple[Any, ...]]], Any]` (#16541)

Fixes #16533
---
 mypy/expandtype.py                      | 28 ++++++++++-------
 test-data/unit/check-typevar-tuple.test | 42 +++++++++++++++++++++++++
 2 files changed, 59 insertions(+), 11 deletions(-)

diff --git a/mypy/expandtype.py b/mypy/expandtype.py
index 3acec4b96d06..f6aa74add9d8 100644
--- a/mypy/expandtype.py
+++ b/mypy/expandtype.py
@@ -307,18 +307,24 @@ def interpolate_args_for_unpack(self, t: CallableType, var_arg: UnpackType) -> l
         suffix = self.expand_types(t.arg_types[star_index + 1 :])
 
         var_arg_type = get_proper_type(var_arg.type)
-        # We have something like Unpack[Tuple[Unpack[Ts], X1, X2]]
-        if isinstance(var_arg_type, TupleType):
-            expanded_tuple = var_arg_type.accept(self)
-            assert isinstance(expanded_tuple, ProperType) and isinstance(expanded_tuple, TupleType)
-            expanded_items = expanded_tuple.items
-            fallback = var_arg_type.partial_fallback
+        if isinstance(var_arg_type, Instance):
+            # we have something like Unpack[Tuple[Any, ...]]
+            new_unpack = var_arg
         else:
-            # We have plain Unpack[Ts]
-            assert isinstance(var_arg_type, TypeVarTupleType)
-            fallback = var_arg_type.tuple_fallback
-            expanded_items = self.expand_unpack(var_arg)
-        new_unpack = UnpackType(TupleType(expanded_items, fallback))
+            if isinstance(var_arg_type, TupleType):
+                # We have something like Unpack[Tuple[Unpack[Ts], X1, X2]]
+                expanded_tuple = var_arg_type.accept(self)
+                assert isinstance(expanded_tuple, ProperType) and isinstance(
+                    expanded_tuple, TupleType
+                )
+                expanded_items = expanded_tuple.items
+                fallback = var_arg_type.partial_fallback
+            else:
+                # We have plain Unpack[Ts]
+                assert isinstance(var_arg_type, TypeVarTupleType), type(var_arg_type)
+                fallback = var_arg_type.tuple_fallback
+                expanded_items = self.expand_unpack(var_arg)
+            new_unpack = UnpackType(TupleType(expanded_items, fallback))
         return prefix + [new_unpack] + suffix
 
     def visit_callable_type(self, t: CallableType) -> CallableType:
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test
index 487f22699724..9c8d21114d4c 100644
--- a/test-data/unit/check-typevar-tuple.test
+++ b/test-data/unit/check-typevar-tuple.test
@@ -2235,3 +2235,45 @@ z: Tuple[int, Unpack[Tuple[int, ...]]] = (1,)
 w: Tuple[int, Unpack[Tuple[int, ...]]] = (1, *[2, 3, 4])
 t: Tuple[int, Unpack[Tuple[int, ...]]] = (1, *(2, 3, 4))
 [builtins fixtures/tuple.pyi]
+
+[case testAliasToCallableWithUnpack]
+from typing import Any, Callable, Tuple, Unpack
+
+_CallableValue = Callable[[Unpack[Tuple[Any, ...]]], Any]
+def higher_order(f: _CallableValue) -> None: ...
+
+def good1(*args: int) -> None: ...
+def good2(*args: str) -> int: ...
+
+def bad1(a: str, b: int, /) -> None: ...
+def bad2(c: bytes, *args: int) -> str: ...
+def bad3(*, d: str) -> int: ...
+def bad4(**kwargs: None) -> None: ...
+
+higher_order(good1)
+higher_order(good2)
+
+higher_order(bad1)  # E: Argument 1 to "higher_order" has incompatible type "Callable[[str, int], None]"; expected "Callable[[VarArg(Any)], Any]"
+higher_order(bad2)  # E: Argument 1 to "higher_order" has incompatible type "Callable[[bytes, VarArg(int)], str]"; expected "Callable[[VarArg(Any)], Any]"
+higher_order(bad3)  # E: Argument 1 to "higher_order" has incompatible type "Callable[[NamedArg(str, 'd')], int]"; expected "Callable[[VarArg(Any)], Any]"
+higher_order(bad4)  # E: Argument 1 to "higher_order" has incompatible type "Callable[[KwArg(None)], None]"; expected "Callable[[VarArg(Any)], Any]"
+[builtins fixtures/tuple.pyi]
+
+[case testAliasToCallableWithUnpack2]
+from typing import Any, Callable, Tuple, Unpack
+
+_CallableValue = Callable[[int, str, Unpack[Tuple[Any, ...]], int], Any]
+def higher_order(f: _CallableValue) -> None: ...
+
+def good(a: int, b: str, *args: Unpack[Tuple[Unpack[Tuple[Any, ...]], int]]) -> int: ...
+def bad1(a: str, b: int, /) -> None: ...
+def bad2(c: bytes, *args: int) -> str: ...
+def bad3(*, d: str) -> int: ...
+def bad4(**kwargs: None) -> None: ...
+
+higher_order(good)
+higher_order(bad1)  # E: Argument 1 to "higher_order" has incompatible type "Callable[[str, int], None]"; expected "Callable[[int, str, VarArg(Unpack[Tuple[Unpack[Tuple[Any, ...]], int]])], Any]"
+higher_order(bad2)  # E: Argument 1 to "higher_order" has incompatible type "Callable[[bytes, VarArg(int)], str]"; expected "Callable[[int, str, VarArg(Unpack[Tuple[Unpack[Tuple[Any, ...]], int]])], Any]"
+higher_order(bad3)  # E: Argument 1 to "higher_order" has incompatible type "Callable[[NamedArg(str, 'd')], int]"; expected "Callable[[int, str, VarArg(Unpack[Tuple[Unpack[Tuple[Any, ...]], int]])], Any]"
+higher_order(bad4)  # E: Argument 1 to "higher_order" has incompatible type "Callable[[KwArg(None)], None]"; expected "Callable[[int, str, VarArg(Unpack[Tuple[Unpack[Tuple[Any, ...]], int]])], Any]"
+[builtins fixtures/tuple.pyi]

From c9bc833bc8a64e3517a6843bbf982a37ee54f893 Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Tue, 12 Dec 2023 15:29:46 -0800
Subject: [PATCH 287/288] Fix tests broken by hatchling (#16655)

---
 test-data/packages/typedpkg-stubs/pyproject.toml      | 2 +-
 test-data/packages/typedpkg/pyproject.toml            | 2 +-
 test-data/packages/typedpkg_ns_a/pyproject.toml       | 2 +-
 test-data/packages/typedpkg_ns_b-stubs/pyproject.toml | 2 +-
 test-data/packages/typedpkg_ns_b/pyproject.toml       | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/test-data/packages/typedpkg-stubs/pyproject.toml b/test-data/packages/typedpkg-stubs/pyproject.toml
index 125816151ef8..c984c5d91e0a 100644
--- a/test-data/packages/typedpkg-stubs/pyproject.toml
+++ b/test-data/packages/typedpkg-stubs/pyproject.toml
@@ -7,5 +7,5 @@ description = 'test'
 include = ["**/*.pyi"]
 
 [build-system]
-requires = ["hatchling"]
+requires = ["hatchling==1.18"]
 build-backend = "hatchling.build"
diff --git a/test-data/packages/typedpkg/pyproject.toml b/test-data/packages/typedpkg/pyproject.toml
index 5269c94320e1..6b55d4b3df60 100644
--- a/test-data/packages/typedpkg/pyproject.toml
+++ b/test-data/packages/typedpkg/pyproject.toml
@@ -4,5 +4,5 @@ version = '0.1'
 description = 'test'
 
 [build-system]
-requires = ["hatchling"]
+requires = ["hatchling==1.18"]
 build-backend = "hatchling.build"
diff --git a/test-data/packages/typedpkg_ns_a/pyproject.toml b/test-data/packages/typedpkg_ns_a/pyproject.toml
index cc464af75b17..f41ad16b5bc2 100644
--- a/test-data/packages/typedpkg_ns_a/pyproject.toml
+++ b/test-data/packages/typedpkg_ns_a/pyproject.toml
@@ -7,5 +7,5 @@ description = 'test'
 include = ["**/*.py", "**/*.pyi", "**/py.typed"]
 
 [build-system]
-requires = ["hatchling"]
+requires = ["hatchling==1.18"]
 build-backend = "hatchling.build"
diff --git a/test-data/packages/typedpkg_ns_b-stubs/pyproject.toml b/test-data/packages/typedpkg_ns_b-stubs/pyproject.toml
index d5275d1ed8b3..2c1c206c361d 100644
--- a/test-data/packages/typedpkg_ns_b-stubs/pyproject.toml
+++ b/test-data/packages/typedpkg_ns_b-stubs/pyproject.toml
@@ -7,5 +7,5 @@ description = 'test'
 include = ["**/*.pyi"]
 
 [build-system]
-requires = ["hatchling"]
+requires = ["hatchling==1.18"]
 build-backend = "hatchling.build"
diff --git a/test-data/packages/typedpkg_ns_b/pyproject.toml b/test-data/packages/typedpkg_ns_b/pyproject.toml
index 8567af11152e..b8ae0d59072e 100644
--- a/test-data/packages/typedpkg_ns_b/pyproject.toml
+++ b/test-data/packages/typedpkg_ns_b/pyproject.toml
@@ -4,5 +4,5 @@ version = '0.1'
 description = 'test'
 
 [build-system]
-requires = ["hatchling"]
+requires = ["hatchling==1.18"]
 build-backend = "hatchling.build"

From 3b467509ee29b8f274c035d78a1c241a781eb311 Mon Sep 17 00:00:00 2001
From: Wesley Collin Wright <wesleyw@dropbox.com>
Date: Thu, 21 Dec 2023 01:00:58 +0000
Subject: [PATCH 288/288] remove +dev suffix from version

---
 mypy/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypy/version.py b/mypy/version.py
index 2c2c2b052da2..900fee26d80c 100644
--- a/mypy/version.py
+++ b/mypy/version.py
@@ -8,7 +8,7 @@
 # - Release versions have the form "1.2.3".
 # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440).
 # - Before 1.0 we had the form "0.NNN".
-__version__ = "1.8.0+dev"
+__version__ = "1.8.0"
 base_version = __version__
 
 mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))