1
0

Use type hinting generics in standard collections (#19046)

aka PEP 585, added in Python 3.9

 - https://peps.python.org/pep-0585/
 - https://docs.astral.sh/ruff/rules/non-pep585-annotation/
This commit is contained in:
Andrew Ferrazzutti
2025-10-22 17:48:19 -04:00
committed by GitHub
parent cba3a814c6
commit fc244bb592
539 changed files with 4599 additions and 5066 deletions

View File

@@ -20,7 +20,6 @@
#
from functools import partial
from typing import List, Tuple
from twisted.internet import defer
@@ -169,7 +168,7 @@ class DeferredCacheTestCase(TestCase):
self.assertEqual(v, 2)
def test_invalidate(self) -> None:
cache: DeferredCache[Tuple[str], int] = DeferredCache(
cache: DeferredCache[tuple[str], int] = DeferredCache(
name="test", clock=self.clock, server_name="test_server"
)
cache.prefill(("foo",), 123)
@@ -266,7 +265,7 @@ class DeferredCacheTestCase(TestCase):
cache.get(3)
def test_eviction_iterable(self) -> None:
cache: DeferredCache[int, List[str]] = DeferredCache(
cache: DeferredCache[int, list[str]] = DeferredCache(
name="test",
clock=self.clock,
server_name="test_server",

View File

@@ -23,12 +23,9 @@ from typing import (
Any,
Generator,
Iterable,
List,
Mapping,
NoReturn,
Optional,
Set,
Tuple,
cast,
)
from unittest import mock
@@ -257,7 +254,7 @@ class DescriptorTestCase(unittest.TestCase):
return self.result
obj = Cls()
callbacks: Set[str] = set()
callbacks: set[str] = set()
# set off an asynchronous request
origin_d: Deferred = Deferred()
@@ -435,7 +432,7 @@ class DescriptorTestCase(unittest.TestCase):
_, self.clock = get_clock() # nb must be called this for @cached
@descriptors.cached(iterable=True)
def fn(self, arg1: int, arg2: int) -> Tuple[str, ...]:
def fn(self, arg1: int, arg2: int) -> tuple[str, ...]:
return self.mock(arg1, arg2)
obj = Cls()
@@ -925,7 +922,7 @@ class CachedListDescriptorTestCase(unittest.TestCase):
pass
@descriptors.cachedList(cached_method_name="fn", list_name="args1")
def list_fn(self, args1: List[int]) -> "Deferred[Mapping[int, str]]":
def list_fn(self, args1: list[int]) -> "Deferred[Mapping[int, str]]":
return self.mock(args1)
obj = Cls()
@@ -970,7 +967,7 @@ class CachedListDescriptorTestCase(unittest.TestCase):
pass
@descriptors.cachedList(cached_method_name="fn", list_name="args1")
async def list_fn(self, args1: List[int], arg2: int) -> Mapping[int, str]:
async def list_fn(self, args1: list[int], arg2: int) -> Mapping[int, str]:
# we want this to behave like an asynchronous function
await run_on_reactor()
return self.mock(args1, arg2)
@@ -1012,7 +1009,7 @@ class CachedListDescriptorTestCase(unittest.TestCase):
pass
@cachedList(cached_method_name="fn", list_name="args")
async def list_fn(self, args: List[int]) -> Mapping[int, str]:
async def list_fn(self, args: list[int]) -> Mapping[int, str]:
await complete_lookup
return {arg: str(arg) for arg in args}
@@ -1049,7 +1046,7 @@ class CachedListDescriptorTestCase(unittest.TestCase):
pass
@cachedList(cached_method_name="fn", list_name="args")
async def list_fn(self, args: List[int]) -> Mapping[int, str]:
async def list_fn(self, args: list[int]) -> Mapping[int, str]:
await make_deferred_yieldable(complete_lookup)
self.inner_context_was_finished = current_context().finished
return {arg: str(arg) for arg in args}
@@ -1097,7 +1094,7 @@ class CachedListDescriptorTestCase(unittest.TestCase):
# of arguments as the underlying cached function, just with one of
# the arguments being an iterable
@descriptors.cachedList(cached_method_name="fn", list_name="keys")
def list_fn(self, keys: Iterable[Tuple[str, str]]) -> None:
def list_fn(self, keys: Iterable[tuple[str, str]]) -> None:
pass

View File

@@ -19,7 +19,7 @@
#
import logging
import traceback
from typing import Any, Coroutine, List, NoReturn, Optional, Tuple, TypeVar
from typing import Any, Coroutine, NoReturn, Optional, TypeVar
from parameterized import parameterized_class
@@ -71,7 +71,7 @@ class ObservableDeferredTest(TestCase):
observer1.addBoth(check_called_first)
# store the results
results: List[Optional[int]] = [None, None]
results: list[Optional[int]] = [None, None]
def check_val(res: int, idx: int) -> int:
results[idx] = res
@@ -102,7 +102,7 @@ class ObservableDeferredTest(TestCase):
observer1.addBoth(check_called_first)
# store the results
results: List[Optional[Failure]] = [None, None]
results: list[Optional[Failure]] = [None, None]
def check_failure(res: Failure, idx: int) -> None:
results[idx] = res
@@ -644,7 +644,7 @@ class AwakenableSleeperTests(TestCase):
class GatherCoroutineTests(TestCase):
"""Tests for `gather_optional_coroutines`"""
def make_coroutine(self) -> Tuple[Coroutine[Any, Any, T], "defer.Deferred[T]"]:
def make_coroutine(self) -> tuple[Coroutine[Any, Any, T], "defer.Deferred[T]"]:
"""Returns a coroutine and a deferred that it is waiting on to resolve"""
d: "defer.Deferred[T]" = defer.Deferred()

View File

@@ -18,7 +18,6 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import List, Tuple
from prometheus_client import Gauge
@@ -47,7 +46,7 @@ class BatchingQueueTestCase(HomeserverTestCase):
except KeyError:
pass
self._pending_calls: List[Tuple[List[str], defer.Deferred]] = []
self._pending_calls: list[tuple[list[str], defer.Deferred]] = []
self.queue: BatchingQueue[str, str] = BatchingQueue(
name="test_queue",
hs=self.hs,
@@ -55,7 +54,7 @@ class BatchingQueueTestCase(HomeserverTestCase):
process_batch_callback=self._process_queue,
)
async def _process_queue(self, values: List[str]) -> str:
async def _process_queue(self, values: list[str]) -> str:
d: "defer.Deferred[str]" = defer.Deferred()
self._pending_calls.append((values, d))
return await make_deferred_yieldable(d)

View File

@@ -19,7 +19,6 @@
#
#
from typing import List
from synapse.util.caches.expiringcache import ExpiringCache
@@ -65,7 +64,7 @@ class ExpiringCacheTestCase(unittest.HomeserverTestCase):
def test_iterable_eviction(self) -> None:
reactor, clock = get_clock()
cache: ExpiringCache[str, List[int]] = ExpiringCache(
cache: ExpiringCache[str, list[int]] = ExpiringCache(
cache_name="test",
server_name="testserver",
hs=self.hs,

View File

@@ -18,7 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import Dict, Iterable, List, Sequence
from typing import Iterable, Sequence
from synapse.util.iterutils import (
chunk_seq,
@@ -67,13 +67,13 @@ class SortTopologically(TestCase):
def test_empty(self) -> None:
"Test that an empty graph works correctly"
graph: Dict[int, List[int]] = {}
graph: dict[int, list[int]] = {}
self.assertEqual(list(sorted_topologically([], graph)), [])
def test_handle_empty_graph(self) -> None:
"Test that a graph where a node doesn't have an entry is treated as empty"
graph: Dict[int, List[int]] = {}
graph: dict[int, list[int]] = {}
# For disconnected nodes the output is simply sorted.
self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2])
@@ -81,7 +81,7 @@ class SortTopologically(TestCase):
def test_disconnected(self) -> None:
"Test that a graph with no edges work"
graph: Dict[int, List[int]] = {1: [], 2: []}
graph: dict[int, list[int]] = {1: [], 2: []}
# For disconnected nodes the output is simply sorted.
self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2])
@@ -89,19 +89,19 @@ class SortTopologically(TestCase):
def test_linear(self) -> None:
"Test that a simple `4 -> 3 -> 2 -> 1` graph works"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
def test_subset(self) -> None:
"Test that only sorting a subset of the graph works"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
self.assertEqual(list(sorted_topologically([4, 3], graph)), [3, 4])
def test_fork(self) -> None:
"Test that a forked graph works"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]}
graph: dict[int, list[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]}
# Valid orderings are `[1, 3, 2, 4]` or `[1, 2, 3, 4]`, but we should
# always get the same one.
@@ -109,13 +109,13 @@ class SortTopologically(TestCase):
def test_duplicates(self) -> None:
"Test that a graph with duplicate edges work"
graph: Dict[int, List[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]}
graph: dict[int, list[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]}
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
def test_multiple_paths(self) -> None:
"Test that a graph with multiple paths between two nodes work"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]}
graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]}
self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4])
@@ -126,13 +126,13 @@ class SortTopologicallyBatched(TestCase):
def test_empty(self) -> None:
"Test that an empty graph works correctly"
graph: Dict[int, List[int]] = {}
graph: dict[int, list[int]] = {}
self.assertEqual(list(sorted_topologically_batched([], graph)), [])
def test_handle_empty_graph(self) -> None:
"Test that a graph where a node doesn't have an entry is treated as empty"
graph: Dict[int, List[int]] = {}
graph: dict[int, list[int]] = {}
# For disconnected nodes the output is simply sorted.
self.assertEqual(list(sorted_topologically_batched([1, 2], graph)), [[1, 2]])
@@ -140,7 +140,7 @@ class SortTopologicallyBatched(TestCase):
def test_disconnected(self) -> None:
"Test that a graph with no edges work"
graph: Dict[int, List[int]] = {1: [], 2: []}
graph: dict[int, list[int]] = {1: [], 2: []}
# For disconnected nodes the output is simply sorted.
self.assertEqual(list(sorted_topologically_batched([1, 2], graph)), [[1, 2]])
@@ -148,7 +148,7 @@ class SortTopologicallyBatched(TestCase):
def test_linear(self) -> None:
"Test that a simple `4 -> 3 -> 2 -> 1` graph works"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
self.assertEqual(
list(sorted_topologically_batched([4, 3, 2, 1], graph)),
@@ -157,13 +157,13 @@ class SortTopologicallyBatched(TestCase):
def test_subset(self) -> None:
"Test that only sorting a subset of the graph works"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]}
self.assertEqual(list(sorted_topologically_batched([4, 3], graph)), [[3], [4]])
def test_fork(self) -> None:
"Test that a forked graph works"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]}
graph: dict[int, list[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]}
# Valid orderings are `[1, 3, 2, 4]` or `[1, 2, 3, 4]`, but we should
# always get the same one.
@@ -173,7 +173,7 @@ class SortTopologicallyBatched(TestCase):
def test_duplicates(self) -> None:
"Test that a graph with duplicate edges work"
graph: Dict[int, List[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]}
graph: dict[int, list[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]}
self.assertEqual(
list(sorted_topologically_batched([4, 3, 2, 1], graph)),
@@ -182,7 +182,7 @@ class SortTopologicallyBatched(TestCase):
def test_multiple_paths(self) -> None:
"Test that a graph with multiple paths between two nodes work"
graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]}
graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]}
self.assertEqual(
list(sorted_topologically_batched([4, 3, 2, 1], graph)),

View File

@@ -19,7 +19,7 @@
#
#
from typing import Hashable, Protocol, Tuple
from typing import Hashable, Protocol
from twisted.internet import defer
from twisted.internet.defer import CancelledError, Deferred
@@ -43,7 +43,7 @@ class LinearizerTestCase(unittest.TestCase):
def _start_task(
self, linearizer: Linearizer, key: Hashable
) -> Tuple["Deferred[None]", "Deferred[None]", UnblockFunction]:
) -> tuple["Deferred[None]", "Deferred[None]", UnblockFunction]:
"""Starts a task which acquires the linearizer lock, blocks, then completes.
Args:

View File

@@ -20,7 +20,6 @@
#
from typing import List, Tuple
from unittest.mock import Mock, patch
from synapse.metrics.jemalloc import JemallocStats
@@ -84,7 +83,7 @@ class LruCacheTestCase(unittest.HomeserverTestCase):
def test_del_multi(self) -> None:
# The type here isn't quite correct as they don't handle TreeCache well.
cache: LruCache[Tuple[str, str], str] = LruCache(
cache: LruCache[tuple[str, str], str] = LruCache(
max_size=4,
clock=self.clock,
cache_type=TreeCache,
@@ -211,7 +210,7 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
m3 = Mock()
m4 = Mock()
# The type here isn't quite correct as they don't handle TreeCache well.
cache: LruCache[Tuple[str, str], str] = LruCache(
cache: LruCache[tuple[str, str], str] = LruCache(
max_size=4,
clock=self.clock,
cache_type=TreeCache,
@@ -295,7 +294,7 @@ class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
class LruCacheSizedTestCase(unittest.HomeserverTestCase):
def test_evict(self) -> None:
cache: LruCache[str, List[int]] = LruCache(
cache: LruCache[str, list[int]] = LruCache(
max_size=5, clock=self.clock, size_callback=len, server_name="test_server"
)
cache["key1"] = [0]
@@ -320,7 +319,7 @@ class LruCacheSizedTestCase(unittest.HomeserverTestCase):
def test_zero_size_drop_from_cache(self) -> None:
"""Test that `drop_from_cache` works correctly with 0-sized entries."""
cache: LruCache[str, List[int]] = LruCache(
cache: LruCache[str, list[int]] = LruCache(
max_size=5,
clock=self.clock,
size_callback=lambda x: 0,

View File

@@ -13,7 +13,6 @@
#
import unittest
from typing import Dict
from synapse.util import MutableOverlayMapping
@@ -24,7 +23,7 @@ class TestMutableOverlayMapping(unittest.TestCase):
def test_init(self) -> None:
"""Test initialization with different input types."""
# Test with empty dict
empty_dict: Dict[str, int] = {}
empty_dict: dict[str, int] = {}
mapping = MutableOverlayMapping(empty_dict)
self.assertEqual(len(mapping), 0)

View File

@@ -19,7 +19,7 @@
#
#
from typing import AsyncContextManager, Callable, Sequence, Tuple
from typing import AsyncContextManager, Callable, Sequence
from twisted.internet import defer
from twisted.internet.defer import CancelledError, Deferred
@@ -35,7 +35,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
read_or_write: Callable[[str], AsyncContextManager],
key: str,
return_value: str,
) -> Tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]:
) -> tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]:
"""Starts a reader or writer which acquires the lock, blocks, then completes.
Args:
@@ -67,7 +67,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
def _start_blocking_reader(
self, rwlock: ReadWriteLock, key: str, return_value: str
) -> Tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]:
) -> tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]:
"""Starts a reader which acquires the lock, blocks, then releases the lock.
See the docstring for `_start_reader_or_writer` for details about the arguments
@@ -77,7 +77,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
def _start_blocking_writer(
self, rwlock: ReadWriteLock, key: str, return_value: str
) -> Tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]:
) -> tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]:
"""Starts a writer which acquires the lock, blocks, then releases the lock.
See the docstring for `_start_reader_or_writer` for details about the arguments
@@ -87,7 +87,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
def _start_nonblocking_reader(
self, rwlock: ReadWriteLock, key: str, return_value: str
) -> Tuple["Deferred[str]", "Deferred[None]"]:
) -> tuple["Deferred[str]", "Deferred[None]"]:
"""Starts a reader which acquires the lock, then releases it immediately.
See the docstring for `_start_reader_or_writer` for details about the arguments.
@@ -106,7 +106,7 @@ class ReadWriteLockTestCase(unittest.TestCase):
def _start_nonblocking_writer(
self, rwlock: ReadWriteLock, key: str, return_value: str
) -> Tuple["Deferred[str]", "Deferred[None]"]:
) -> tuple["Deferred[str]", "Deferred[None]"]:
"""Starts a writer which acquires the lock, then releases it immediately.
See the docstring for `_start_reader_or_writer` for details about the arguments.

View File

@@ -18,7 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import List, Optional, Tuple
from typing import Optional
from twisted.internet.task import deferLater
from twisted.internet.testing import MemoryReactor
@@ -42,7 +42,7 @@ class TestTaskScheduler(HomeserverTestCase):
async def _test_task(
self, task: ScheduledTask
) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
# This test task will copy the parameters to the result
result = None
if task.params:
@@ -85,7 +85,7 @@ class TestTaskScheduler(HomeserverTestCase):
async def _sleeping_task(
self, task: ScheduledTask
) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
# Sleep for a second
await deferLater(self.reactor, 1, lambda: None)
return TaskStatus.COMPLETE, None, None
@@ -103,7 +103,7 @@ class TestTaskScheduler(HomeserverTestCase):
)
)
def get_tasks_of_status(status: TaskStatus) -> List[ScheduledTask]:
def get_tasks_of_status(status: TaskStatus) -> list[ScheduledTask]:
tasks = (
self.get_success(self.task_scheduler.get_task(task_id))
for task_id in task_ids
@@ -151,7 +151,7 @@ class TestTaskScheduler(HomeserverTestCase):
async def _raising_task(
self, task: ScheduledTask
) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
raise Exception("raising")
def test_schedule_raising_task(self) -> None:
@@ -165,7 +165,7 @@ class TestTaskScheduler(HomeserverTestCase):
async def _resumable_task(
self, task: ScheduledTask
) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
if task.result and "in_progress" in task.result:
return TaskStatus.COMPLETE, {"success": True}, None
else:
@@ -201,7 +201,7 @@ class TestTaskSchedulerWithBackgroundWorker(BaseMultiWorkerStreamTestCase):
async def _test_task(
self, task: ScheduledTask
) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
return (TaskStatus.COMPLETE, None, None)
@override_config({"run_background_tasks_on": "worker1"})