
Compare commits


17 Commits

Author SHA1 Message Date
Half-Shot
0ce56923da Update delayed events to support no tokens 2025-10-27 16:46:00 +00:00
Shay
f1695ac20e Add an admin API to get the space hierarchy (#19021)
It is often useful when investigating a space to get information about
that space and its children. This PR adds an Admin API to return
information about a space and its children, regardless of room
membership. It will not fetch information over federation about remote
rooms that the server is not participating in.
2025-10-24 15:32:16 -05:00
Andrew Ferrazzutti
9d81bb703c Always treat RETURNING as supported by SQL engines (#19047)
We can do this now that SQLite 3.35.0 added support for `RETURNING`.

> The RETURNING syntax has been supported by SQLite since version 3.35.0
(2021-03-12).
>
> *-- https://sqlite.org/lang_returning.html*

This also bumps the minimum supported SQLite version according to
Synapse's [deprecation
policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html#platform-dependencies).

Fix https://github.com/element-hq/synapse/issues/17577
2025-10-24 13:21:49 -05:00
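For readers who haven't used it, `RETURNING` lets a write statement hand back values from the rows it touched, avoiding a follow-up `SELECT` (or `last_insert_rowid()`). A minimal sketch of the pattern using Python's built-in `sqlite3` module; the table and column names here are illustrative, not Synapse's schema, and this assumes the interpreter links against SQLite 3.35.0 or newer:

```python
import sqlite3

# RETURNING requires the underlying SQLite library to be >= 3.35.0.
assert sqlite3.sqlite_version_info >= (3, 35, 0)

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE events (id INTEGER PRIMARY KEY, body TEXT)")

# The INSERT itself hands back the generated primary key -- one round trip,
# with no separate SELECT afterwards.
row = conn.execute(
    "INSERT INTO events (body) VALUES (?) RETURNING id", ("hello",)
).fetchone()
print(row[0])
```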
dependabot[bot]
40893be93c Bump idna from 3.10 to 3.11 (#19053)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-24 13:24:16 +01:00
dependabot[bot]
1419b35a40 Bump ijson from 3.4.0 to 3.4.0.post0 (#19051)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-24 13:18:25 +01:00
dependabot[bot]
a2fa61d1b5 Bump msgpack from 1.1.1 to 1.1.2 (#19050)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-24 13:17:35 +01:00
Andrew Morgan
123eff1bc0 Update poetry dev dependencies name (#19081) 2025-10-24 11:19:40 +01:00
Andrew Morgan
a092d2053a Fix deprecation warning in release script (#19080) 2025-10-24 11:19:04 +01:00
Andrew Morgan
45a042ae88 Remove cibuildwheel pp38* skip selector (#19085) 2025-10-24 10:39:29 +01:00
Andrew Morgan
72d0de9f30 Don't exit the release script if there are uncommitted changes (#19088) 2025-10-24 10:39:06 +01:00
Andrew Morgan
5556b491c1 Spruce up generated announcement text in the release script (#19089) 2025-10-24 10:19:44 +01:00
Bryce Servis
b835eb253c Make optional networking and security settings for Redis more apparent in workers.md (#19073)
I couldn't find any documentation on how to set up TLS
communication between Synapse and Redis, so I looked through the source
code and found it. I figured I should go ahead and document it here.
2025-10-23 10:10:10 -05:00
Andrew Ferrazzutti
fc244bb592 Use type hinting generics in standard collections (#19046)
aka PEP 585, added in Python 3.9

 - https://peps.python.org/pep-0585/
 - https://docs.astral.sh/ruff/rules/non-pep585-annotation/
2025-10-22 16:48:19 -05:00
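Concretely, PEP 585 makes the built-in collection types generic, so the `typing` aliases become redundant. A minimal before/after sketch (the function is illustrative, not taken from this changeset):

```python
# Before Python 3.9, annotations needed the typing aliases:
from typing import Dict, List

def tally_old(words: List[str]) -> Dict[str, int]: ...

# With PEP 585 (Python 3.9+), the built-ins are generic themselves:
def tally(words: list[str]) -> dict[str, int]:
    counts: dict[str, int] = {}
    for word in words:
        counts[word] = counts.get(word, 0) + 1
    return counts
```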
Eric Eastwood
cba3a814c6 Fix lints on develop (#19092)
Snuck in with
ff242faad0
2025-10-22 10:39:04 -05:00
Andrew Morgan
3b59ac3b69 Merge branch 'release-v1.141' into develop 2025-10-21 16:48:09 +01:00
Andrew Morgan
ff242faad0 Don't exit the release script if there are uncommitted changes
Instead, allow the user to fix them and retry.
2025-10-21 16:40:26 +01:00
Andrew Morgan
1271e896b5 1.141.0rc1 2025-10-21 11:12:59 +01:00
573 changed files with 5447 additions and 5498 deletions


@@ -99,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
# First calculate the various sytest jobs.
#
-# For each type of test we only run on bullseye on PRs
+# For each type of test we only run on bookworm on PRs
sytest_tests = [
    {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
    },
    {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
        "postgres": "postgres",
    },
    {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
        "postgres": "multi-postgres",
        "workers": "workers",
    },
    {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
        "postgres": "multi-postgres",
        "workers": "workers",
        "reactor": "asyncio",
@@ -127,11 +127,11 @@ if not IS_PR:
    sytest_tests.extend(
        [
            {
-                "sytest-tag": "bullseye",
+                "sytest-tag": "bookworm",
                "reactor": "asyncio",
            },
            {
-                "sytest-tag": "bullseye",
+                "sytest-tag": "bookworm",
                "postgres": "postgres",
                "reactor": "asyncio",
            },


@@ -139,9 +139,9 @@ jobs:
      fail-fast: false
      matrix:
        include:
-          - sytest-tag: bullseye
+          - sytest-tag: bookworm
-          - sytest-tag: bullseye
+          - sytest-tag: bookworm
            postgres: postgres
            workers: workers
            redis: redis


@@ -108,11 +108,11 @@ jobs:
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    container:
-      # We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version.
+      # We're using bookworm because that's what Debian oldstable is at the time of writing.
      # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
      # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
      # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
-      image: matrixdotorg/sytest-synapse:bullseye
+      image: matrixdotorg/sytest-synapse:bookworm
      volumes:
        - ${{ github.workspace }}:/src


@@ -1,3 +1,40 @@
+# Synapse 1.141.0rc1 (2025-10-21)
+
+## Deprecation of MacOS Python wheels
+
+The team has decided to deprecate and eventually stop publishing python wheels
+for MacOS. This is a burden on the team, and we're not aware of any parties
+that use them. Synapse docker images will continue to work on MacOS, as will
+building Synapse from source (though note this requires a Rust compiler).
+Publishing MacOS Python wheels will continue for the next few releases. If you
+do make use of these wheels downstream, please reach out to us in
+[#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd
+love to hear from you!
+
+## Features
+
+- Allow using [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) behavior without the opt-in registration flag. Contributed by @tulir @ Beeper. ([\#19031](https://github.com/element-hq/synapse/issues/19031))
+- Stabilized support for [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326): Device masquerading for appservices. Contributed by @tulir @ Beeper. ([\#19033](https://github.com/element-hq/synapse/issues/19033))
+
+## Bugfixes
+
+- Fix a bug introduced in 1.136.0 that would prevent Synapse from being able to be `reload`-ed more than once when running under systemd. ([\#19060](https://github.com/element-hq/synapse/issues/19060))
+- Fix a bug introduced in 1.140.0 where an internal server error could be raised when hashing user passwords that are too long. ([\#19078](https://github.com/element-hq/synapse/issues/19078))
+
+## Updates to the Docker image
+
+- Update docker image to use Debian trixie as the base and thus Python 3.13. ([\#19064](https://github.com/element-hq/synapse/issues/19064))
+
+## Internal Changes
+
+- Move unique snowflake homeserver background tasks to `start_background_tasks` (the standard pattern for this kind of thing). ([\#19037](https://github.com/element-hq/synapse/issues/19037))
+- Drop a deprecated field of the `PyGitHub` dependency in the release script and raise the dependency's minimum version to `1.59.0`. ([\#19039](https://github.com/element-hq/synapse/issues/19039))
+- Update TODO list of conflicting areas where we encounter metrics being clobbered (`ApplicationService`). ([\#19040](https://github.com/element-hq/synapse/issues/19040))
+
# Synapse 1.140.0 (2025-10-14)

## Compatibility notice for users of `synapse-s3-storage-provider`


@@ -2,13 +2,13 @@
import itertools
import os
-from typing import Any, Dict
+from typing import Any
from packaging.specifiers import SpecifierSet
from setuptools_rust import Binding, RustExtension
-def build(setup_kwargs: Dict[str, Any]) -> None:
+def build(setup_kwargs: dict[str, Any]) -> None:
    original_project_dir = os.path.dirname(os.path.realpath(__file__))
    cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")


@@ -0,0 +1,2 @@
+Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html)
+to allow an admin to fetch the space/room hierarchy for a given space.


@@ -1 +0,0 @@
-Allow using [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) behavior without the opt-in registration flag. Contributed by @tulir @ Beeper.


@@ -1 +0,0 @@
-Stabilized support for [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326): Device masquerading for appservices. Contributed by @tulir @ Beeper.


@@ -1 +0,0 @@
-Move unique snowflake homeserver background tasks to `start_background_tasks` (the standard pattern for this kind of thing).


@@ -1 +0,0 @@
-Drop a deprecated field of the `PyGitHub` dependency in the release script and raise the dependency's minimum version to `1.59.0`.


@@ -1 +0,0 @@
-Update TODO list of conflicting areas where we encounter metrics being clobbered (`ApplicationService`).

changelog.d/19046.misc

@@ -0,0 +1 @@
+Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9.

changelog.d/19047.doc

@@ -0,0 +1 @@
+Update the link to the Debian oldstable package for SQLite.

changelog.d/19047.misc

@@ -0,0 +1 @@
+Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it.


@@ -0,0 +1 @@
+Remove support for SQLite < 3.37.2.


@@ -1 +0,0 @@
-Fix a bug introduced in 1.136.0 that would prevent Synapse from being able to be `reload`-ed more than once when running under systemd.


@@ -1 +0,0 @@
-Update docker image to use Debian trixie as the base and thus Python 3.13.

changelog.d/19073.doc

@@ -0,0 +1 @@
+Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce.


@@ -1 +0,0 @@
-Fix a bug introduced in 1.140.0 where an internal server error could be raised when hashing user passwords that are too long.

changelog.d/19080.misc

@@ -0,0 +1 @@
+Update deprecated code in the release script to prevent a warning message from being printed.

changelog.d/19081.misc

@@ -0,0 +1 @@
+Update the deprecated poetry development dependencies group name in `pyproject.toml`.


@@ -1 +0,0 @@
-Warn the developer when they are releasing Synapse if a release workflow has been queued for over 15 minutes.

changelog.d/19085.misc

@@ -0,0 +1 @@
+Remove `pp38*` skip selector from cibuildwheel to silence warning.

changelog.d/19088.misc

@@ -0,0 +1 @@
+Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry.

changelog.d/19089.misc

@@ -0,0 +1 @@
+Update the release script's generated announcement text to include a title and extra text for RC's.

changelog.d/19092.misc

@@ -0,0 +1 @@
+Fix lints on main branch.


@@ -24,7 +24,6 @@ import datetime
import html
import json
import urllib.request
-from typing import List
import pydot
@@ -33,7 +32,7 @@ def make_name(pdu_id: str, origin: str) -> str:
    return f"{pdu_id}@{origin}"
-def make_graph(pdus: List[dict], filename_prefix: str) -> None:
+def make_graph(pdus: list[dict], filename_prefix: str) -> None:
    """
    Generate a dot and SVG file for a graph of events in the room based on the
    topological ordering by querying a homeserver.
@@ -127,7 +126,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
    graph.write_svg("%s.svg" % filename_prefix, prog="dot")
-def get_pdus(host: str, room: str) -> List[dict]:
+def get_pdus(host: str, room: str) -> list[dict]:
    transaction = json.loads(
        urllib.request.urlopen(
            f"http://{host}/_matrix/federation/v1/context/{room}/"

debian/changelog

@@ -1,3 +1,9 @@
+matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.141.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Oct 2025 11:01:44 +0100
+
matrix-synapse-py3 (1.140.0) stable; urgency=medium

  * New Synapse release 1.140.0.


@@ -65,13 +65,10 @@ from itertools import chain
from pathlib import Path
from typing import (
    Any,
-    Dict,
-    List,
    Mapping,
    MutableMapping,
    NoReturn,
    Optional,
-    Set,
    SupportsIndex,
)
@@ -96,7 +93,7 @@ WORKER_PLACEHOLDER_NAME = "placeholder_name"
# Watching /_matrix/media and related needs a "media" listener
# Stream Writers require "client" and "replication" listeners because they
# have to attach by instance_map to the master process and have client endpoints.
-WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
+WORKERS_CONFIG: dict[str, dict[str, Any]] = {
    "pusher": {
        "app": "synapse.app.generic_worker",
        "listener_resources": [],
@@ -408,7 +405,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
def add_worker_roles_to_shared_config(
    shared_config: dict,
-    worker_types_set: Set[str],
+    worker_types_set: set[str],
    worker_name: str,
    worker_port: int,
) -> None:
@@ -471,9 +468,9 @@ def add_worker_roles_to_shared_config(
def merge_worker_template_configs(
-    existing_dict: Optional[Dict[str, Any]],
-    to_be_merged_dict: Dict[str, Any],
-) -> Dict[str, Any]:
+    existing_dict: Optional[dict[str, Any]],
+    to_be_merged_dict: dict[str, Any],
+) -> dict[str, Any]:
    """When given an existing dict of worker template configuration consisting with both
    dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
    return new dict.
@@ -484,7 +481,7 @@ def merge_worker_template_configs(
            existing_dict.
    Returns: The newly merged together dict values.
    """
-    new_dict: Dict[str, Any] = {}
+    new_dict: dict[str, Any] = {}
    if not existing_dict:
        # It doesn't exist yet, just use the new dict(but take a copy not a reference)
        new_dict = to_be_merged_dict.copy()
@@ -509,8 +506,8 @@ def merge_worker_template_configs(
def insert_worker_name_for_worker_config(
-    existing_dict: Dict[str, Any], worker_name: str
-) -> Dict[str, Any]:
+    existing_dict: dict[str, Any], worker_name: str
+) -> dict[str, Any]:
    """Insert a given worker name into the worker's configuration dict.
    Args:
@@ -526,7 +523,7 @@ def insert_worker_name_for_worker_config(
    return dict_to_edit
-def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
+def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
    """
    Apply multiplier(if found) by returning a new expanded list with some basic error
    checking.
@@ -587,7 +584,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:
def split_and_strip_string(
    given_string: str, split_char: str, max_split: SupportsIndex = -1
-) -> List[str]:
+) -> list[str]:
    """
    Helper to split a string on split_char and strip whitespace from each end of each
    element.
@@ -616,8 +613,8 @@ def generate_base_homeserver_config() -> None:
def parse_worker_types(
-    requested_worker_types: List[str],
-) -> Dict[str, Set[str]]:
+    requested_worker_types: list[str],
+) -> dict[str, set[str]]:
    """Read the desired list of requested workers and prepare the data for use in
    generating worker config files while also checking for potential gotchas.
@@ -633,14 +630,14 @@ def parse_worker_types(
    # A counter of worker_base_name -> int. Used for determining the name for a given
    # worker when generating its config file, as each worker's name is just
    # worker_base_name followed by instance number
-    worker_base_name_counter: Dict[str, int] = defaultdict(int)
+    worker_base_name_counter: dict[str, int] = defaultdict(int)
    # Similar to above, but more finely grained. This is used to determine we don't have
    # more than a single worker for cases where multiples would be bad(e.g. presence).
-    worker_type_shard_counter: Dict[str, int] = defaultdict(int)
+    worker_type_shard_counter: dict[str, int] = defaultdict(int)
    # The final result of all this processing
-    dict_to_return: Dict[str, Set[str]] = {}
+    dict_to_return: dict[str, set[str]] = {}
    # Handle any multipliers requested for given workers.
    multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -684,7 +681,7 @@ def parse_worker_types(
        # Split the worker_type_string on "+", remove whitespace from ends then make
        # the list a set so it's deduplicated.
-        worker_types_set: Set[str] = set(
+        worker_types_set: set[str] = set(
            split_and_strip_string(worker_type_string, "+")
        )
@@ -743,7 +740,7 @@ def generate_worker_files(
    environ: Mapping[str, str],
    config_path: str,
    data_dir: str,
-    requested_worker_types: Dict[str, Set[str]],
+    requested_worker_types: dict[str, set[str]],
) -> None:
    """Read the desired workers(if any) that is passed in and generate shared
    homeserver, nginx and supervisord configs.
@@ -764,7 +761,7 @@ def generate_worker_files(
    # First read the original config file and extract the listeners block. Then we'll
    # add another listener for replication. Later we'll write out the result to the
    # shared config file.
-    listeners: List[Any]
+    listeners: list[Any]
    if using_unix_sockets:
        listeners = [
            {
@@ -792,12 +789,12 @@ def generate_worker_files(
    # base shared worker jinja2 template. This config file will be passed to all
    # workers, included Synapse's main process. It is intended mainly for disabling
    # functionality when certain workers are spun up, and adding a replication listener.
-    shared_config: Dict[str, Any] = {"listeners": listeners}
+    shared_config: dict[str, Any] = {"listeners": listeners}
    # List of dicts that describe workers.
    # We pass this to the Supervisor template later to generate the appropriate
    # program blocks.
-    worker_descriptors: List[Dict[str, Any]] = []
+    worker_descriptors: list[dict[str, Any]] = []
    # Upstreams for load-balancing purposes. This dict takes the form of the worker
    # type to the ports of each worker. For example:
@@ -805,14 +802,14 @@ def generate_worker_files(
    #     worker_type: {1234, 1235, ...}}
    # }
    # and will be used to construct 'upstream' nginx directives.
-    nginx_upstreams: Dict[str, Set[int]] = {}
+    nginx_upstreams: dict[str, set[int]] = {}
    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
    # will be placed after the proxy_pass directive. The main benefit to representing
    # this data as a dict over a str is that we can easily deduplicate endpoints
    # across multiple instances of the same worker. The final rendering will be combined
    # with nginx_upstreams and placed in /etc/nginx/conf.d.
-    nginx_locations: Dict[str, str] = {}
+    nginx_locations: dict[str, str] = {}
    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
@@ -846,7 +843,7 @@ def generate_worker_files(
    # yaml config file
    for worker_name, worker_types_set in requested_worker_types.items():
        # The collected and processed data will live here.
-        worker_config: Dict[str, Any] = {}
+        worker_config: dict[str, Any] = {}
        # Merge all worker config templates for this worker into a single config
        for worker_type in worker_types_set:
@@ -1029,7 +1026,7 @@ def generate_worker_log_config(
    Returns: the path to the generated file
    """
    # Check whether we should write worker logs to disk, in addition to the console
-    extra_log_template_args: Dict[str, Optional[str]] = {}
+    extra_log_template_args: dict[str, Optional[str]] = {}
    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
        extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"
@@ -1053,7 +1050,7 @@ def generate_worker_log_config(
    return log_config_filepath
-def main(args: List[str], environ: MutableMapping[str, str]) -> None:
+def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    parser = ArgumentParser()
    parser.add_argument(
        "--generate-only",
@@ -1087,7 +1084,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    if not worker_types_env:
        # No workers, just the main process
        worker_types = []
-        requested_worker_types: Dict[str, Any] = {}
+        requested_worker_types: dict[str, Any] = {}
    else:
        # Split type names by comma, ignoring whitespace.
        worker_types = split_and_strip_string(worker_types_env, ",")


@@ -6,7 +6,7 @@ import os
import platform
import subprocess
import sys
-from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
+from typing import Any, Mapping, MutableMapping, NoReturn, Optional
import jinja2
@@ -69,7 +69,7 @@ def generate_config_from_template(
    )
    # populate some params from data files (if they exist, else create new ones)
-    environ: Dict[str, Any] = dict(os_environ)
+    environ: dict[str, Any] = dict(os_environ)
    secrets = {
        "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
        "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -200,7 +200,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
    subprocess.run(args, check=True)
-def main(args: List[str], environ: MutableMapping[str, str]) -> None:
+def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    mode = args[1] if len(args) > 1 else "run"
    # if we were given an explicit user to switch to, do so


@@ -1115,3 +1115,76 @@ Example response:
  ]
}
```
+
+# Admin Space Hierarchy Endpoint
+
+This API allows an admin to fetch the space/room hierarchy for a given space,
+returning details about that room and any children the room may have, paginating
+over the space tree in a depth-first manner to locate child rooms. This is
+functionally similar to the [CS Hierarchy](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1roomsroomidhierarchy) endpoint but does not check for
+room membership when returning room summaries.
+
+The endpoint does not query other servers over federation about remote rooms
+that the server has not joined. This is a deliberate trade-off: while this
+means it will leave some holes in the hierarchy that we could otherwise
+sometimes fill in, it significantly improves the endpoint's response time, and
+the admin endpoint is designed for managing rooms local to the homeserver
+anyway.
+
+**Parameters**
+
+The following query parameters are available:
+
+* `from` - An optional pagination token, provided when there are more rooms to
+  return than the limit.
+* `limit` - Maximum amount of rooms to return. Must be a non-negative integer,
+  defaults to `50`.
+* `max_depth` - The maximum depth in the tree to explore, must be a non-negative
+  integer. 0 would correspond to just the root room, 1 would include just the
+  root room's children, etc. If not provided, it will recurse into the space
+  tree without limit.
+
+Request:
+
+```http
+GET /_synapse/admin/v1/rooms/<room_id>/hierarchy
+```
+
+Response:
+
+```json
+{
+  "rooms": [
+    {
+      "children_state": [
+        {
+          "content": {
+            "via": ["local_test_server"]
+          },
+          "origin_server_ts": 1500,
+          "sender": "@user:test",
+          "state_key": "!QrMkkqBSwYRIFNFCso:test",
+          "type": "m.space.child"
+        }
+      ],
+      "name": "space room",
+      "guest_can_join": false,
+      "join_rule": "public",
+      "num_joined_members": 1,
+      "room_id": "!sPOpNyMHbZAoAOsOFL:test",
+      "room_type": "m.space",
+      "world_readable": false
+    },
+    {
+      "children_state": [],
+      "guest_can_join": true,
+      "join_rule": "invite",
+      "name": "nefarious",
+      "num_joined_members": 1,
+      "room_id": "!QrMkkqBSwYRIFNFCso:test",
+      "topic": "being bad",
+      "world_readable": false
+    }
+  ],
+  "next_batch": "KUYmRbeSpAoaAIgOKGgyaCEn"
+}
+```
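To show how `limit`, `from`, and `next_batch` fit together, here is a minimal client sketch for walking an entire hierarchy; the base URL, admin token, and use of the third-party `requests` library are assumptions for illustration, not part of this changeset:

```python
import requests

def fetch_space_hierarchy(base_url: str, admin_token: str, room_id: str) -> list[dict]:
    """Collect every room summary in a space, following pagination tokens."""
    url = f"{base_url}/_synapse/admin/v1/rooms/{room_id}/hierarchy"
    headers = {"Authorization": f"Bearer {admin_token}"}
    params: dict[str, str] = {"limit": "50"}
    rooms: list[dict] = []
    while True:
        resp = requests.get(url, headers=headers, params=params)
        resp.raise_for_status()
        body = resp.json()
        rooms.extend(body["rooms"])
        token = body.get("next_batch")
        if not token:  # no more pages: the depth-first walk is complete
            return rooms
        params["from"] = token  # resume where the previous page left off
```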


@@ -21,7 +21,7 @@ people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).

The oldest supported version of SQLite is the version
-[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
+[provided](https://packages.debian.org/oldstable/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).


@@ -320,7 +320,7 @@ The following command will let you run the integration test with the most common
configuration:

```sh
-$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
+$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bookworm
```

(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)


@@ -120,6 +120,9 @@ worker_replication_secret: ""

redis:
  enabled: true
+  # For additional Redis configuration options (TLS, authentication, etc.),
+  # see the Synapse configuration documentation:
+  # https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#redis

instance_map:
  main:


@@ -69,7 +69,7 @@ warn_unused_ignores = False
;; https://github.com/python/typeshed/tree/master/stubs
;; and for each package `foo` there's a corresponding `types-foo` package on PyPI,
;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
-;; `[tool.poetry.dev-dependencies]` list.
+;; `[tool.poetry.group.dev.dependencies]` list.

# https://github.com/lepture/authlib/issues/460
[mypy-authlib.*]

poetry.lock

@@ -673,14 +673,14 @@ test = ["coverage[toml]", "pretend", "pytest", "pytest-cov"]
[[package]]
name = "idna"
version = "3.10"
version = "3.11"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
{file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"},
{file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"},
]
[package.extras]
@@ -688,97 +688,107 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2
[[package]]
name = "ijson"
version = "3.4.0"
version = "3.4.0.post0"
description = "Iterative JSON parser with standard Python iterator interfaces"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "ijson-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e27e50f6dcdee648f704abc5d31b976cd2f90b4642ed447cf03296d138433d09"},
{file = "ijson-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2a753be681ac930740a4af9c93cfb4edc49a167faed48061ea650dc5b0f406f1"},
{file = "ijson-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a07c47aed534e0ec198e6a2d4360b259d32ac654af59c015afc517ad7973b7fb"},
{file = "ijson-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c55f48181e11c597cd7146fb31edc8058391201ead69f8f40d2ecbb0b3e4fc6"},
{file = "ijson-3.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd5669f96f79d8a2dd5ae81cbd06770a4d42c435fd4a75c74ef28d9913b697d"},
{file = "ijson-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e3ddd46d16b8542c63b1b8af7006c758d4e21cc1b86122c15f8530fae773461"},
{file = "ijson-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1504cec7fe04be2bb0cc33b50c9dd3f83f98c0540ad4991d4017373b7853cfe6"},
{file = "ijson-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2f2ff456adeb216603e25d7915f10584c1b958b6eafa60038d76d08fc8a5fb06"},
{file = "ijson-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0ab00d75d61613a125fbbb524551658b1ad6919a52271ca16563ca5bc2737bb1"},
{file = "ijson-3.4.0-cp310-cp310-win32.whl", hash = "sha256:ada421fd59fe2bfa4cfa64ba39aeba3f0753696cdcd4d50396a85f38b1d12b01"},
{file = "ijson-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:8c75e82cec05d00ed3a4af5f4edf08f59d536ed1a86ac7e84044870872d82a33"},
{file = "ijson-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e369bf5a173ca51846c243002ad8025d32032532523b06510881ecc8723ee54"},
{file = "ijson-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26e7da0a3cd2a56a1fde1b34231867693f21c528b683856f6691e95f9f39caec"},
{file = "ijson-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c28c7f604729be22aa453e604e9617b665fa0c24cd25f9f47a970e8130c571a"},
{file = "ijson-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed8bcb84d3468940f97869da323ba09ae3e6b950df11dea9b62e2b231ca1e3"},
{file = "ijson-3.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:296bc824f4088f2af814aaf973b0435bc887ce3d9f517b1577cc4e7d1afb1cb7"},
{file = "ijson-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8145f8f40617b6a8aa24e28559d0adc8b889e56a203725226a8a60fa3501073f"},
{file = "ijson-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b674a97bd503ea21bc85103e06b6493b1b2a12da3372950f53e1c664566a33a4"},
{file = "ijson-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8bc731cf1c3282b021d3407a601a5a327613da9ad3c4cecb1123232623ae1826"},
{file = "ijson-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42ace5e940e0cf58c9de72f688d6829ddd815096d07927ee7e77df2648006365"},
{file = "ijson-3.4.0-cp311-cp311-win32.whl", hash = "sha256:5be39a0df4cd3f02b304382ea8885391900ac62e95888af47525a287c50005e9"},
{file = "ijson-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:0b1be1781792291e70d2e177acf564ec672a7907ba74f313583bdf39fe81f9b7"},
{file = "ijson-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:956b148f88259a80a9027ffbe2d91705fae0c004fbfba3e5a24028fbe72311a9"},
{file = "ijson-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:06b89960f5c721106394c7fba5760b3f67c515b8eb7d80f612388f5eca2f4621"},
{file = "ijson-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a0bb591cf250dd7e9dfab69d634745a7f3272d31cfe879f9156e0a081fd97ee"},
{file = "ijson-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72e92de999977f4c6b660ffcf2b8d59604ccd531edcbfde05b642baf283e0de8"},
{file = "ijson-3.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e9602157a5b869d44b6896e64f502c712a312fcde044c2e586fccb85d3e316e"},
{file = "ijson-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e83660edb931a425b7ff662eb49db1f10d30ca6d4d350e5630edbed098bc01"},
{file = "ijson-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:49bf8eac1c7b7913073865a859c215488461f7591b4fa6a33c14b51cb73659d0"},
{file = "ijson-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:160b09273cb42019f1811469508b0a057d19f26434d44752bde6f281da6d3f32"},
{file = "ijson-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2019ff4e6f354aa00c76c8591bd450899111c61f2354ad55cc127e2ce2492c44"},
{file = "ijson-3.4.0-cp312-cp312-win32.whl", hash = "sha256:931c007bf6bb8330705429989b2deed6838c22b63358a330bf362b6e458ba0bf"},
{file = "ijson-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:71523f2b64cb856a820223e94d23e88369f193017ecc789bb4de198cc9d349eb"},
{file = "ijson-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e8d96f88d75196a61c9d9443de2b72c2d4a7ba9456ff117b57ae3bba23a54256"},
{file = "ijson-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c45906ce2c1d3b62f15645476fc3a6ca279549127f01662a39ca5ed334a00cf9"},
{file = "ijson-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4ab4bc2119b35c4363ea49f29563612237cae9413d2fbe54b223be098b97bc9e"},
{file = "ijson-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97b0a9b5a15e61dfb1f14921ea4e0dba39f3a650df6d8f444ddbc2b19b479ff1"},
{file = "ijson-3.4.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3047bb994dabedf11de11076ed1147a307924b6e5e2df6784fb2599c4ad8c60"},
{file = "ijson-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68c83161b052e9f5dc8191acbc862bb1e63f8a35344cb5cd0db1afd3afd487a6"},
{file = "ijson-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1eebd9b6c20eb1dffde0ae1f0fbb4aeacec2eb7b89adb5c7c0449fc9fd742760"},
{file = "ijson-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:13fb6d5c35192c541421f3ee81239d91fc15a8d8f26c869250f941f4b346a86c"},
{file = "ijson-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:28b7196ff7b37c4897c547a28fa4876919696739fc91c1f347651c9736877c69"},
{file = "ijson-3.4.0-cp313-cp313-win32.whl", hash = "sha256:3c2691d2da42629522140f77b99587d6f5010440d58d36616f33bc7bdc830cc3"},
{file = "ijson-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:c4554718c275a044c47eb3874f78f2c939f300215d9031e785a6711cc51b83fc"},
{file = "ijson-3.4.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:915a65e3f3c0eee2ea937bc62aaedb6c14cc1e8f0bb9f3f4fb5a9e2bbfa4b480"},
{file = "ijson-3.4.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:afbe9748707684b6c5adc295c4fdcf27765b300aec4d484e14a13dca4e5c0afa"},
{file = "ijson-3.4.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d823f8f321b4d8d5fa020d0a84f089fec5d52b7c0762430476d9f8bf95bbc1a9"},
{file = "ijson-3.4.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a0a2c54f3becf76881188beefd98b484b1d3bd005769a740d5b433b089fa23"},
{file = "ijson-3.4.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ced19a83ab09afa16257a0b15bc1aa888dbc555cb754be09d375c7f8d41051f2"},
{file = "ijson-3.4.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8100f9885eff1f38d35cef80ef759a1bbf5fc946349afa681bd7d0e681b7f1a0"},
{file = "ijson-3.4.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d7bcc3f7f21b0f703031ecd15209b1284ea51b2a329d66074b5261de3916c1eb"},
{file = "ijson-3.4.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2dcb190227b09dd171bdcbfe4720fddd574933c66314818dfb3960c8a6246a77"},
{file = "ijson-3.4.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:eda4cfb1d49c6073a901735aaa62e39cb7ab47f3ad7bb184862562f776f1fa8a"},
{file = "ijson-3.4.0-cp313-cp313t-win32.whl", hash = "sha256:0772638efa1f3b72b51736833404f1cbd2f5beeb9c1a3d392e7d385b9160cba7"},
{file = "ijson-3.4.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3d8a0d67f36e4fb97c61a724456ef0791504b16ce6f74917a31c2e92309bbeb9"},
{file = "ijson-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8a990401dc7350c1739f42187823e68d2ef6964b55040c6e9f3a29461f9929e2"},
{file = "ijson-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80f50e0f5da4cd6b65e2d8ff38cb61b26559608a05dd3a3f9cfa6f19848e6f22"},
{file = "ijson-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d9ca52f5650d820a2e7aa672dea1c560f609e165337e5b3ed7cf56d696bf309"},
{file = "ijson-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:940c8c5fd20fb89b56dde9194a4f1c7b779149f1ab26af6d8dc1da51a95d26dd"},
{file = "ijson-3.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41dbb525666017ad856ac9b4f0f4b87d3e56b7dfde680d5f6d123556b22e2172"},
{file = "ijson-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9f84f5e2eea5c2d271c97221c382db005534294d1175ddd046a12369617c41c"},
{file = "ijson-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0cd126c11835839bba8ac0baaba568f67d701fc4f717791cf37b10b74a2ebd7"},
{file = "ijson-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f9a9d3bbc6d91c24a2524a189d2aca703cb5f7e8eb34ad0aff3c91702404a983"},
{file = "ijson-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:56679ee133470d0f1f598a8ad109d760fcfebeef4819531e29335aefb7e4cb1a"},
{file = "ijson-3.4.0-cp39-cp39-win32.whl", hash = "sha256:583c15ded42ba80104fa1d0fa0dfdd89bb47922f3bb893a931bb843aeb55a3f3"},
{file = "ijson-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:4563e603e56f4451572d96b47311dffef5b933d825f3417881d4d3630c6edac2"},
{file = "ijson-3.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:54e989c35dba9cf163d532c14bcf0c260897d5f465643f0cd1fba9c908bed7ef"},
{file = "ijson-3.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:494eeb8e87afef22fbb969a4cb81ac2c535f30406f334fb6136e9117b0bb5380"},
{file = "ijson-3.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81603de95de1688958af65cd2294881a4790edae7de540b70c65c8253c5dc44a"},
{file = "ijson-3.4.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8524be12c1773e1be466034cc49c1ecbe3d5b47bb86217bd2a57f73f970a6c19"},
{file = "ijson-3.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17994696ec895d05e0cfa21b11c68c920c82634b4a3d8b8a1455d6fe9fdee8f7"},
{file = "ijson-3.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0b67727aaee55d43b2e82b6a866c3cbcb2b66a5e9894212190cbd8773d0d9857"},
{file = "ijson-3.4.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdc8c5ca0eec789ed99db29c68012dda05027af0860bb360afd28d825238d69d"},
{file = "ijson-3.4.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8e6b44b6ec45d5b1a0ee9d97e0e65ab7f62258727004cbbe202bf5f198bc21f7"},
{file = "ijson-3.4.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b51e239e4cb537929796e840d349fc731fdc0d58b1a0683ce5465ad725321e0f"},
{file = "ijson-3.4.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed05d43ec02be8ddb1ab59579761f6656b25d241a77fd74f4f0f7ec09074318a"},
{file = "ijson-3.4.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfeca1aaa59d93fd0a3718cbe5f7ef0effff85cf837e0bceb71831a47f39cc14"},
{file = "ijson-3.4.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:7ca72ca12e9a1dd4252c97d952be34282907f263f7e28fcdff3a01b83981e837"},
{file = "ijson-3.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f79b2cd52bd220fff83b3ee4ef89b54fd897f57cc8564a6d8ab7ac669de3930"},
{file = "ijson-3.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d16eed737610ad5ad8989b5864fbe09c64133129734e840c29085bb0d497fb03"},
{file = "ijson-3.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b3aac1d7a27e1e3bdec5bd0689afe55c34aa499baa06a80852eda31f1ffa6dc"},
{file = "ijson-3.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:784ae654aa9851851e87f323e9429b20b58a5399f83e6a7e348e080f2892081f"},
{file = "ijson-3.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d05bd8fa6a8adefb32bbf7b993d2a2f4507db08453dd1a444c281413a6d9685"},
{file = "ijson-3.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b5a05fd935cc28786b88c16976313086cd96414c6a3eb0a3822c47ab48b1793e"},
{file = "ijson-3.4.0.tar.gz", hash = "sha256:5f74dcbad9d592c428d3ca3957f7115a42689ee7ee941458860900236ae9bb13"},
{file = "ijson-3.4.0.post0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f904a405b58a04b6ef0425f1babbc5c65feb66b0a4cc7f214d4ad7de106f77d"},
{file = "ijson-3.4.0.post0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a07dcc1a8a1ddd76131a7c7528cbd12951c2e34eb3c3d63697b905069a2d65b1"},
{file = "ijson-3.4.0.post0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab3be841b8c430c1883b8c0775eb551f21b5500c102c7ee828afa35ddd701bdd"},
{file = "ijson-3.4.0.post0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:43059ae0d657b11c5ddb11d149bc400c44f9e514fb8663057e9b2ea4d8d44c1f"},
{file = "ijson-3.4.0.post0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0d3e82963096579d1385c06b2559570d7191e225664b7fa049617da838e1a4a4"},
{file = "ijson-3.4.0.post0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:461ce4e87a21a261b60c0a68a2ad17c7dd214f0b90a0bec7e559a66b6ae3bd7e"},
{file = "ijson-3.4.0.post0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:890cf6610c9554efcb9765a93e368efeb5bb6135f59ce0828d92eaefff07fde5"},
{file = "ijson-3.4.0.post0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6793c29a5728e7751a7df01be58ba7da9b9690c12bf79d32094c70a908fa02b9"},
{file = "ijson-3.4.0.post0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a56b6674d7feec0401c91f86c376f4e3d8ff8129128a8ad21ca43ec0b1242f79"},
{file = "ijson-3.4.0.post0-cp310-cp310-win32.whl", hash = "sha256:01767fcbd75a5fa5a626069787b41f04681216b798510d5f63bcf66884386368"},
{file = "ijson-3.4.0.post0-cp310-cp310-win_amd64.whl", hash = "sha256:09127c06e5dec753feb9e4b8c5f6a23603d1cd672d098159a17e53a73b898eec"},
{file = "ijson-3.4.0.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b473112e72c0c506da425da3278367b6680f340ecc093084693a1e819d28435"},
{file = "ijson-3.4.0.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:043f9b7cf9cc744263a78175e769947733710d2412d25180df44b1086b23ebd5"},
{file = "ijson-3.4.0.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b55e49045f4c8031f3673f56662fd828dc9e8d65bd3b03a9420dda0d370e64ba"},
{file = "ijson-3.4.0.post0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:11f13b73194ea2a5a8b4a2863f25b0b4624311f10db3a75747b510c4958179b0"},
{file = "ijson-3.4.0.post0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:659acb2843433e080c271ecedf7d19c71adde1ee5274fc7faa2fec0a793f9f1c"},
{file = "ijson-3.4.0.post0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:deda4cfcaafa72ca3fa845350045b1d0fef9364ec9f413241bb46988afbe6ee6"},
{file = "ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47352563e8c594360bacee2e0753e97025f0861234722d02faace62b1b6d2b2a"},
{file = "ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5a48b9486242d1295abe7fd0fbb6308867da5ca3f69b55c77922a93c2b6847aa"},
{file = "ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c0886234d1fae15cf4581a430bdba03d79251c1ab3b07e30aa31b13ef28d01c"},
{file = "ijson-3.4.0.post0-cp311-cp311-win32.whl", hash = "sha256:fecae19b5187d92900c73debb3a979b0b3290a53f85df1f8f3c5ba7d1e9fb9cb"},
{file = "ijson-3.4.0.post0-cp311-cp311-win_amd64.whl", hash = "sha256:b39dbf87071f23a23c8077eea2ae7cfeeca9ff9ffec722dfc8b5f352e4dd729c"},
{file = "ijson-3.4.0.post0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b607a500fca26101be47d2baf7cddb457b819ab60a75ce51ed1092a40da8b2f9"},
{file = "ijson-3.4.0.post0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4827d9874a6a81625412c59f7ca979a84d01f7f6bfb3c6d4dc4c46d0382b14e0"},
{file = "ijson-3.4.0.post0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4d4afec780881edb2a0d2dd40b1cdbe246e630022d5192f266172a0307986a7"},
{file = "ijson-3.4.0.post0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432fb60ffb952926f9438e0539011e2dfcd108f8426ee826ccc6173308c3ff2c"},
{file = "ijson-3.4.0.post0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:54a0e3e05d9a0c95ecba73d9579f146cf6d5c5874116c849dba2d39a5f30380e"},
{file = "ijson-3.4.0.post0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05807edc0bcbd222dc6ea32a2b897f0c81dc7f12c8580148bc82f6d7f5e7ec7b"},
{file = "ijson-3.4.0.post0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a5269af16f715855d9864937f9dd5c348ca1ac49cee6a2c7a1b7091c159e874f"},
{file = "ijson-3.4.0.post0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b200df83c901f5bfa416d069ac71077aa1608f854a4c50df1b84ced560e9c9ec"},
{file = "ijson-3.4.0.post0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6458bd8e679cdff459a0a5e555b107c3bbacb1f382da3fe0f40e392871eb518d"},
{file = "ijson-3.4.0.post0-cp312-cp312-win32.whl", hash = "sha256:55f7f656b5986326c978cbb3a9eea9e33f3ef6ecc4535b38f1d452c731da39ab"},
{file = "ijson-3.4.0.post0-cp312-cp312-win_amd64.whl", hash = "sha256:e15833dcf6f6d188fdc624a31cd0520c3ba21b6855dc304bc7c1a8aeca02d4ac"},
{file = "ijson-3.4.0.post0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:114ed248166ac06377e87a245a158d6b98019d2bdd3bb93995718e0bd996154f"},
{file = "ijson-3.4.0.post0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffb21203736b08fe27cb30df6a4f802fafb9ef7646c5ff7ef79569b63ea76c57"},
{file = "ijson-3.4.0.post0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:07f20ecd748602ac7f18c617637e53bd73ded7f3b22260bba3abe401a7fc284e"},
{file = "ijson-3.4.0.post0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:27aa193d47ffc6bc4e45453896ad98fb089a367e8283b973f1fe5c0198b60b4e"},
{file = "ijson-3.4.0.post0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ccddb2894eb7af162ba43b9475ac5825d15d568832f82eb8783036e5d2aebd42"},
{file = "ijson-3.4.0.post0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61ab0b8c5bf707201dc67e02c116f4b6545c4afd7feb2264b989d242d9c4348a"},
{file = "ijson-3.4.0.post0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:254cfb8c124af68327a0e7a49b50bbdacafd87c4690a3d62c96eb01020a685ef"},
{file = "ijson-3.4.0.post0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04ac9ca54db20f82aeda6379b5f4f6112fdb150d09ebce04affeab98a17b4ed3"},
{file = "ijson-3.4.0.post0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a603d7474bf35e7b3a8e49c8dabfc4751841931301adff3f3318171c4e407f32"},
{file = "ijson-3.4.0.post0-cp313-cp313-win32.whl", hash = "sha256:ec5bb1520cb212ebead7dba048bb9b70552c3440584f83b01b0abc96862e2a09"},
{file = "ijson-3.4.0.post0-cp313-cp313-win_amd64.whl", hash = "sha256:3505dff18bdeb8b171eb28af6df34857e2be80dc01e2e3b624e77215ad58897f"},
{file = "ijson-3.4.0.post0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:45a0b1c833ed2620eaf8da958f06ac8351c59e5e470e078400d23814670ed708"},
{file = "ijson-3.4.0.post0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7809ec8c8f40228edaaa089f33e811dff4c5b8509702652870d3f286c9682e27"},
{file = "ijson-3.4.0.post0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cf4a34c2cfe852aee75c89c05b0a4531c49dc0be27eeed221afd6fbf9c3e149c"},
{file = "ijson-3.4.0.post0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a39d5d36067604b26b78de70b8951c90e9272450642661fe531a8f7a6936a7fa"},
{file = "ijson-3.4.0.post0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83fc738d81c9ea686b452996110b8a6678296c481e0546857db24785bff8da92"},
{file = "ijson-3.4.0.post0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b2a81aee91633868f5b40280e2523f7c5392e920a5082f47c5e991e516b483f6"},
{file = "ijson-3.4.0.post0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:56169e298c5a2e7196aaa55da78ddc2415876a74fe6304f81b1eb0d3273346f7"},
{file = "ijson-3.4.0.post0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eeb9540f0b1a575cbb5968166706946458f98c16e7accc6f2fe71efa29864241"},
{file = "ijson-3.4.0.post0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ba3478ff0bb49d7ba88783f491a99b6e3fa929c930ab062d2bb7837e6a38fe88"},
{file = "ijson-3.4.0.post0-cp313-cp313t-win32.whl", hash = "sha256:b005ce84e82f28b00bf777a464833465dfe3efa43a0a26c77b5ac40723e1a728"},
{file = "ijson-3.4.0.post0-cp313-cp313t-win_amd64.whl", hash = "sha256:fe9c84c9b1c8798afa407be1cea1603401d99bfc7c34497e19f4f5e5ddc9b441"},
{file = "ijson-3.4.0.post0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da6a21b88cbf5ecbc53371283988d22c9643aa71ae2873bbeaefd2dea3b6160b"},
{file = "ijson-3.4.0.post0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cf24a48a1c3ca9d44a04feb59ccefeb9aa52bb49b9cb70ad30518c25cce74bb7"},
{file = "ijson-3.4.0.post0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d14427d366f95f21adcb97d0ed1f6d30f6fdc04d0aa1e4de839152c50c2b8d65"},
{file = "ijson-3.4.0.post0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339d49f6c5d24051c85d9226be96d2d56e633cb8b7d09dd8099de8d8b51a97e2"},
{file = "ijson-3.4.0.post0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7206afcb396aaef66c2b066997b4e9d9042c4b7d777f4d994e9cec6d322c2fe6"},
{file = "ijson-3.4.0.post0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c8dd327da225887194fe8b93f2b3c9c256353e14a6b9eefc940ed17fde38f5b8"},
{file = "ijson-3.4.0.post0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4810546e66128af51fd4a0c9a640e84e8508e9c15c4f247d8a3e3253b20e1465"},
{file = "ijson-3.4.0.post0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:103a0838061297d063bca81d724b0958b616f372bd893bbc278320152252c652"},
{file = "ijson-3.4.0.post0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:40007c977e230e04118b27322f25a72ae342a3d61464b2057fcd9b21eeb7427a"},
{file = "ijson-3.4.0.post0-cp314-cp314-win32.whl", hash = "sha256:f932969fc1fd4449ca141cf5f47ff357656a154a361f28d9ebca0badc5b02297"},
{file = "ijson-3.4.0.post0-cp314-cp314-win_amd64.whl", hash = "sha256:3ed19b1e4349240773a8ce4a4bfa450892d4a57949c02c515cd6be5a46b7696a"},
{file = "ijson-3.4.0.post0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:226447e40ca9340a39ed07d68ea02ee14b52cb4fe649425b256c1f0073531c83"},
{file = "ijson-3.4.0.post0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c88f0669d45d4b1aa017c9b68d378e7cd15d188dfb6f0209adc78b7f45590a7"},
{file = "ijson-3.4.0.post0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:56b3089dc28c12492d92cc4896d2be585a89ecae34e25d08c1df88f21815cb50"},
{file = "ijson-3.4.0.post0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c117321cfa7b749cc1213f9b4c80dc958f0a206df98ec038ae4bcbbdb8463a15"},
{file = "ijson-3.4.0.post0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8311f48db6a33116db5c81682f08b6e2405501a4b4e460193ae69fec3cd1f87a"},
{file = "ijson-3.4.0.post0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91c61a3e63e04da648737e6b4abd537df1b46fb8cdf3219b072e790bb3c1a46b"},
{file = "ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1709171023ce82651b2f132575c2e6282e47f64ad67bd3260da476418d0e7895"},
{file = "ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:5f0a72b1e3c0f78551670c12b2fdc1bf05f2796254d9c2055ba319bec2216020"},
{file = "ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b982a3597b0439ce9c8f4cfc929d86c6ed43907908be1e8463a34dc35fe5b258"},
{file = "ijson-3.4.0.post0-cp314-cp314t-win32.whl", hash = "sha256:4e39bfdc36b0b460ef15a06550a6a385c64c81f7ac205ccff39bd45147918912"},
{file = "ijson-3.4.0.post0-cp314-cp314t-win_amd64.whl", hash = "sha256:17e45262a5ddef39894013fb1548ee7094e444c8389eb1a97f86708b19bea03e"},
{file = "ijson-3.4.0.post0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:35eb2760a42fd9461358b4be131287587b49ff504fc37fa3014dca6c27c343f4"},
{file = "ijson-3.4.0.post0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f82ca7abfb3ef3cf2194c71dad634572bcccd62a5dd466649f78fe73d492c860"},
{file = "ijson-3.4.0.post0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:97f5ef3d839fc24b0ad47e8b31b4751ae72c5d83606e3ee4c92bb25965c03a4f"},
{file = "ijson-3.4.0.post0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a2c873742e9f7e21378516217d81d6fa11d34bae860ed364832c00ab1dbf37ed"},
{file = "ijson-3.4.0.post0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f8b9ffa2c2dfe3289da9aec4e5ab52684fa2b2da2c853c7891b360ec46fba07"},
{file = "ijson-3.4.0.post0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0634b21188c67e5cf471cc1d30d193d19f521d89e2125ab1fb602aa8ae61e050"},
{file = "ijson-3.4.0.post0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3752dd6f51ef58a71799de745649deff293e959700f1b7f5b1989618da366f24"},
{file = "ijson-3.4.0.post0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:57db77f4ea3eca09f519f627d9f9c76eb862b30edef5d899f031feeed94f05a1"},
{file = "ijson-3.4.0.post0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:435270a4b75667305f6df3226e5224e83cd6906022d7fdcc9df05caae725f796"},
{file = "ijson-3.4.0.post0-cp39-cp39-win32.whl", hash = "sha256:742c211b004ab51ccad2b301525d8a6eb2cf68a5fb82d78836f3a351eec44d4e"},
{file = "ijson-3.4.0.post0-cp39-cp39-win_amd64.whl", hash = "sha256:35aaa979da875fa92bea5dc5969b1541b4912b165091761785459a43f0c20946"},
{file = "ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:add9242f886eae844a7410b84aee2bbb8bdc83c624f227cb1fdb2d0476a96cb1"},
{file = "ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:69718ed41710dfcaa7564b0af42abc05875d4f7aaa24627c808867ef32634bc7"},
{file = "ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:636b6eca96c6c43c04629c6b37fad0181662eaacf9877c71c698485637f752f9"},
{file = "ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb5e73028f6e63d27b3d286069fe350ed80a4ccc493b022b590fea4bb086710d"},
{file = "ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:461acf4320219459dabe5ed90a45cb86c9ba8cc6d6db9dad0d9427d42f57794c"},
{file = "ijson-3.4.0.post0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a0fedf09c0f6ffa2a99e7e7fd9c5f3caf74e655c1ee015a0797383e99382ebc3"},
{file = "ijson-3.4.0.post0.tar.gz", hash = "sha256:9aa02dc70bb245670a6ca7fba737b992aeeb4895360980622f7e568dbf23e41e"},
]
[[package]]
@@ -1342,71 +1352,74 @@ files = [
[[package]]
name = "msgpack"
version = "1.1.1"
version = "1.1.2"
description = "MessagePack serializer"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "msgpack-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:353b6fc0c36fde68b661a12949d7d49f8f51ff5fa019c1e47c87c4ff34b080ed"},
{file = "msgpack-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:79c408fcf76a958491b4e3b103d1c417044544b68e96d06432a189b43d1215c8"},
{file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78426096939c2c7482bf31ef15ca219a9e24460289c00dd0b94411040bb73ad2"},
{file = "msgpack-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b17ba27727a36cb73aabacaa44b13090feb88a01d012c0f4be70c00f75048b4"},
{file = "msgpack-1.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a17ac1ea6ec3c7687d70201cfda3b1e8061466f28f686c24f627cae4ea8efd0"},
{file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:88d1e966c9235c1d4e2afac21ca83933ba59537e2e2727a999bf3f515ca2af26"},
{file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f6d58656842e1b2ddbe07f43f56b10a60f2ba5826164910968f5933e5178af75"},
{file = "msgpack-1.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96decdfc4adcbc087f5ea7ebdcfd3dee9a13358cae6e81d54be962efc38f6338"},
{file = "msgpack-1.1.1-cp310-cp310-win32.whl", hash = "sha256:6640fd979ca9a212e4bcdf6eb74051ade2c690b862b679bfcb60ae46e6dc4bfd"},
{file = "msgpack-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:8b65b53204fe1bd037c40c4148d00ef918eb2108d24c9aaa20bc31f9810ce0a8"},
{file = "msgpack-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:71ef05c1726884e44f8b1d1773604ab5d4d17729d8491403a705e649116c9558"},
{file = "msgpack-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36043272c6aede309d29d56851f8841ba907a1a3d04435e43e8a19928e243c1d"},
{file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a32747b1b39c3ac27d0670122b57e6e57f28eefb725e0b625618d1b59bf9d1e0"},
{file = "msgpack-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a8b10fdb84a43e50d38057b06901ec9da52baac6983d3f709d8507f3889d43f"},
{file = "msgpack-1.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0c325c3f485dc54ec298d8b024e134acf07c10d494ffa24373bea729acf704"},
{file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:88daaf7d146e48ec71212ce21109b66e06a98e5e44dca47d853cbfe171d6c8d2"},
{file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8b55ea20dc59b181d3f47103f113e6f28a5e1c89fd5b67b9140edb442ab67f2"},
{file = "msgpack-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a28e8072ae9779f20427af07f53bbb8b4aa81151054e882aee333b158da8752"},
{file = "msgpack-1.1.1-cp311-cp311-win32.whl", hash = "sha256:7da8831f9a0fdb526621ba09a281fadc58ea12701bc709e7b8cbc362feabc295"},
{file = "msgpack-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fd1b58e1431008a57247d6e7cc4faa41c3607e8e7d4aaf81f7c29ea013cb458"},
{file = "msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae497b11f4c21558d95de9f64fff7053544f4d1a17731c866143ed6bb4591238"},
{file = "msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33be9ab121df9b6b461ff91baac6f2731f83d9b27ed948c5b9d1978ae28bf157"},
{file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f64ae8fe7ffba251fecb8408540c34ee9df1c26674c50c4544d72dbf792e5ce"},
{file = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a494554874691720ba5891c9b0b39474ba43ffb1aaf32a5dac874effb1619e1a"},
{file = "msgpack-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb643284ab0ed26f6957d969fe0dd8bb17beb567beb8998140b5e38a90974f6c"},
{file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d275a9e3c81b1093c060c3837e580c37f47c51eca031f7b5fb76f7b8470f5f9b"},
{file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fd6b577e4541676e0cc9ddc1709d25014d3ad9a66caa19962c4f5de30fc09ef"},
{file = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb29aaa613c0a1c40d1af111abf025f1732cab333f96f285d6a93b934738a68a"},
{file = "msgpack-1.1.1-cp312-cp312-win32.whl", hash = "sha256:870b9a626280c86cff9c576ec0d9cbcc54a1e5ebda9cd26dab12baf41fee218c"},
{file = "msgpack-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:5692095123007180dca3e788bb4c399cc26626da51629a31d40207cb262e67f4"},
{file = "msgpack-1.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3765afa6bd4832fc11c3749be4ba4b69a0e8d7b728f78e68120a157a4c5d41f0"},
{file = "msgpack-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8ddb2bcfd1a8b9e431c8d6f4f7db0773084e107730ecf3472f1dfe9ad583f3d9"},
{file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:196a736f0526a03653d829d7d4c5500a97eea3648aebfd4b6743875f28aa2af8"},
{file = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d592d06e3cc2f537ceeeb23d38799c6ad83255289bb84c2e5792e5a8dea268a"},
{file = "msgpack-1.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4df2311b0ce24f06ba253fda361f938dfecd7b961576f9be3f3fbd60e87130ac"},
{file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e4141c5a32b5e37905b5940aacbc59739f036930367d7acce7a64e4dec1f5e0b"},
{file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b1ce7f41670c5a69e1389420436f41385b1aa2504c3b0c30620764b15dded2e7"},
{file = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4147151acabb9caed4e474c3344181e91ff7a388b888f1e19ea04f7e73dc7ad5"},
{file = "msgpack-1.1.1-cp313-cp313-win32.whl", hash = "sha256:500e85823a27d6d9bba1d057c871b4210c1dd6fb01fbb764e37e4e8847376323"},
{file = "msgpack-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:6d489fba546295983abd142812bda76b57e33d0b9f5d5b71c09a583285506f69"},
{file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bba1be28247e68994355e028dcd668316db30c1f758d3241a7b903ac78dcd285"},
{file = "msgpack-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8f93dcddb243159c9e4109c9750ba5b335ab8d48d9522c5308cd05d7e3ce600"},
{file = "msgpack-1.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fbbc0b906a24038c9958a1ba7ae0918ad35b06cb449d398b76a7d08470b0ed9"},
{file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:61e35a55a546a1690d9d09effaa436c25ae6130573b6ee9829c37ef0f18d5e78"},
{file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:1abfc6e949b352dadf4bce0eb78023212ec5ac42f6abfd469ce91d783c149c2a"},
{file = "msgpack-1.1.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:996f2609ddf0142daba4cefd767d6db26958aac8439ee41db9cc0db9f4c4c3a6"},
{file = "msgpack-1.1.1-cp38-cp38-win32.whl", hash = "sha256:4d3237b224b930d58e9d83c81c0dba7aacc20fcc2f89c1e5423aa0529a4cd142"},
{file = "msgpack-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:da8f41e602574ece93dbbda1fab24650d6bf2a24089f9e9dbb4f5730ec1e58ad"},
{file = "msgpack-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5be6b6bc52fad84d010cb45433720327ce886009d862f46b26d4d154001994b"},
{file = "msgpack-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a89cd8c087ea67e64844287ea52888239cbd2940884eafd2dcd25754fb72232"},
{file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d75f3807a9900a7d575d8d6674a3a47e9f227e8716256f35bc6f03fc597ffbf"},
{file = "msgpack-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d182dac0221eb8faef2e6f44701812b467c02674a322c739355c39e94730cdbf"},
{file = "msgpack-1.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b13fe0fb4aac1aa5320cd693b297fe6fdef0e7bea5518cbc2dd5299f873ae90"},
{file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:435807eeb1bc791ceb3247d13c79868deb22184e1fc4224808750f0d7d1affc1"},
{file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4835d17af722609a45e16037bb1d4d78b7bdf19d6c0128116d178956618c4e88"},
{file = "msgpack-1.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8ef6e342c137888ebbfb233e02b8fbd689bb5b5fcc59b34711ac47ebd504478"},
{file = "msgpack-1.1.1-cp39-cp39-win32.whl", hash = "sha256:61abccf9de335d9efd149e2fff97ed5974f2481b3353772e8e2dd3402ba2bd57"},
{file = "msgpack-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:40eae974c873b2992fd36424a5d9407f93e97656d999f43fca9d29f820899084"},
{file = "msgpack-1.1.1.tar.gz", hash = "sha256:77b79ce34a2bdab2594f490c8e80dd62a02d650b91a75159a63ec413b8d104cd"},
{file = "msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2"},
{file = "msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87"},
{file = "msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251"},
{file = "msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a"},
{file = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f"},
{file = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f"},
{file = "msgpack-1.1.2-cp310-cp310-win32.whl", hash = "sha256:e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9"},
{file = "msgpack-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa"},
{file = "msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c"},
{file = "msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0"},
{file = "msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296"},
{file = "msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef"},
{file = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c"},
{file = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e"},
{file = "msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e"},
{file = "msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68"},
{file = "msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406"},
{file = "msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa"},
{file = "msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb"},
{file = "msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f"},
{file = "msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42"},
{file = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9"},
{file = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620"},
{file = "msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029"},
{file = "msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b"},
{file = "msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69"},
{file = "msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf"},
{file = "msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7"},
{file = "msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999"},
{file = "msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e"},
{file = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162"},
{file = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794"},
{file = "msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c"},
{file = "msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9"},
{file = "msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84"},
{file = "msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00"},
{file = "msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939"},
{file = "msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e"},
{file = "msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931"},
{file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014"},
{file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2"},
{file = "msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717"},
{file = "msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b"},
{file = "msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af"},
{file = "msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a"},
{file = "msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b"},
{file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245"},
{file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90"},
{file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20"},
{file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27"},
{file = "msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b"},
{file = "msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff"},
{file = "msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46"},
{file = "msgpack-1.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea5405c46e690122a76531ab97a079e184c0daf491e588592d6a23d3e32af99e"},
{file = "msgpack-1.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fba231af7a933400238cb357ecccf8ab5d51535ea95d94fc35b7806218ff844"},
{file = "msgpack-1.1.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a8f6e7d30253714751aa0b0c84ae28948e852ee7fb0524082e6716769124bc23"},
{file = "msgpack-1.1.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:94fd7dc7d8cb0a54432f296f2246bc39474e017204ca6f4ff345941d4ed285a7"},
{file = "msgpack-1.1.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:350ad5353a467d9e3b126d8d1b90fe05ad081e2e1cef5753f8c345217c37e7b8"},
{file = "msgpack-1.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6bde749afe671dc44893f8d08e83bf475a1a14570d67c4bb5cec5573463c8833"},
{file = "msgpack-1.1.2-cp39-cp39-win32.whl", hash = "sha256:ad09b984828d6b7bb52d1d1d0c9be68ad781fa004ca39216c8a1e63c0f34ba3c"},
{file = "msgpack-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:67016ae8c8965124fdede9d3769528ad8284f14d635337ffa6a713a580f6c030"},
{file = "msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e"},
]
[[package]]
@@ -3293,4 +3306,4 @@ url-preview = ["lxml"]
[metadata]
lock-version = "2.1"
python-versions = "^3.9.0"
content-hash = "0058b93ca13a3f2a0cfc28485ddd8202c42d0015dbaf3b9692e43f37fe2a0be6"
content-hash = "5d71c862b924bc2af936cb6fef264a023213153543f738af31357deaf6de19b8"


@@ -78,6 +78,12 @@ select = [
"LOG",
# flake8-logging-format
"G",
# pyupgrade
"UP006",
]
extend-safe-fixes = [
# pyupgrade
"UP006"
]
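For context, UP006 is ruff's pyupgrade rule against non-PEP 585 annotations; listing it under extend-safe-fixes lets ruff apply the rewrite automatically. A minimal, illustrative before/after of the fix it performs (not taken from this diff):

# Before: typing-module generics
from typing import Dict, List

def tally(items: List[str]) -> Dict[str, int]:
    ...

# After UP006: builtin generics, subscriptable since Python 3.9
def tally(items: list[str]) -> dict[str, int]:
    ...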
[tool.ruff.lint.isort]
@@ -101,7 +107,7 @@ module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.140.0"
version = "1.141.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
@@ -319,7 +325,7 @@ all = [
# - systemd: this is a system-based requirement
]
[tool.poetry.dev-dependencies]
[tool.poetry.group.dev.dependencies]
# We pin development dependencies in poetry.lock so that our tests don't start
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
@@ -381,10 +387,10 @@ build-backend = "poetry.core.masonry.api"
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
# - CPython and PyPy 3.8: EOLed
# - CPython 3.8: EOLed
# - musllinux i686: excluded to reduce number of wheels we build.
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
skip = "cp38* pp38* *-musllinux_i686"
skip = "cp38* *-musllinux_i686"
# Enable non-default builds.
# "pypy" used to be included by default up until cibuildwheel 3.
enable = "pypy"


@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.140/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.141/synapse-config.schema.json
type: object
properties:
modules:


@@ -18,7 +18,7 @@ import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from types import FrameType
from typing import Collection, Optional, Sequence, Set
from typing import Collection, Optional, Sequence
# These are expanded inside the dockerfile to be a fully qualified image name.
# e.g. docker.io/library/debian:bullseye
@@ -54,7 +54,7 @@ class Builder:
):
self.redirect_stdout = redirect_stdout
self._docker_build_args = tuple(docker_build_args or ())
self.active_containers: Set[str] = set()
self.active_containers: set[str] = set()
self._lock = threading.Lock()
self._failed = False
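These annotation swaps are safe at runtime because, from Python 3.9 (the floor declared in the lockfile metadata above), the builtin collections are directly subscriptable. A quick illustrative check:

# PEP 585: builtins accept subscripts at runtime on 3.9+
alias = set[str]
assert alias.__origin__ is set  # a types.GenericAlias, usable in annotations
active_containers: set[str] = set()  # no typing.Set import required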


@@ -21,7 +21,6 @@
#
import sys
from pathlib import Path
from typing import Dict, List
import tomli
@@ -33,7 +32,7 @@ def main() -> None:
# Poetry 1.3+ lockfile format:
# There's a `files` inline table in each [[package]]
packages_to_assets: Dict[str, List[Dict[str, str]]] = {
packages_to_assets: dict[str, list[dict[str, str]]] = {
package["name"]: package["files"] for package in lockfile_content["package"]
}


@@ -47,11 +47,7 @@ from contextlib import contextmanager
from typing import (
Any,
Callable,
Dict,
Generator,
List,
Set,
Type,
TypeVar,
)
@@ -69,7 +65,7 @@ from synapse._pydantic_compat import (
logger = logging.getLogger(__name__)
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [
constr,
conbytes,
conint,
@@ -145,7 +141,7 @@ class PatchedBaseModel(PydanticBaseModel):
"""
@classmethod
def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object):
def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object):
for field in cls.__fields__.values():
# Note that field.type_ and field.outer_type are computed based on the
# annotation type, see pydantic.fields.ModelField._type_analysis
@@ -212,7 +208,7 @@ def lint() -> int:
return os.EX_DATAERR if failures else os.EX_OK
def do_lint() -> Set[str]:
def do_lint() -> set[str]:
"""Try to import all of Synapse and see if we spot any Pydantic type coercions."""
failures = set()
@@ -258,8 +254,8 @@ def run_test_snippet(source: str) -> None:
# > Remember that at the module level, globals and locals are the same dictionary.
# > If exec gets two separate objects as globals and locals, the code will be
# > executed as if it were embedded in a class definition.
globals_: Dict[str, object]
locals_: Dict[str, object]
globals_: dict[str, object]
locals_: dict[str, object]
globals_ = locals_ = {}
exec(textwrap.dedent(source), globals_, locals_)
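A tiny, runnable demonstration of the pitfall that comment describes:

snippet = '''
x = 1
def f():
    return x  # name lookup goes to globals at call time
print(f())
'''
exec(snippet, {})      # fine: one dict serves as both globals and locals
exec(snippet, {}, {})  # NameError: x landed in locals, but f() reads globals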
@@ -394,10 +390,10 @@ class TestFieldTypeInspection(unittest.TestCase):
("bool"),
("Optional[str]",),
("Union[None, str]",),
("List[str]",),
("List[List[str]]",),
("Dict[StrictStr, str]",),
("Dict[str, StrictStr]",),
("list[str]",),
("list[list[str]]",),
("dict[StrictStr, str]",),
("dict[str, StrictStr]",),
("TypedDict('D', x=int)",),
]
)
@@ -425,9 +421,9 @@ class TestFieldTypeInspection(unittest.TestCase):
("constr(strict=True, min_length=10)",),
("Optional[StrictStr]",),
("Union[None, StrictStr]",),
("List[StrictStr]",),
("List[List[StrictStr]]",),
("Dict[StrictStr, StrictStr]",),
("list[StrictStr]",),
("list[list[StrictStr]]",),
("dict[StrictStr, StrictStr]",),
("TypedDict('D', x=StrictInt)",),
]
)


@@ -5,7 +5,7 @@
# Also checks that schema deltas do not try and create or drop indices.
import re
from typing import Any, Dict, List
from typing import Any
import click
import git
@@ -48,16 +48,16 @@ def main(force_colors: bool) -> None:
r = repo.git.show(f"origin/{DEVELOP_BRANCH}:synapse/storage/schema/__init__.py")
locals: Dict[str, Any] = {}
locals: dict[str, Any] = {}
exec(r, locals)
current_schema_version = locals["SCHEMA_VERSION"]
diffs: List[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
diffs: list[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
# Get the schema version of the local file to check against current schema on develop
with open("synapse/storage/schema/__init__.py") as file:
local_schema = file.read()
new_locals: Dict[str, Any] = {}
new_locals: dict[str, Any] = {}
exec(local_schema, new_locals)
local_schema_version = new_locals["SCHEMA_VERSION"]


@@ -43,7 +43,7 @@ import argparse
import base64
import json
import sys
from typing import Any, Dict, Mapping, Optional, Tuple, Union
from typing import Any, Mapping, Optional, Union
from urllib import parse as urlparse
import requests
@@ -147,7 +147,7 @@ def request(
s = requests.Session()
s.mount("matrix-federation://", MatrixConnectionAdapter())
headers: Dict[str, str] = {
headers: dict[str, str] = {
"Authorization": authorization_headers[0],
}
@@ -303,7 +303,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
request: PreparedRequest,
verify: Optional[Union[bool, str]],
proxies: Optional[Mapping[str, str]] = None,
cert: Optional[Union[Tuple[str, str], str]] = None,
cert: Optional[Union[tuple[str, str], str]] = None,
) -> HTTPConnectionPool:
# overrides the get_connection_with_tls_context() method in the base class
parsed = urlparse.urlsplit(request.url)
@@ -326,7 +326,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
)
@staticmethod
def _lookup(server_name: str) -> Tuple[str, int, str]:
def _lookup(server_name: str) -> tuple[str, int, str]:
"""
Do an SRV lookup on a server name and return the host:port to connect to
Given the server_name (after any .well-known lookup), return the host, port and
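For illustration, a rough sketch of such an SRV resolution using dnspython; the names and fallback behaviour here are assumptions, not the script's actual logic:

import dns.resolver

def srv_lookup(server_name: str) -> tuple[str, int]:
    # Prefer the federation SRV record; fall back to the default port 8448.
    try:
        answers = dns.resolver.resolve(f"_matrix-fed._tcp.{server_name}", "SRV")
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
        return server_name, 8448
    # Naive selection: lowest priority wins, higher weight breaks ties.
    best = min(answers, key=lambda r: (r.priority, -r.weight))
    return str(best.target).rstrip("."), best.port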


@@ -24,7 +24,7 @@ can crop up, e.g the cache descriptors.
"""
import enum
from typing import Callable, Mapping, Optional, Tuple, Type, Union
from typing import Callable, Mapping, Optional, Union
import attr
import mypy.types
@@ -184,8 +184,8 @@ should be in the source code.
# Unbound at this point because we don't know the mypy version yet.
# This is set in the `plugin(...)` function below.
MypyPydanticPluginClass: Type[Plugin]
MypyZopePluginClass: Type[Plugin]
MypyPydanticPluginClass: type[Plugin]
MypyZopePluginClass: type[Plugin]
class SynapsePlugin(Plugin):
@@ -795,7 +795,7 @@ AT_CACHED_MUTABLE_RETURN = ErrorCode(
def is_cacheable(
rt: mypy.types.Type, signature: CallableType, verbose: bool
) -> Tuple[bool, Optional[str]]:
) -> tuple[bool, Optional[str]]:
"""
Check if a particular type is cachable.
@@ -905,7 +905,7 @@ def is_cacheable(
return False, f"Don't know how to handle {type(rt).__qualname__} return type"
def plugin(version: str) -> Type[SynapsePlugin]:
def plugin(version: str) -> type[SynapsePlugin]:
global MypyPydanticPluginClass, MypyZopePluginClass
# This is the entry point of the plugin, and lets us deal with the fact
# that the mypy plugin interface is *not* stable by looking at the version


@@ -32,12 +32,13 @@ import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
from typing import Any, List, Match, Optional, Union
from typing import Any, Match, Optional, Union
import attr
import click
import git
import github
import github.Auth
from click.exceptions import ClickException
from git import GitCommandError, Repo
from github import BadCredentialsException, Github
@@ -429,7 +430,7 @@ def _publish(gh_token: str) -> None:
if gh_token:
# Test that the GH Token is valid before continuing.
gh = Github(gh_token)
gh = Github(auth=github.Auth.Token(token=gh_token))
gh.get_user()
# Make sure we're in a git repo.
@@ -442,7 +443,7 @@ def _publish(gh_token: str) -> None:
return
# Publish the draft release
gh = Github(gh_token)
gh = Github(auth=github.Auth.Token(token=gh_token))
gh_repo = gh.get_repo("element-hq/synapse")
for release in gh_repo.get_releases():
if release.title == tag_name:
@@ -487,8 +488,13 @@ def _upload(gh_token: Optional[str]) -> None:
click.echo(f"Tag {tag_name} ({tag.commit}) is not currently checked out!")
click.get_current_context().abort()
if gh_token:
gh = Github(auth=github.Auth.Token(token=gh_token))
else:
# Use github anonymously.
gh = Github()
# Query all the assets corresponding to this release.
gh = Github(gh_token)
gh_repo = gh.get_repo("element-hq/synapse")
gh_release = gh_repo.get_release(tag_name)
@@ -596,16 +602,6 @@ def _wait_for_actions(gh_token: Optional[str]) -> None:
if len(resp["workflow_runs"]) == 0:
continue
# Warn the user if any workflows are still queued. They might need to fix something.
if any(workflow["status"] == "queued" for workflow in resp["workflow_runs"]):
_notify("Warning: at least one release workflow is still queued...")
if not click.confirm("Continue waiting for queued assets?", default=True):
click.echo(
"Continuing on with the release. Note that you may need to upload missing assets manually later."
)
break
continue
if all(
workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
):
@@ -725,18 +721,31 @@ def _announce() -> None:
current_version = get_package_version()
tag_name = f"v{current_version}"
is_rc = "rc" in tag_name
release_text = f"""
### Synapse {current_version} {"🧪" if is_rc else "🚀"}
click.echo(
f"""
Hi everyone. Synapse {current_version} has just been released.
"""
if "rc" in tag_name:
release_text += (
"\nThis is a release candidate. Please help us test it out "
"before the final release by deploying it to non-production environments, "
"and reporting any issues you find to "
"[the issue tracker](https://github.com/element-hq/synapse/issues). Thanks!\n"
)
release_text += f"""
[notes](https://github.com/element-hq/synapse/releases/tag/{tag_name}) | \
[docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \
[debs](https://packages.matrix.org/debian/) | \
[pypi](https://pypi.org/project/matrix-synapse/{current_version}/)"""
)
if "rc" in tag_name:
click.echo(release_text)
if is_rc:
click.echo(
"""
Announce the RC in
@@ -761,7 +770,7 @@ Ask the designated people to do the blog and tweets."""
def full(gh_token: str) -> None:
if gh_token:
# Test that the GH Token is valid before continuing.
gh = Github(gh_token)
gh = Github(auth=github.Auth.Token(token=gh_token))
gh.get_user()
click.echo("1. If this is a security release, read the security wiki page.")
@@ -830,8 +839,12 @@ def get_repo_and_check_clean_checkout(
raise click.ClickException(
f"{path} is not a git repository (expecting a {name} repository)."
)
if repo.is_dirty():
raise click.ClickException(f"Uncommitted changes exist in {path}.")
while repo.is_dirty():
if not click.confirm(
f"Uncommitted changes exist in {path}. Commit or stash them. Ready to continue?"
):
raise click.ClickException("Aborted.")
return repo
@@ -843,7 +856,7 @@ def check_valid_gh_token(gh_token: Optional[str]) -> None:
return
try:
gh = Github(gh_token)
gh = Github(auth=github.Auth.Token(token=gh_token))
# We need to lookup name to trigger a request.
_name = gh.get_user().name
@@ -890,7 +903,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
start_line: int
end_line: Optional[int] = None # Is none if its the last entry
headings: List[VersionSection] = []
headings: list[VersionSection] = []
for i, token in enumerate(tokens):
# We look for level 1 headings (h1 tags).
if token.type != "heading_open" or token.tag != "h1":
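For reference, a minimal sketch of locating those headings with markdown-it-py's token stream, matching the token.type / token.tag checks above (illustrative; changelog_text is an assumed input):

from markdown_it import MarkdownIt

tokens = MarkdownIt().parse(changelog_text)
h1_indices = [
    i
    for i, token in enumerate(tokens)
    if token.type == "heading_open" and token.tag == "h1"
]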


@@ -38,7 +38,7 @@ import io
import json
import sys
from collections import defaultdict
from typing import Any, Dict, Iterator, Optional, Tuple
from typing import Any, Iterator, Optional
import git
from packaging import version
@@ -57,7 +57,7 @@ SCHEMA_VERSION_FILES = (
OLDEST_SHOWN_VERSION = version.parse("v1.0")
def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
def get_schema_versions(tag: git.Tag) -> tuple[Optional[int], Optional[int]]:
"""Get the schema and schema compat versions for a tag."""
schema_version = None
schema_compat_version = None
@@ -81,7 +81,7 @@ def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
# SCHEMA_COMPAT_VERSION is sometimes across multiple lines, the easiest
# thing to do is exec the code. Luckily it has only ever existed in
# a file which imports nothing else from Synapse.
locals: Dict[str, Any] = {}
locals: dict[str, Any] = {}
exec(schema_file.data_stream.read().decode("utf-8"), {}, locals)
schema_version = locals["SCHEMA_VERSION"]
schema_compat_version = locals.get("SCHEMA_COMPAT_VERSION")


@@ -7,18 +7,14 @@ from __future__ import annotations
from typing import (
Any,
Callable,
Dict,
Hashable,
ItemsView,
Iterable,
Iterator,
KeysView,
List,
Mapping,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
ValuesView,
@@ -35,14 +31,14 @@ _VT_co = TypeVar("_VT_co", covariant=True)
_SD = TypeVar("_SD", bound=SortedDict)
_Key = Callable[[_T], Any]
class SortedDict(Dict[_KT, _VT]):
class SortedDict(dict[_KT, _VT]):
@overload
def __init__(self, **kwargs: _VT) -> None: ...
@overload
def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
@overload
def __init__(
self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
) -> None: ...
@overload
def __init__(self, __key: _Key[_KT], **kwargs: _VT) -> None: ...
@@ -52,7 +48,7 @@ class SortedDict(Dict[_KT, _VT]):
) -> None: ...
@overload
def __init__(
self, __key: _Key[_KT], __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
) -> None: ...
@property
def key(self) -> Optional[_Key[_KT]]: ...
@@ -84,8 +80,8 @@ class SortedDict(Dict[_KT, _VT]):
def pop(self, key: _KT) -> _VT: ...
@overload
def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ...
def popitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
def peekitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ...
# Mypy now reports the first overload as an error, because typeshed widened the type
# of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
@@ -102,9 +98,9 @@ class SortedDict(Dict[_KT, _VT]):
# def update(self, **kwargs: _VT) -> None: ...
def __reduce__(
self,
) -> Tuple[
Type[SortedDict[_KT, _VT]],
Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]],
) -> tuple[
type[SortedDict[_KT, _VT]],
tuple[Callable[[_KT], Any], list[tuple[_KT, _VT]]],
]: ...
def __repr__(self) -> str: ...
def _check(self) -> None: ...
@@ -121,20 +117,20 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
@overload
def __getitem__(self, index: int) -> _KT_co: ...
@overload
def __getitem__(self, index: slice) -> List[_KT_co]: ...
def __getitem__(self, index: slice) -> list[_KT_co]: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...
class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]):
def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
@overload
def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...
def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ...
@overload
def __getitem__(self, index: slice) -> List[Tuple[_KT_co, _VT_co]]: ...
def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...
class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]):
@overload
def __getitem__(self, index: int) -> _VT_co: ...
@overload
def __getitem__(self, index: slice) -> List[_VT_co]: ...
def __getitem__(self, index: slice) -> list[_VT_co]: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...


@@ -9,12 +9,9 @@ from typing import (
Callable,
Iterable,
Iterator,
List,
MutableSequence,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
overload,
@@ -37,11 +34,11 @@ class SortedList(MutableSequence[_T]):
): ...
# NB: currently mypy does not honour return type, see mypy #3307
@overload
def __new__(cls: Type[_SL], iterable: None, key: None) -> _SL: ...
def __new__(cls: type[_SL], iterable: None, key: None) -> _SL: ...
@overload
def __new__(cls: Type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
def __new__(cls: type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
@overload
def __new__(cls: Type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
def __new__(cls: type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
@overload
def __new__(cls, iterable: Iterable[_T], key: _Key[_T]) -> SortedKeyList[_T]: ...
@property
@@ -64,11 +61,11 @@ class SortedList(MutableSequence[_T]):
@overload
def __getitem__(self, index: int) -> _T: ...
@overload
def __getitem__(self, index: slice) -> List[_T]: ...
def __getitem__(self, index: slice) -> list[_T]: ...
@overload
def _getitem(self, index: int) -> _T: ...
@overload
def _getitem(self, index: slice) -> List[_T]: ...
def _getitem(self, index: slice) -> list[_T]: ...
@overload
def __setitem__(self, index: int, value: _T) -> None: ...
@overload
@@ -95,7 +92,7 @@ class SortedList(MutableSequence[_T]):
self,
minimum: Optional[int] = ...,
maximum: Optional[int] = ...,
inclusive: Tuple[bool, bool] = ...,
inclusive: tuple[bool, bool] = ...,
reverse: bool = ...,
) -> Iterator[_T]: ...
def bisect_left(self, value: _T) -> int: ...
@@ -151,14 +148,14 @@ class SortedKeyList(SortedList[_T]):
self,
minimum: Optional[int] = ...,
maximum: Optional[int] = ...,
inclusive: Tuple[bool, bool] = ...,
inclusive: tuple[bool, bool] = ...,
reverse: bool = ...,
) -> Iterator[_T]: ...
def irange_key(
self,
min_key: Optional[Any] = ...,
max_key: Optional[Any] = ...,
inclusive: Tuple[bool, bool] = ...,
inclusive: tuple[bool, bool] = ...,
reverse: bool = ...,
) -> Iterator[_T]: ...
def bisect_left(self, value: _T) -> int: ...


@@ -10,13 +10,9 @@ from typing import (
Hashable,
Iterable,
Iterator,
List,
MutableSet,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
overload,
@@ -37,7 +33,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
) -> None: ...
@classmethod
def _fromset(
cls, values: Set[_T], key: Optional[_Key[_T]] = ...
cls, values: set[_T], key: Optional[_Key[_T]] = ...
) -> SortedSet[_T]: ...
@property
def key(self) -> Optional[_Key[_T]]: ...
@@ -45,7 +41,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
@overload
def __getitem__(self, index: int) -> _T: ...
@overload
def __getitem__(self, index: slice) -> List[_T]: ...
def __getitem__(self, index: slice) -> list[_T]: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...
def __eq__(self, other: Any) -> bool: ...
def __ne__(self, other: Any) -> bool: ...
@@ -94,7 +90,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
def _update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
def __reduce__(
self,
) -> Tuple[Type[SortedSet[_T]], Set[_T], Callable[[_T], Any]]: ...
) -> tuple[type[SortedSet[_T]], set[_T], Callable[[_T], Any]]: ...
def __repr__(self) -> str: ...
def _check(self) -> None: ...
def bisect_left(self, value: _T) -> int: ...
@@ -109,7 +105,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
self,
minimum: Optional[_T] = ...,
maximum: Optional[_T] = ...,
inclusive: Tuple[bool, bool] = ...,
inclusive: tuple[bool, bool] = ...,
reverse: bool = ...,
) -> Iterator[_T]: ...
def index(


@@ -15,7 +15,7 @@
"""Contains *incomplete* type hints for txredisapi."""
from typing import Any, List, Optional, Type, Union
from typing import Any, Optional, Union
from twisted.internet import protocol
from twisted.internet.defer import Deferred
@@ -39,7 +39,7 @@ class RedisProtocol(protocol.Protocol):
class SubscriberProtocol(RedisProtocol):
def __init__(self, *args: object, **kwargs: object): ...
password: Optional[str]
def subscribe(self, channels: Union[str, List[str]]) -> "Deferred[None]": ...
def subscribe(self, channels: Union[str, list[str]]) -> "Deferred[None]": ...
def connectionMade(self) -> None: ...
# type-ignore: twisted.internet.protocol.Protocol provides a default argument for
# `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's
@@ -69,7 +69,7 @@ class UnixConnectionHandler(ConnectionHandler): ...
class RedisFactory(protocol.ReconnectingClientFactory):
continueTrying: bool
handler: ConnectionHandler
pool: List[RedisProtocol]
pool: list[RedisProtocol]
replyTimeout: Optional[int]
def __init__(
self,
@@ -77,7 +77,7 @@ class RedisFactory(protocol.ReconnectingClientFactory):
dbid: Optional[int],
poolsize: int,
isLazy: bool = False,
handler: Type = ConnectionHandler,
handler: type = ConnectionHandler,
charset: str = "utf-8",
password: Optional[str] = None,
replyTimeout: Optional[int] = None,


@@ -24,7 +24,7 @@
import os
import sys
from typing import Any, Dict
from typing import Any
from PIL import ImageFile
@@ -70,7 +70,7 @@ try:
from canonicaljson import register_preserialisation_callback
from immutabledict import immutabledict
def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]:
def _immutabledict_cb(d: immutabledict) -> dict[str, Any]:
try:
return d._dict
except Exception:
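For context, a sketch of how such a callback is registered with canonicaljson, assuming the straightforward dict conversion rather than the private-attribute fast path above:

from canonicaljson import register_preserialisation_callback
from immutabledict import immutabledict

# Teach the canonical JSON encoder to serialise immutabledicts as plain dicts.
register_preserialisation_callback(immutabledict, lambda d: dict(d))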


@@ -25,7 +25,7 @@ import logging
import re
from collections import defaultdict
from dataclasses import dataclass
from typing import Dict, Iterable, Optional, Pattern, Set, Tuple
from typing import Iterable, Optional, Pattern
import yaml
@@ -81,7 +81,7 @@ class EnumerationResource(HttpServer):
"""
def __init__(self, is_worker: bool) -> None:
self.registrations: Dict[Tuple[str, str], EndpointDescription] = {}
self.registrations: dict[tuple[str, str], EndpointDescription] = {}
self._is_worker = is_worker
def register_paths(
@@ -115,7 +115,7 @@ class EnumerationResource(HttpServer):
def get_registered_paths_for_hs(
hs: HomeServer,
) -> Dict[Tuple[str, str], EndpointDescription]:
) -> dict[tuple[str, str], EndpointDescription]:
"""
Given a homeserver, get all registered endpoints and their descriptions.
"""
@@ -142,7 +142,7 @@ def get_registered_paths_for_hs(
def get_registered_paths_for_default(
worker_app: Optional[str], base_config: HomeServerConfig
) -> Dict[Tuple[str, str], EndpointDescription]:
) -> dict[tuple[str, str], EndpointDescription]:
"""
Given the name of a worker application and a base homeserver configuration,
returns:
@@ -168,9 +168,9 @@ def get_registered_paths_for_default(
def elide_http_methods_if_unconflicting(
registrations: Dict[Tuple[str, str], EndpointDescription],
all_possible_registrations: Dict[Tuple[str, str], EndpointDescription],
) -> Dict[Tuple[str, str], EndpointDescription]:
registrations: dict[tuple[str, str], EndpointDescription],
all_possible_registrations: dict[tuple[str, str], EndpointDescription],
) -> dict[tuple[str, str], EndpointDescription]:
"""
Elides HTTP methods (by replacing them with `*`) if all possible registered methods
can be handled by the worker whose registration map is `registrations`.
@@ -180,13 +180,13 @@ def elide_http_methods_if_unconflicting(
"""
def paths_to_methods_dict(
methods_and_paths: Iterable[Tuple[str, str]],
) -> Dict[str, Set[str]]:
methods_and_paths: Iterable[tuple[str, str]],
) -> dict[str, set[str]]:
"""
Given (method, path) pairs, produces a dict from path to set of methods
available at that path.
"""
result: Dict[str, Set[str]] = {}
result: dict[str, set[str]] = {}
for method, path in methods_and_paths:
result.setdefault(path, set()).add(method)
return result
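A quick worked example of paths_to_methods_dict with hypothetical input:

pairs = [
    ("GET", "/_matrix/client/v3/sync"),
    ("PUT", "/_matrix/client/v3/rooms"),
    ("POST", "/_matrix/client/v3/rooms"),
]
# paths_to_methods_dict(pairs) would return:
# {"/_matrix/client/v3/sync": {"GET"},
#  "/_matrix/client/v3/rooms": {"PUT", "POST"}}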
@@ -210,8 +210,8 @@ def elide_http_methods_if_unconflicting(
def simplify_path_regexes(
registrations: Dict[Tuple[str, str], EndpointDescription],
) -> Dict[Tuple[str, str], EndpointDescription]:
registrations: dict[tuple[str, str], EndpointDescription],
) -> dict[tuple[str, str], EndpointDescription]:
"""
Simplify all the path regexes for the dict of endpoint descriptions,
so that we don't use the Python-specific regex extensions
@@ -270,8 +270,8 @@ def main() -> None:
# TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT
categories_to_methods_and_paths: Dict[
Optional[str], Dict[Tuple[str, str], EndpointDescription]
categories_to_methods_and_paths: dict[
Optional[str], dict[tuple[str, str], EndpointDescription]
] = defaultdict(dict)
for (method, path), desc in elided_worker_paths.items():
@@ -283,7 +283,7 @@ def main() -> None:
def print_category(
category_name: Optional[str],
elided_worker_paths: Dict[Tuple[str, str], EndpointDescription],
elided_worker_paths: dict[tuple[str, str], EndpointDescription],
) -> None:
"""
Prints out a category, in documentation page style.


@@ -26,7 +26,7 @@ import hashlib
import hmac
import logging
import sys
from typing import Any, Callable, Dict, Optional
from typing import Any, Callable, Optional
import requests
import yaml
@@ -262,7 +262,7 @@ def main() -> None:
args = parser.parse_args()
config: Optional[Dict[str, Any]] = None
config: Optional[dict[str, Any]] = None
if "config" in args and args.config:
config = yaml.safe_load(args.config)
@@ -350,7 +350,7 @@ def _read_file(file_path: Any, config_path: str) -> str:
sys.exit(1)
def _find_client_listener(config: Dict[str, Any]) -> Optional[str]:
def _find_client_listener(config: dict[str, Any]) -> Optional[str]:
# try to find a listener in the config. Returns a host:port pair
for listener in config.get("listeners", []):
if listener.get("type") != "http" or listener.get("tls", False):


@@ -23,7 +23,6 @@ import argparse
import sys
import time
from datetime import datetime
from typing import List
import attr
@@ -50,15 +49,15 @@ class ReviewConfig(RootConfig):
class UserInfo:
user_id: str
creation_ts: int
emails: List[str] = attr.Factory(list)
private_rooms: List[str] = attr.Factory(list)
public_rooms: List[str] = attr.Factory(list)
ips: List[str] = attr.Factory(list)
emails: list[str] = attr.Factory(list)
private_rooms: list[str] = attr.Factory(list)
public_rooms: list[str] = attr.Factory(list)
ips: list[str] = attr.Factory(list)
def get_recent_users(
txn: LoggingTransaction, since_ms: int, exclude_app_service: bool
) -> List[UserInfo]:
) -> list[UserInfo]:
"""Fetches recently registered users and some info on them."""
sql = """


@@ -33,15 +33,10 @@ from typing import (
Any,
Awaitable,
Callable,
Dict,
Generator,
Iterable,
List,
NoReturn,
Optional,
Set,
Tuple,
Type,
TypedDict,
TypeVar,
cast,
@@ -244,7 +239,7 @@ end_error: Optional[str] = None
# not the error then the script will show nothing outside of what's printed in the run
# function. If both are defined, the script will print both the error and the stacktrace.
end_error_exec_info: Optional[
Tuple[Type[BaseException], BaseException, TracebackType]
tuple[type[BaseException], BaseException, TracebackType]
] = None
R = TypeVar("R")
@@ -281,8 +276,8 @@ class Store(
def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
def execute_sql(self, sql: str, *args: object) -> Awaitable[List[Tuple]]:
def r(txn: LoggingTransaction) -> List[Tuple]:
def execute_sql(self, sql: str, *args: object) -> Awaitable[list[tuple]]:
def r(txn: LoggingTransaction) -> list[tuple]:
txn.execute(sql, args)
return txn.fetchall()
@@ -292,8 +287,8 @@ class Store(
self,
txn: LoggingTransaction,
table: str,
headers: List[str],
rows: List[Tuple],
headers: list[str],
rows: list[tuple],
override_system_value: bool = False,
) -> None:
sql = "INSERT INTO %s (%s) %s VALUES (%s)" % (
@@ -330,7 +325,7 @@ class MockHomeserver(HomeServer):
class Porter:
def __init__(
self,
sqlite_config: Dict[str, Any],
sqlite_config: dict[str, Any],
progress: "Progress",
batch_size: int,
hs: HomeServer,
@@ -340,7 +335,7 @@ class Porter:
self.batch_size = batch_size
self.hs = hs
async def setup_table(self, table: str) -> Tuple[str, int, int, int, int]:
async def setup_table(self, table: str) -> tuple[str, int, int, int, int]:
if table in APPEND_ONLY_TABLES:
# It's safe to just carry on inserting.
row = await self.postgres_store.db_pool.simple_select_one(
@@ -403,10 +398,10 @@ class Porter:
return table, already_ported, total_to_port, forward_chunk, backward_chunk
async def get_table_constraints(self) -> Dict[str, Set[str]]:
async def get_table_constraints(self) -> dict[str, set[str]]:
"""Returns a map of tables that have foreign key constraints to tables they depend on."""
def _get_constraints(txn: LoggingTransaction) -> Dict[str, Set[str]]:
def _get_constraints(txn: LoggingTransaction) -> dict[str, set[str]]:
# We can pull the information about foreign key constraints out from
# the postgres schema tables.
sql = """
@@ -422,7 +417,7 @@ class Porter:
"""
txn.execute(sql)
results: Dict[str, Set[str]] = {}
results: dict[str, set[str]] = {}
for table, foreign_table in txn:
results.setdefault(table, set()).add(foreign_table)
return results
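For illustration, one standard way to pull those foreign-key edges from PostgreSQL's information schema; this is a sketch of the approach, not necessarily the exact query elided above:

FK_SQL = """
    SELECT tc.table_name, ccu.table_name AS foreign_table
    FROM information_schema.table_constraints AS tc
    JOIN information_schema.constraint_column_usage AS ccu
        ON tc.constraint_name = ccu.constraint_name
    WHERE tc.constraint_type = 'FOREIGN KEY'
"""

def fk_edges(txn) -> dict[str, set[str]]:
    # Map each table to the set of tables it references.
    txn.execute(FK_SQL)
    edges: dict[str, set[str]] = {}
    for table, foreign_table in txn:
        edges.setdefault(table, set()).add(foreign_table)
    return edges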
@@ -490,7 +485,7 @@ class Porter:
def r(
txn: LoggingTransaction,
) -> Tuple[Optional[List[str]], List[Tuple], List[Tuple]]:
) -> tuple[Optional[list[str]], list[tuple], list[tuple]]:
forward_rows = []
backward_rows = []
if do_forward[0]:
@@ -507,7 +502,7 @@ class Porter:
if forward_rows or backward_rows:
assert txn.description is not None
headers: Optional[List[str]] = [
headers: Optional[list[str]] = [
column[0] for column in txn.description
]
else:
@@ -574,7 +569,7 @@ class Porter:
while True:
def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
txn.execute(select, (forward_chunk, self.batch_size))
rows = txn.fetchall()
assert txn.description is not None
@@ -956,7 +951,7 @@ class Porter:
self.progress.set_state("Copying to postgres")
constraints = await self.get_table_constraints()
tables_ported = set() # type: Set[str]
tables_ported = set() # type: set[str]
while tables_to_port_info_map:
# Pulls out all tables that are still to be ported and which
@@ -995,8 +990,8 @@ class Porter:
reactor.stop()
def _convert_rows(
self, table: str, headers: List[str], rows: List[Tuple]
) -> List[Tuple]:
self, table: str, headers: list[str], rows: list[tuple]
) -> list[tuple]:
bool_col_names = BOOLEAN_COLUMNS.get(table, [])
bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]
@@ -1030,7 +1025,7 @@ class Porter:
return outrows
async def _setup_sent_transactions(self) -> Tuple[int, int, int]:
async def _setup_sent_transactions(self) -> tuple[int, int, int]:
# Only save things from the last day
yesterday = int(time.time() * 1000) - 86400000
@@ -1042,7 +1037,7 @@ class Porter:
")"
)
def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
txn.execute(select)
rows = txn.fetchall()
assert txn.description is not None
@@ -1112,14 +1107,14 @@ class Porter:
self, table: str, forward_chunk: int, backward_chunk: int
) -> int:
frows = cast(
List[Tuple[int]],
list[tuple[int]],
await self.sqlite_store.execute_sql(
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
),
)
brows = cast(
List[Tuple[int]],
list[tuple[int]],
await self.sqlite_store.execute_sql(
"SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
),
@@ -1136,7 +1131,7 @@ class Porter:
async def _get_total_count_to_port(
self, table: str, forward_chunk: int, backward_chunk: int
) -> Tuple[int, int]:
) -> tuple[int, int]:
remaining, done = await make_deferred_yieldable(
defer.gatherResults(
[
@@ -1221,7 +1216,7 @@ class Porter:
async def _setup_sequence(
self,
sequence_name: str,
stream_id_tables: Iterable[Tuple[str, str]],
stream_id_tables: Iterable[tuple[str, str]],
) -> None:
"""Set a sequence to the correct value."""
current_stream_ids = []
@@ -1331,7 +1326,7 @@ class Progress:
"""Used to report progress of the port"""
def __init__(self) -> None:
self.tables: Dict[str, TableProgress] = {}
self.tables: dict[str, TableProgress] = {}
self.start_time = int(time.time())


@@ -18,7 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import TYPE_CHECKING, Optional, Protocol, Tuple
from typing import TYPE_CHECKING, Optional, Protocol
from prometheus_client import Histogram
@@ -51,7 +51,7 @@ class Auth(Protocol):
room_id: str,
requester: Requester,
allow_departed_users: bool = False,
) -> Tuple[str, Optional[str]]:
) -> tuple[str, Optional[str]]:
"""Check if the user is in the room, or was at some point.
Args:
room_id: The room to check.
@@ -190,7 +190,7 @@ class Auth(Protocol):
async def check_user_in_room_or_world_readable(
self, room_id: str, requester: Requester, allow_departed_users: bool = False
) -> Tuple[str, Optional[str]]:
) -> tuple[str, Optional[str]]:
"""Checks that the user is or was in the room or the room is world
readable. If it isn't then an exception is raised.


@@ -19,7 +19,7 @@
#
#
import logging
from typing import TYPE_CHECKING, Optional, Tuple
from typing import TYPE_CHECKING, Optional
from netaddr import IPAddress
@@ -64,7 +64,7 @@ class BaseAuth:
room_id: str,
requester: Requester,
allow_departed_users: bool = False,
) -> Tuple[str, Optional[str]]:
) -> tuple[str, Optional[str]]:
"""Check if the user is in the room, or was at some point.
Args:
room_id: The room to check.
@@ -114,7 +114,7 @@ class BaseAuth:
@trace
async def check_user_in_room_or_world_readable(
self, room_id: str, requester: Requester, allow_departed_users: bool = False
) -> Tuple[str, Optional[str]]:
) -> tuple[str, Optional[str]]:
"""Checks that the user is or was in the room or the room is world
readable. If it isn't then an exception is raised.


@@ -13,7 +13,7 @@
#
#
import logging
from typing import TYPE_CHECKING, Optional, Set
from typing import TYPE_CHECKING, Optional
from urllib.parse import urlencode
from synapse._pydantic_compat import (
@@ -369,7 +369,7 @@ class MasDelegatedAuth(BaseAuth):
# We only allow a single device_id in the scope, so we find them all in the
# scope list, and raise if there are more than one. The OIDC server should be
# the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
device_ids: Set[str] = set()
device_ids: set[str] = set()
for tok in scope:
if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])
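The enforcement the comment describes would then look roughly like this; a sketch of the stated rule, with the error type assumed:

from synapse.api.errors import AuthError  # assumed import

if len(device_ids) > 1:
    # Scopes are validated upstream, so multiple device scopes indicate
    # a server-side bug rather than a client error: answer with a 500.
    raise AuthError(500, "Multiple device scopes in access token")
device_id = next(iter(device_ids), None)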


@@ -20,7 +20,7 @@
#
import logging
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set
from typing import TYPE_CHECKING, Any, Callable, Optional
from urllib.parse import urlencode
from authlib.oauth2 import ClientAuth
@@ -70,7 +70,7 @@ STABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"
SCOPE_SYNAPSE_ADMIN = "urn:synapse:admin:*"
def scope_to_list(scope: str) -> List[str]:
def scope_to_list(scope: str) -> list[str]:
"""Convert a scope string to a list of scope tokens"""
return scope.strip().split(" ")
@@ -96,7 +96,7 @@ class IntrospectionResult:
absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms
return now_ms < absolute_expiry_ms
def get_scope_list(self) -> List[str]:
def get_scope_list(self) -> list[str]:
value = self._inner.get("scope")
if not isinstance(value, str):
return []
@@ -264,7 +264,7 @@ class MSC3861DelegatedAuth(BaseAuth):
logger.warning("Failed to load metadata:", exc_info=True)
return None
async def auth_metadata(self) -> Dict[str, Any]:
async def auth_metadata(self) -> dict[str, Any]:
"""
Returns the auth metadata dict
"""
@@ -303,7 +303,7 @@ class MSC3861DelegatedAuth(BaseAuth):
# By default, we shouldn't cache the result unless we know it's valid
cache_context.should_cache = False
introspection_endpoint = await self._introspection_endpoint()
raw_headers: Dict[str, str] = {
raw_headers: dict[str, str] = {
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "application/json",
# Tell MAS that we support reading the device ID as an explicit
@@ -520,7 +520,7 @@ class MSC3861DelegatedAuth(BaseAuth):
raise InvalidClientTokenError("Token is not active")
# Let's look at the scope
scope: List[str] = introspection_result.get_scope_list()
scope: list[str] = introspection_result.get_scope_list()
# Determine type of user based on presence of particular scopes
has_user_scope = (
@@ -575,7 +575,7 @@ class MSC3861DelegatedAuth(BaseAuth):
# We only allow a single device_id in the scope, so we find them all in the
# scope list, and raise if there are more than one. The OIDC server should be
# the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
device_ids: Set[str] = set()
device_ids: set[str] = set()
for tok in scope:
if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])


@@ -26,7 +26,7 @@ import math
import typing
from enum import Enum
from http import HTTPStatus
from typing import Any, Dict, List, Optional, Union
from typing import Any, Optional, Union
from twisted.web import http
@@ -166,7 +166,7 @@ class CodeMessageException(RuntimeError):
self,
code: Union[int, HTTPStatus],
msg: str,
headers: Optional[Dict[str, str]] = None,
headers: Optional[dict[str, str]] = None,
):
super().__init__("%d: %s" % (code, msg))
@@ -201,7 +201,7 @@ class RedirectException(CodeMessageException):
super().__init__(code=http_code, msg=msg)
self.location = location
self.cookies: List[bytes] = []
self.cookies: list[bytes] = []
class SynapseError(CodeMessageException):
@@ -223,8 +223,8 @@ class SynapseError(CodeMessageException):
code: int,
msg: str,
errcode: str = Codes.UNKNOWN,
additional_fields: Optional[Dict] = None,
headers: Optional[Dict[str, str]] = None,
additional_fields: Optional[dict] = None,
headers: Optional[dict[str, str]] = None,
):
"""Constructs a synapse error.
@@ -236,7 +236,7 @@ class SynapseError(CodeMessageException):
super().__init__(code, msg, headers)
self.errcode = errcode
if additional_fields is None:
self._additional_fields: Dict = {}
self._additional_fields: dict = {}
else:
self._additional_fields = dict(additional_fields)
@@ -276,7 +276,7 @@ class ProxiedRequestError(SynapseError):
code: int,
msg: str,
errcode: str = Codes.UNKNOWN,
additional_fields: Optional[Dict] = None,
additional_fields: Optional[dict] = None,
):
super().__init__(code, msg, errcode, additional_fields)
@@ -409,7 +409,7 @@ class OAuthInsufficientScopeError(SynapseError):
def __init__(
self,
required_scopes: List[str],
required_scopes: list[str],
):
headers = {
"WWW-Authenticate": 'Bearer error="insufficient_scope", scope="%s"'


@@ -26,12 +26,9 @@ from typing import (
Awaitable,
Callable,
Collection,
Dict,
Iterable,
List,
Mapping,
Optional,
Set,
TypeVar,
Union,
)
@@ -248,34 +245,34 @@ class FilterCollection:
async def filter_presence(
self, presence_states: Iterable[UserPresenceState]
) -> List[UserPresenceState]:
) -> list[UserPresenceState]:
return await self._presence_filter.filter(presence_states)
async def filter_global_account_data(
self, events: Iterable[JsonDict]
) -> List[JsonDict]:
) -> list[JsonDict]:
return await self._global_account_data_filter.filter(events)
async def filter_room_state(self, events: Iterable[EventBase]) -> List[EventBase]:
async def filter_room_state(self, events: Iterable[EventBase]) -> list[EventBase]:
return await self._room_state_filter.filter(
await self._room_filter.filter(events)
)
async def filter_room_timeline(
self, events: Iterable[EventBase]
) -> List[EventBase]:
) -> list[EventBase]:
return await self._room_timeline_filter.filter(
await self._room_filter.filter(events)
)
async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> List[JsonDict]:
async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> list[JsonDict]:
return await self._room_ephemeral_filter.filter(
await self._room_filter.filter(events)
)
async def filter_room_account_data(
self, events: Iterable[JsonDict]
) -> List[JsonDict]:
) -> list[JsonDict]:
return await self._room_account_data_filter.filter(
await self._room_filter.filter(events)
)
@@ -440,7 +437,7 @@ class Filter:
return True
def _check_fields(self, field_matchers: Dict[str, Callable[[str], bool]]) -> bool:
def _check_fields(self, field_matchers: dict[str, Callable[[str], bool]]) -> bool:
"""Checks whether the filter matches the given event fields.
Args:
@@ -474,7 +471,7 @@ class Filter:
# Otherwise, accept it.
return True
def filter_rooms(self, room_ids: Iterable[str]) -> Set[str]:
def filter_rooms(self, room_ids: Iterable[str]) -> set[str]:
"""Apply the 'rooms' filter to a given list of rooms.
Args:
@@ -496,7 +493,7 @@ class Filter:
async def _check_event_relations(
self, events: Collection[FilterEvent]
) -> List[FilterEvent]:
) -> list[FilterEvent]:
# The event IDs to check; mypy doesn't understand the isinstance check.
event_ids = [event.event_id for event in events if isinstance(event, EventBase)] # type: ignore[attr-defined]
event_ids_to_keep = set(
@@ -511,7 +508,7 @@ class Filter:
if not isinstance(event, EventBase) or event.event_id in event_ids_to_keep
]
async def filter(self, events: Iterable[FilterEvent]) -> List[FilterEvent]:
async def filter(self, events: Iterable[FilterEvent]) -> list[FilterEvent]:
result = [event for event in events if self._check(event)]
if self.related_by_senders or self.related_by_rel_types:
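Stepping back from the annotation changes for a moment: `_check_fields` above takes a mapping from field names to predicate callables. Here is a hedged sketch of how such a matcher table can be applied to an event dict, assuming dotted paths into nested fields; this is illustrative, not Synapse's actual traversal logic:

```python
from typing import Callable

def check_fields(event: dict, field_matchers: dict[str, Callable[[str], bool]]) -> bool:
    """Every matcher, keyed by a (dotted) field path, must accept the value found there."""
    for path, matcher in field_matchers.items():
        value = event
        for part in path.split("."):
            value = value.get(part) if isinstance(value, dict) else None
        if not isinstance(value, str) or not matcher(value):
            return False
    return True

event = {"type": "m.room.message", "content": {"msgtype": "m.text"}}
assert check_fields(event, {"type": lambda v: v == "m.room.message"})
assert not check_fields(event, {"content.msgtype": lambda v: v == "m.image"})
```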

@@ -20,7 +20,7 @@
#
#
from typing import TYPE_CHECKING, Dict, Hashable, Optional, Tuple
from typing import TYPE_CHECKING, Hashable, Optional
from synapse.api.errors import LimitExceededError
from synapse.config.ratelimiting import RatelimitSettings
@@ -92,7 +92,7 @@ class Ratelimiter:
# * The number of tokens currently in the bucket,
# * The time point when the bucket was last completely empty, and
# * The rate_hz (leak rate) of this particular bucket.
self.actions: Dict[Hashable, Tuple[float, float, float]] = {}
self.actions: dict[Hashable, tuple[float, float, float]] = {}
self.clock.looping_call(self._prune_message_counts, 60 * 1000)
@@ -109,7 +109,7 @@ class Ratelimiter:
def _get_action_counts(
self, key: Hashable, time_now_s: float
) -> Tuple[float, float, float]:
) -> tuple[float, float, float]:
"""Retrieve the action counts, with a fallback representing an empty bucket."""
return self.actions.get(key, (0.0, time_now_s, 0.0))
@@ -122,7 +122,7 @@ class Ratelimiter:
update: bool = True,
n_actions: int = 1,
_time_now_s: Optional[float] = None,
) -> Tuple[bool, float]:
) -> tuple[bool, float]:
"""Can the entity (e.g. user or IP address) perform the action?
Checks if the user has ratelimiting disabled in the database by looking
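The comments above describe the per-key state as (tokens in the bucket, a reference timestamp, a per-key leak rate), with `can_do_action` returning a (permitted, retry-time) pair. As a rough illustration of how such triples drive a leaky-bucket check, here is a much-simplified, self-contained sketch; it is not Synapse's implementation, and the names and exact bookkeeping are illustrative:

```python
import time
from typing import Hashable

class SimpleRatelimiter:
    """A leaky bucket per key: actions fill the bucket, elapsed time drains it."""

    def __init__(self, rate_hz: float, burst_count: float) -> None:
        self.rate_hz = rate_hz          # tokens drained per second
        self.burst_count = burst_count  # bucket capacity
        # key -> (tokens currently in the bucket, time we last updated it)
        self._buckets: dict[Hashable, tuple[float, float]] = {}

    def can_do_action(self, key: Hashable, n_actions: int = 1) -> tuple[bool, float]:
        now = time.monotonic()
        tokens, last_seen = self._buckets.get(key, (0.0, now))
        # Drain the bucket for the time elapsed since we last saw this key.
        tokens = max(0.0, tokens - (now - last_seen) * self.rate_hz)
        allowed = tokens + n_actions <= self.burst_count
        if allowed:
            tokens += n_actions
        self._buckets[key] = (tokens, now)
        # Second element: the time at which the action would be permitted.
        wait = 0.0 if allowed else (tokens + n_actions - self.burst_count) / self.rate_hz
        return allowed, now + wait
```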

@@ -18,7 +18,7 @@
#
#
from typing import Callable, Dict, Optional, Tuple
from typing import Callable, Optional
import attr
@@ -109,7 +109,7 @@ class RoomVersion:
# is not enough to mark it "supported": the push rule evaluator also needs to
# support the flag. Unknown flags are ignored by the evaluator, making conditions
# fail if used.
msc3931_push_features: Tuple[str, ...] # values from PushRuleRoomFlag
msc3931_push_features: tuple[str, ...] # values from PushRuleRoomFlag
# MSC3757: Restricting who can overwrite a state event
msc3757_enabled: bool
# MSC4289: Creator power enabled
@@ -476,7 +476,7 @@ class RoomVersions:
)
KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
KNOWN_ROOM_VERSIONS: dict[str, RoomVersion] = {
v.identifier: v
for v in (
RoomVersions.V1,

@@ -34,11 +34,8 @@ from typing import (
Any,
Awaitable,
Callable,
Dict,
List,
NoReturn,
Optional,
Tuple,
cast,
)
from wsgiref.simple_server import WSGIServer
@@ -98,8 +95,8 @@ reactor = cast(ISynapseReactor, _reactor)
logger = logging.getLogger(__name__)
_instance_id_to_sighup_callbacks_map: Dict[
str, List[Tuple[Callable[..., None], Tuple[object, ...], Dict[str, object]]]
_instance_id_to_sighup_callbacks_map: dict[
str, list[tuple[Callable[..., None], tuple[object, ...], dict[str, object]]]
] = {}
"""
Map from homeserver instance_id to a list of callbacks.
@@ -176,7 +173,7 @@ def start_worker_reactor(
def start_reactor(
appname: str,
soft_file_limit: int,
gc_thresholds: Optional[Tuple[int, int, int]],
gc_thresholds: Optional[tuple[int, int, int]],
pid_file: Optional[str],
daemonize: bool,
print_pidfile: bool,
@@ -309,7 +306,7 @@ def register_start(
def listen_metrics(
bind_addresses: StrCollection, port: int
) -> List[Tuple[WSGIServer, Thread]]:
) -> list[tuple[WSGIServer, Thread]]:
"""
Start Prometheus metrics server.
@@ -330,7 +327,7 @@ def listen_metrics(
from synapse.metrics import RegistryProxy
servers: List[Tuple[WSGIServer, Thread]] = []
servers: list[tuple[WSGIServer, Thread]] = []
for host in bind_addresses:
logger.info("Starting metrics listener on %s:%d", host, port)
server, thread = start_http_server_prometheus(
@@ -345,7 +342,7 @@ def listen_manhole(
port: int,
manhole_settings: ManholeConfig,
manhole_globals: dict,
) -> List[Port]:
) -> list[Port]:
# twisted.conch.manhole 21.1.0 uses "int_from_bytes", which produces a confusing
# warning. It's fixed by https://github.com/twisted/twisted/pull/1522, so
# suppress the warning for now.
@@ -370,7 +367,7 @@ def listen_tcp(
factory: ServerFactory,
reactor: IReactorTCP = reactor,
backlog: int = 50,
) -> List[Port]:
) -> list[Port]:
"""
Create a TCP socket for a port and several addresses
@@ -395,7 +392,7 @@ def listen_unix(
factory: ServerFactory,
reactor: IReactorUNIX = reactor,
backlog: int = 50,
) -> List[Port]:
) -> list[Port]:
"""
Create a UNIX socket for a given path and 'mode' permission
@@ -419,7 +416,7 @@ def listen_http(
max_request_body_size: int,
context_factory: Optional[IOpenSSLContextFactory],
reactor: ISynapseReactor = reactor,
) -> List[Port]:
) -> list[Port]:
"""
Args:
listener_config: TODO
@@ -489,7 +486,7 @@ def listen_ssl(
context_factory: IOpenSSLContextFactory,
reactor: IReactorSSL = reactor,
backlog: int = 50,
) -> List[Port]:
) -> list[Port]:
"""
Create an TLS-over-TCP socket for a port and several addresses

@@ -24,7 +24,7 @@ import logging
import os
import sys
import tempfile
from typing import List, Mapping, Optional, Sequence, Tuple
from typing import Mapping, Optional, Sequence
from twisted.internet import defer, task
@@ -150,7 +150,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
if list(os.listdir(self.base_directory)):
raise Exception("Directory must be empty")
def write_events(self, room_id: str, events: List[EventBase]) -> None:
def write_events(self, room_id: str, events: list[EventBase]) -> None:
room_directory = os.path.join(self.base_directory, "rooms", room_id)
os.makedirs(room_directory, exist_ok=True)
events_file = os.path.join(room_directory, "events")
@@ -255,7 +255,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
return self.base_directory
def load_config(argv_options: List[str]) -> Tuple[HomeServerConfig, argparse.Namespace]:
def load_config(argv_options: list[str]) -> tuple[HomeServerConfig, argparse.Namespace]:
parser = argparse.ArgumentParser(description="Synapse Admin Command")
HomeServerConfig.add_arguments_to_parser(parser)

@@ -26,13 +26,13 @@ import os
import signal
import sys
from types import FrameType
from typing import Any, Callable, Dict, List, Optional
from typing import Any, Callable, Optional
from twisted.internet.main import installReactor
# a list of the original signal handlers, before we installed our custom ones.
# We restore these in our child processes.
_original_signal_handlers: Dict[int, Any] = {}
_original_signal_handlers: dict[int, Any] = {}
class ProxiedReactor:
@@ -72,7 +72,7 @@ class ProxiedReactor:
def _worker_entrypoint(
func: Callable[[], None], proxy_reactor: ProxiedReactor, args: List[str]
func: Callable[[], None], proxy_reactor: ProxiedReactor, args: list[str]
) -> None:
"""
Entrypoint for a forked worker process.
@@ -128,7 +128,7 @@ def main() -> None:
# Split up the subsequent arguments into each workers' arguments;
# `--` is our delimiter of choice.
args_by_worker: List[List[str]] = [
args_by_worker: list[list[str]] = [
list(args)
for cond, args in itertools.groupby(ns.args, lambda ele: ele != "--")
if cond and args
@@ -167,7 +167,7 @@ def main() -> None:
update_proc.join()
print("===== PREPARED DATABASE =====", file=sys.stderr)
processes: List[multiprocessing.Process] = []
processes: list[multiprocessing.Process] = []
# Install signal handlers to propagate signals to all our children, so that they
# shut down cleanly. This also inhibits our own exit, but that's good: we want to

@@ -21,7 +21,6 @@
#
import logging
import sys
from typing import Dict, List
from twisted.web.resource import Resource
@@ -181,7 +180,7 @@ class GenericWorkerServer(HomeServer):
# We always include an admin resource that we populate with servlets as needed
admin_resource = JsonResource(self, canonical_json=False)
resources: Dict[str, Resource] = {
resources: dict[str, Resource] = {
# We always include a health resource.
"/health": HealthResource(),
"/_synapse/admin": admin_resource,
@@ -314,7 +313,7 @@ class GenericWorkerServer(HomeServer):
self.get_replication_command_handler().start_replication(self)
def load_config(argv_options: List[str]) -> HomeServerConfig:
def load_config(argv_options: list[str]) -> HomeServerConfig:
"""
Parse the commandline and config files (does not generate config)

@@ -22,7 +22,7 @@
import logging
import os
import sys
from typing import Dict, Iterable, List, Optional
from typing import Iterable, Optional
from twisted.internet.tcp import Port
from twisted.web.resource import EncodingResourceWrapper, Resource
@@ -99,7 +99,7 @@ class SynapseHomeServer(HomeServer):
site_tag = listener_config.get_site_tag()
# We always include a health resource.
resources: Dict[str, Resource] = {"/health": HealthResource()}
resources: dict[str, Resource] = {"/health": HealthResource()}
for res in listener_config.http_options.resources:
for name in res.names:
@@ -170,7 +170,7 @@ class SynapseHomeServer(HomeServer):
def _configure_named_resource(
self, name: str, compress: bool = False
) -> Dict[str, Resource]:
) -> dict[str, Resource]:
"""Build a resource map for a named resource
Args:
@@ -180,7 +180,7 @@ class SynapseHomeServer(HomeServer):
Returns:
map from path to HTTP resource
"""
resources: Dict[str, Resource] = {}
resources: dict[str, Resource] = {}
if name == "client":
client_resource: Resource = ClientRestResource(self)
if compress:
@@ -318,7 +318,7 @@ class SynapseHomeServer(HomeServer):
logger.warning("Unrecognized listener type: %s", listener.type)
def load_or_generate_config(argv_options: List[str]) -> HomeServerConfig:
def load_or_generate_config(argv_options: list[str]) -> HomeServerConfig:
"""
Parse the commandline and config files

@@ -22,7 +22,7 @@ import logging
import math
import resource
import sys
from typing import TYPE_CHECKING, List, Mapping, Sized, Tuple
from typing import TYPE_CHECKING, Mapping, Sized
from prometheus_client import Gauge
@@ -54,7 +54,7 @@ Phone home stats are sent every 3 hours
# Contains the list of processes we will be monitoring
# currently either 0 or 1
_stats_process: List[Tuple[int, "resource.struct_rusage"]] = []
_stats_process: list[tuple[int, "resource.struct_rusage"]] = []
# Gauges to expose monthly active user control metrics
current_mau_gauge = Gauge(
@@ -82,12 +82,12 @@ registered_reserved_users_mau_gauge = Gauge(
def phone_stats_home(
hs: "HomeServer",
stats: JsonDict,
stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process,
stats_process: list[tuple[int, "resource.struct_rusage"]] = _stats_process,
) -> "defer.Deferred[None]":
async def _phone_stats_home(
hs: "HomeServer",
stats: JsonDict,
stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process,
stats_process: list[tuple[int, "resource.struct_rusage"]] = _stats_process,
) -> None:
"""Collect usage statistics and send them to the configured endpoint.

@@ -25,9 +25,7 @@ import re
from enum import Enum
from typing import (
TYPE_CHECKING,
Dict,
Iterable,
List,
Optional,
Pattern,
Sequence,
@@ -59,11 +57,11 @@ logger = logging.getLogger(__name__)
# Type for the `device_one_time_keys_count` field in an appservice transaction
# user ID -> {device ID -> {algorithm -> count}}
TransactionOneTimeKeysCount = Dict[str, Dict[str, Dict[str, int]]]
TransactionOneTimeKeysCount = dict[str, dict[str, dict[str, int]]]
# Type for the `device_unused_fallback_key_types` field in an appservice transaction
# user ID -> {device ID -> [algorithm]}
TransactionUnusedFallbackKeys = Dict[str, Dict[str, List[str]]]
TransactionUnusedFallbackKeys = dict[str, dict[str, list[str]]]
class ApplicationServiceState(Enum):
@@ -145,7 +143,7 @@ class ApplicationService:
def _check_namespaces(
self, namespaces: Optional[JsonDict]
) -> Dict[str, List[Namespace]]:
) -> dict[str, list[Namespace]]:
# Sanity check that it is of the form:
# {
# users: [ {regex: "[A-z]+.*", exclusive: true}, ...],
@@ -155,7 +153,7 @@ class ApplicationService:
if namespaces is None:
namespaces = {}
result: Dict[str, List[Namespace]] = {}
result: dict[str, list[Namespace]] = {}
for ns in ApplicationService.NS_LIST:
result[ns] = []
@@ -388,7 +386,7 @@ class ApplicationService:
def is_exclusive_room(self, room_id: str) -> bool:
return self._is_exclusive(ApplicationService.NS_ROOMS, room_id)
def get_exclusive_user_regexes(self) -> List[Pattern[str]]:
def get_exclusive_user_regexes(self) -> list[Pattern[str]]:
"""Get the list of regexes used to determine if a user is exclusively
registered by the AS
"""
@@ -417,8 +415,8 @@ class AppServiceTransaction:
service: ApplicationService,
id: int,
events: Sequence[EventBase],
ephemeral: List[JsonMapping],
to_device_messages: List[JsonMapping],
ephemeral: list[JsonMapping],
to_device_messages: list[JsonMapping],
one_time_keys_count: TransactionOneTimeKeysCount,
unused_fallback_keys: TransactionUnusedFallbackKeys,
device_list_summary: DeviceListUpdates,

@@ -23,13 +23,10 @@ import logging
import urllib.parse
from typing import (
TYPE_CHECKING,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
@@ -133,14 +130,14 @@ class ApplicationServiceApi(SimpleHttpClient):
self.clock = hs.get_clock()
self.config = hs.config.appservice
self.protocol_meta_cache: ResponseCache[Tuple[str, str]] = ResponseCache(
self.protocol_meta_cache: ResponseCache[tuple[str, str]] = ResponseCache(
clock=hs.get_clock(),
name="as_protocol_meta",
server_name=self.server_name,
timeout_ms=HOUR_IN_MS,
)
def _get_headers(self, service: "ApplicationService") -> Dict[bytes, List[bytes]]:
def _get_headers(self, service: "ApplicationService") -> dict[bytes, list[bytes]]:
"""This makes sure we have always the auth header and opentracing headers set."""
# This is also ensured before in the functions. However this is needed to please
@@ -210,8 +207,8 @@ class ApplicationServiceApi(SimpleHttpClient):
service: "ApplicationService",
kind: str,
protocol: str,
fields: Dict[bytes, List[bytes]],
) -> List[JsonDict]:
fields: dict[bytes, list[bytes]],
) -> list[JsonDict]:
if kind == ThirdPartyEntityKind.USER:
required_field = "userid"
elif kind == ThirdPartyEntityKind.LOCATION:
@@ -225,7 +222,7 @@ class ApplicationServiceApi(SimpleHttpClient):
assert service.hs_token is not None
try:
args: Mapping[bytes, Union[List[bytes], str]] = fields
args: Mapping[bytes, Union[list[bytes], str]] = fields
if self.config.use_appservice_legacy_authorization:
args = {
**fields,
@@ -320,8 +317,8 @@ class ApplicationServiceApi(SimpleHttpClient):
self,
service: "ApplicationService",
events: Sequence[EventBase],
ephemeral: List[JsonMapping],
to_device_messages: List[JsonMapping],
ephemeral: list[JsonMapping],
to_device_messages: list[JsonMapping],
one_time_keys_count: TransactionOneTimeKeysCount,
unused_fallback_keys: TransactionUnusedFallbackKeys,
device_list_summary: DeviceListUpdates,
@@ -429,9 +426,9 @@ class ApplicationServiceApi(SimpleHttpClient):
return False
async def claim_client_keys(
self, service: "ApplicationService", query: List[Tuple[str, str, str, int]]
) -> Tuple[
Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]]
self, service: "ApplicationService", query: list[tuple[str, str, str, int]]
) -> tuple[
dict[str, dict[str, dict[str, JsonDict]]], list[tuple[str, str, str, int]]
]:
"""Claim one time keys from an application service.
@@ -457,7 +454,7 @@ class ApplicationServiceApi(SimpleHttpClient):
assert service.hs_token is not None
# Create the expected payload shape.
body: Dict[str, Dict[str, List[str]]] = {}
body: dict[str, dict[str, list[str]]] = {}
for user_id, device, algorithm, count in query:
body.setdefault(user_id, {}).setdefault(device, []).extend(
[algorithm] * count
@@ -502,8 +499,8 @@ class ApplicationServiceApi(SimpleHttpClient):
return response, missing
async def query_keys(
self, service: "ApplicationService", query: Dict[str, List[str]]
) -> Dict[str, Dict[str, Dict[str, JsonDict]]]:
self, service: "ApplicationService", query: dict[str, list[str]]
) -> dict[str, dict[str, dict[str, JsonDict]]]:
"""Query the application service for keys.
Note that any error (including a timeout) is treated as the application
@@ -545,7 +542,7 @@ class ApplicationServiceApi(SimpleHttpClient):
def _serialize(
self, service: "ApplicationService", events: Iterable[EventBase]
) -> List[JsonDict]:
) -> list[JsonDict]:
time_now = self.clock.time_msec()
return [
serialize_event(

@@ -61,13 +61,9 @@ from typing import (
Awaitable,
Callable,
Collection,
Dict,
Iterable,
List,
Optional,
Sequence,
Set,
Tuple,
)
from twisted.internet.interfaces import IDelayedCall
@@ -183,16 +179,16 @@ class _ServiceQueuer:
def __init__(self, txn_ctrl: "_TransactionController", hs: "HomeServer"):
# dict of {service_id: [events]}
self.queued_events: Dict[str, List[EventBase]] = {}
self.queued_events: dict[str, list[EventBase]] = {}
# dict of {service_id: [events]}
self.queued_ephemeral: Dict[str, List[JsonMapping]] = {}
self.queued_ephemeral: dict[str, list[JsonMapping]] = {}
# dict of {service_id: [to_device_message_json]}
self.queued_to_device_messages: Dict[str, List[JsonMapping]] = {}
self.queued_to_device_messages: dict[str, list[JsonMapping]] = {}
# dict of {service_id: [device_list_summary]}
self.queued_device_list_summaries: Dict[str, List[DeviceListUpdates]] = {}
self.queued_device_list_summaries: dict[str, list[DeviceListUpdates]] = {}
# the appservices which currently have a transaction in flight
self.requests_in_flight: Set[str] = set()
self.requests_in_flight: set[str] = set()
self.txn_ctrl = txn_ctrl
self._msc3202_transaction_extensions_enabled: bool = (
hs.config.experimental.msc3202_transaction_extensions
@@ -302,7 +298,7 @@ class _ServiceQueuer:
events: Iterable[EventBase],
ephemerals: Iterable[JsonMapping],
to_device_messages: Iterable[JsonMapping],
) -> Tuple[TransactionOneTimeKeysCount, TransactionUnusedFallbackKeys]:
) -> tuple[TransactionOneTimeKeysCount, TransactionUnusedFallbackKeys]:
"""
Given a list of events, ephemeral messages and to-device messages,
- first computes a list of application service users that may have
@@ -313,14 +309,14 @@ class _ServiceQueuer:
"""
# Set of 'interesting' users who may have updates
users: Set[str] = set()
users: set[str] = set()
# The sender is always included
users.add(service.sender.to_string())
# All AS users that would receive the PDUs or EDUs sent to these rooms
# are classed as 'interesting'.
rooms_of_interesting_users: Set[str] = set()
rooms_of_interesting_users: set[str] = set()
# PDUs
rooms_of_interesting_users.update(event.room_id for event in events)
# EDUs
@@ -364,7 +360,7 @@ class _TransactionController:
self.as_api = hs.get_application_service_api()
# map from service id to recoverer instance
self.recoverers: Dict[str, "_Recoverer"] = {}
self.recoverers: dict[str, "_Recoverer"] = {}
# for UTs
self.RECOVERER_CLASS = _Recoverer
@@ -373,8 +369,8 @@ class _TransactionController:
self,
service: ApplicationService,
events: Sequence[EventBase],
ephemeral: Optional[List[JsonMapping]] = None,
to_device_messages: Optional[List[JsonMapping]] = None,
ephemeral: Optional[list[JsonMapping]] = None,
to_device_messages: Optional[list[JsonMapping]] = None,
one_time_keys_count: Optional[TransactionOneTimeKeysCount] = None,
unused_fallback_keys: Optional[TransactionUnusedFallbackKeys] = None,
device_list_summary: Optional[DeviceListUpdates] = None,

@@ -20,13 +20,12 @@
#
#
import sys
from typing import List
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
def main(args: List[str]) -> None:
def main(args: list[str]) -> None:
action = args[1] if len(args) > 1 and args[1] == "read" else None
# If we're reading a key in the config file, then `args[1]` will be `read` and `args[2]`
# will be the key to read.

@@ -33,14 +33,10 @@ from textwrap import dedent
from typing import (
Any,
ClassVar,
Dict,
Iterable,
Iterator,
List,
MutableMapping,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
@@ -321,9 +317,9 @@ class Config:
def read_templates(
self,
filenames: List[str],
filenames: list[str],
custom_template_directories: Optional[Iterable[str]] = None,
) -> List[jinja2.Template]:
) -> list[jinja2.Template]:
"""Load a list of template files from disk using the given variables.
This function will attempt to load the given templates from the default Synapse
@@ -402,7 +398,7 @@ class RootConfig:
class, lower-cased and with "Config" removed.
"""
config_classes: List[Type[Config]] = []
config_classes: list[type[Config]] = []
def __init__(self, config_files: StrSequence = ()):
# Capture absolute paths here, so we can reload config after we daemonize.
@@ -471,7 +467,7 @@ class RootConfig:
generate_secrets: bool = False,
report_stats: Optional[bool] = None,
open_private_ports: bool = False,
listeners: Optional[List[dict]] = None,
listeners: Optional[list[dict]] = None,
tls_certificate_path: Optional[str] = None,
tls_private_key_path: Optional[str] = None,
) -> str:
@@ -545,7 +541,7 @@ class RootConfig:
@classmethod
def load_config(
cls: Type[TRootConfig], description: str, argv_options: List[str]
cls: type[TRootConfig], description: str, argv_options: list[str]
) -> TRootConfig:
"""Parse the commandline and config files
@@ -605,8 +601,8 @@ class RootConfig:
@classmethod
def load_config_with_parser(
cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv_options: List[str]
) -> Tuple[TRootConfig, argparse.Namespace]:
cls: type[TRootConfig], parser: argparse.ArgumentParser, argv_options: list[str]
) -> tuple[TRootConfig, argparse.Namespace]:
"""Parse the commandline and config files with the given parser
Doesn't support config-file-generation: used by the worker apps.
@@ -658,7 +654,7 @@ class RootConfig:
@classmethod
def load_or_generate_config(
cls: Type[TRootConfig], description: str, argv_options: List[str]
cls: type[TRootConfig], description: str, argv_options: list[str]
) -> Optional[TRootConfig]:
"""Parse the commandline and config files
@@ -858,7 +854,7 @@ class RootConfig:
def parse_config_dict(
self,
config_dict: Dict[str, Any],
config_dict: dict[str, Any],
config_dir_path: str,
data_dir_path: str,
allow_secrets_in_config: bool = True,
@@ -883,7 +879,7 @@ class RootConfig:
)
def generate_missing_files(
self, config_dict: Dict[str, Any], config_dir_path: str
self, config_dict: dict[str, Any], config_dir_path: str
) -> None:
self.invoke_all("generate_files", config_dict, config_dir_path)
@@ -930,7 +926,7 @@ class RootConfig:
"""
def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]:
def read_config_files(config_files: Iterable[str]) -> dict[str, Any]:
"""Read the config files and shallowly merge them into a dict.
Successive configurations are shallowly merged into ones provided earlier,
@@ -964,7 +960,7 @@ def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]:
return specified_config
def find_config_files(search_paths: List[str]) -> List[str]:
def find_config_files(search_paths: list[str]) -> list[str]:
"""Finds config files using a list of search paths. If a path is a file
then that file path is added to the list. If a search path is a directory
then all the "*.yaml" files in that directory are added to the list in
@@ -1018,7 +1014,7 @@ class ShardedWorkerHandlingConfig:
below).
"""
instances: List[str]
instances: list[str]
def should_handle(self, instance_name: str, key: str) -> bool:
"""Whether this instance is responsible for handling the given key."""

@@ -2,15 +2,11 @@ import argparse
from typing import (
Any,
Collection,
Dict,
Iterable,
Iterator,
List,
Literal,
MutableMapping,
Optional,
Tuple,
Type,
TypeVar,
Union,
overload,
@@ -129,8 +125,8 @@ class RootConfig:
mas: mas.MasConfig
matrix_rtc: matrixrtc.MatrixRtcConfig
config_classes: List[Type["Config"]] = ...
config_files: List[str]
config_classes: list[type["Config"]] = ...
config_files: list[str]
def __init__(self, config_files: Collection[str] = ...) -> None: ...
def invoke_all(
self, func_name: str, *args: Any, **kwargs: Any
@@ -139,7 +135,7 @@ class RootConfig:
def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: Any) -> None: ...
def parse_config_dict(
self,
config_dict: Dict[str, Any],
config_dict: dict[str, Any],
config_dir_path: str,
data_dir_path: str,
allow_secrets_in_config: bool = ...,
@@ -158,11 +154,11 @@ class RootConfig:
) -> str: ...
@classmethod
def load_or_generate_config(
cls: Type[TRootConfig], description: str, argv_options: List[str]
cls: type[TRootConfig], description: str, argv_options: list[str]
) -> Optional[TRootConfig]: ...
@classmethod
def load_config(
cls: Type[TRootConfig], description: str, argv_options: List[str]
cls: type[TRootConfig], description: str, argv_options: list[str]
) -> TRootConfig: ...
@classmethod
def add_arguments_to_parser(
@@ -170,8 +166,8 @@ class RootConfig:
) -> None: ...
@classmethod
def load_config_with_parser(
cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv_options: List[str]
) -> Tuple[TRootConfig, argparse.Namespace]: ...
cls: type[TRootConfig], parser: argparse.ArgumentParser, argv_options: list[str]
) -> tuple[TRootConfig, argparse.Namespace]: ...
def generate_missing_files(
self, config_dict: dict, config_dir_path: str
) -> None: ...
@@ -203,16 +199,16 @@ class Config:
def read_template(self, filenames: str) -> jinja2.Template: ...
def read_templates(
self,
filenames: List[str],
filenames: list[str],
custom_template_directories: Optional[Iterable[str]] = None,
) -> List[jinja2.Template]: ...
) -> list[jinja2.Template]: ...
def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]: ...
def find_config_files(search_paths: List[str]) -> List[str]: ...
def read_config_files(config_files: Iterable[str]) -> dict[str, Any]: ...
def find_config_files(search_paths: list[str]) -> list[str]: ...
class ShardedWorkerHandlingConfig:
instances: List[str]
def __init__(self, instances: List[str]) -> None: ...
instances: list[str]
def __init__(self, instances: list[str]) -> None: ...
def should_handle(self, instance_name: str, key: str) -> bool: ... # noqa: F811
class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig):

@@ -18,7 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import Any, Dict, Type, TypeVar
from typing import Any, TypeVar
import jsonschema
@@ -79,8 +79,8 @@ Model = TypeVar("Model", bound=BaseModel)
def parse_and_validate_mapping(
config: Any,
model_type: Type[Model],
) -> Dict[str, Model]:
model_type: type[Model],
) -> dict[str, Model]:
"""Parse `config` as a mapping from strings to a given `Model` type.
Args:
config: The configuration data to check
@@ -93,7 +93,7 @@ def parse_and_validate_mapping(
try:
# type-ignore: mypy doesn't like constructing `Dict[str, model_type]` because
# `model_type` is a runtime variable. Pydantic is fine with this.
instances = parse_obj_as(Dict[str, model_type], config) # type: ignore[valid-type]
instances = parse_obj_as(dict[str, model_type], config) # type: ignore[valid-type]
except ValidationError as e:
raise ConfigError(str(e)) from e
return instances
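The `parse_obj_as(dict[str, model_type], config)` call above also shows why this migration matters beyond annotations: the builtin generic works as a runtime value too. A sketch of the same idea using pydantic's v1-style `parse_obj_as` with a made-up model:

```python
from pydantic import BaseModel, parse_obj_as  # v1-style API, as used above

class Endpoint(BaseModel):  # hypothetical model standing in for a real config class
    host: str
    port: int

raw = {"worker1": {"host": "localhost", "port": 8034}}
instances = parse_obj_as(dict[str, Endpoint], raw)
assert instances["worker1"].port == 8034
```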

@@ -20,7 +20,7 @@
#
import logging
from typing import Any, Iterable, Optional, Tuple
from typing import Any, Iterable, Optional
from synapse.api.constants import EventTypes
from synapse.config._base import Config, ConfigError
@@ -46,7 +46,7 @@ class ApiConfig(Config):
def _get_prejoin_state_entries(
self, config: JsonDict
) -> Iterable[Tuple[str, Optional[str]]]:
) -> Iterable[tuple[str, Optional[str]]]:
"""Get the event types and state keys to include in the prejoin state."""
room_prejoin_state_config = config.get("room_prejoin_state") or {}

@@ -21,7 +21,7 @@
#
import logging
from typing import Any, Dict, List
from typing import Any
from urllib import parse as urlparse
import yaml
@@ -61,13 +61,13 @@ class AppServiceConfig(Config):
def load_appservices(
hostname: str, config_files: List[str]
) -> List[ApplicationService]:
hostname: str, config_files: list[str]
) -> list[ApplicationService]:
"""Returns a list of Application Services from the config files."""
# Dicts of value -> filename
seen_as_tokens: Dict[str, str] = {}
seen_ids: Dict[str, str] = {}
seen_as_tokens: dict[str, str] = {}
seen_ids: dict[str, str] = {}
appservices = []
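The `seen_as_tokens`/`seen_ids` dicts above map each value to the file it was first seen in, so a duplicate registration can be reported with both filenames. A minimal sketch of that bookkeeping (function name and error message are illustrative):

```python
def remember_or_fail(seen: dict[str, str], value: str, filename: str, what: str) -> None:
    # Map value -> filename so the error can name both conflicting files.
    if value in seen:
        raise ValueError(f"{what} {value!r} in {filename} is already used in {seen[value]}")
    seen[value] = filename

seen_as_tokens: dict[str, str] = {}
remember_or_fail(seen_as_tokens, "secret-token", "bridge1.yaml", "as_token")
```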

@@ -23,7 +23,7 @@ import logging
import os
import re
import threading
from typing import Any, Callable, Dict, Mapping, Optional
from typing import Any, Callable, Mapping, Optional
import attr
@@ -38,7 +38,7 @@ logger = logging.getLogger(__name__)
_CACHE_PREFIX = "SYNAPSE_CACHE_FACTOR"
# Map from canonicalised cache name to cache.
_CACHES: Dict[str, Callable[[float], None]] = {}
_CACHES: dict[str, Callable[[float], None]] = {}
# a lock on the contents of _CACHES
_CACHES_LOCK = threading.Lock()
@@ -104,7 +104,7 @@ class CacheConfig(Config):
_environ: Mapping[str, str] = os.environ
event_cache_size: int
cache_factors: Dict[str, float]
cache_factors: dict[str, float]
global_factor: float
track_memory_usage: bool
expiry_time_msec: Optional[int]

@@ -20,7 +20,7 @@
#
#
from typing import Any, List, Optional
from typing import Any, Optional
from synapse.config.sso import SsoAttributeRequirement
from synapse.types import JsonDict
@@ -107,7 +107,7 @@ REQUIRED_ATTRIBUTES_SCHEMA = {
def _parsed_required_attributes_def(
required_attributes: Any,
) -> List[SsoAttributeRequirement]:
) -> list[SsoAttributeRequirement]:
validate_config(
REQUIRED_ATTRIBUTES_SCHEMA,
required_attributes,

@@ -22,7 +22,7 @@
import argparse
import logging
import os
from typing import Any, List
from typing import Any
from synapse.config._base import Config, ConfigError
from synapse.types import JsonDict
@@ -83,7 +83,7 @@ class DatabaseConfig(Config):
def __init__(self, *args: Any):
super().__init__(*args)
self.databases: List[DatabaseConnectionConfig] = []
self.databases: list[DatabaseConnectionConfig] = []
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
# We *experimentally* support specifying multiple databases via the

@@ -23,7 +23,7 @@
import hashlib
import logging
import os
from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional
from typing import TYPE_CHECKING, Any, Iterator, Optional
import attr
import jsonschema
@@ -110,7 +110,7 @@ class TrustedKeyServer:
server_name: str
# map from key id to key object, or None to disable signature verification.
verify_keys: Optional[Dict[str, VerifyKey]] = None
verify_keys: Optional[dict[str, VerifyKey]] = None
class KeyConfig(Config):
@@ -250,7 +250,7 @@ class KeyConfig(Config):
- server_name: "matrix.org"
""" % locals()
def read_signing_keys(self, signing_key_path: str, name: str) -> List[SigningKey]:
def read_signing_keys(self, signing_key_path: str, name: str) -> list[SigningKey]:
"""Read the signing keys in the given path.
Args:
@@ -280,7 +280,7 @@ class KeyConfig(Config):
def read_old_signing_keys(
self, old_signing_keys: Optional[JsonDict]
) -> Dict[str, "VerifyKeyWithExpiry"]:
) -> dict[str, "VerifyKeyWithExpiry"]:
if old_signing_keys is None:
return {}
keys = {}
@@ -299,7 +299,7 @@ class KeyConfig(Config):
)
return keys
def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None:
def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None:
if "signing_key" in config:
return
@@ -393,7 +393,7 @@ TRUSTED_KEY_SERVERS_SCHEMA = {
def _parse_key_servers(
key_servers: List[Any], federation_verify_certificates: bool
key_servers: list[Any], federation_verify_certificates: bool
) -> Iterator[TrustedKeyServer]:
try:
jsonschema.validate(key_servers, TRUSTED_KEY_SERVERS_SCHEMA)
@@ -408,7 +408,7 @@ def _parse_key_servers(
server_name = server["server_name"]
result = TrustedKeyServer(server_name=server_name)
verify_keys: Optional[Dict[str, str]] = server.get("verify_keys")
verify_keys: Optional[dict[str, str]] = server.get("verify_keys")
if verify_keys is not None:
result.verify_keys = {}
for key_id, key_base64 in verify_keys.items():
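The (truncated) loop above decodes each base64-encoded verify key into a key object. A sketch of that decoding step, assuming the usual `signedjson` and `unpaddedbase64` helpers from the Matrix ecosystem:

```python
from signedjson.key import decode_verify_key_bytes
from unpaddedbase64 import decode_base64

def decode_verify_keys(verify_keys: dict[str, str]) -> dict:
    # key_id looks like "ed25519:abc123"; the value is the unpadded-base64 key.
    return {
        key_id: decode_verify_key_bytes(key_id, decode_base64(key_base64))
        for key_id, key_base64 in verify_keys.items()
    }
```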

@@ -26,7 +26,7 @@ import os
import sys
import threading
from string import Template
from typing import TYPE_CHECKING, Any, Dict, Optional
from typing import TYPE_CHECKING, Any, Optional
import yaml
from zope.interface import implementer
@@ -186,7 +186,7 @@ class LoggingConfig(Config):
help=argparse.SUPPRESS,
)
def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None:
def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None:
log_config = config.get("log_config")
if log_config and not os.path.exists(log_config):
log_file = self.abspath("homeserver.log")

@@ -18,7 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import Any, Dict, List, Tuple
from typing import Any
from synapse.config._base import Config, ConfigError
from synapse.types import JsonDict
@@ -29,7 +29,7 @@ class ModulesConfig(Config):
section = "modules"
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.loaded_modules: List[Tuple[Any, Dict]] = []
self.loaded_modules: list[tuple[Any, dict]] = []
configured_modules = config.get("modules") or []
for i, module in enumerate(configured_modules):

@@ -21,7 +21,7 @@
import importlib.resources as importlib_resources
import json
import re
from typing import Any, Dict, Iterable, List, Optional, Pattern
from typing import Any, Iterable, Optional, Pattern
from urllib import parse as urlparse
import attr
@@ -37,9 +37,9 @@ class OEmbedEndpointConfig:
# The API endpoint to fetch.
api_endpoint: str
# The patterns to match.
url_patterns: List[Pattern[str]]
url_patterns: list[Pattern[str]]
# The supported formats.
formats: Optional[List[str]]
formats: Optional[list[str]]
class OembedConfig(Config):
@@ -48,10 +48,10 @@ class OembedConfig(Config):
section = "oembed"
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
oembed_config: Dict[str, Any] = config.get("oembed") or {}
oembed_config: dict[str, Any] = config.get("oembed") or {}
# A list of patterns which will be used.
self.oembed_patterns: List[OEmbedEndpointConfig] = list(
self.oembed_patterns: list[OEmbedEndpointConfig] = list(
self._parse_and_validate_providers(oembed_config)
)
@@ -92,7 +92,7 @@ class OembedConfig(Config):
)
def _parse_and_validate_provider(
self, providers: List[JsonDict], config_path: StrSequence
self, providers: list[JsonDict], config_path: StrSequence
) -> Iterable[OEmbedEndpointConfig]:
# Ensure it is the proper form.
validate_config(

@@ -21,7 +21,7 @@
#
from collections import Counter
from typing import Any, Collection, Iterable, List, Mapping, Optional, Tuple, Type
from typing import Any, Collection, Iterable, Mapping, Optional
import attr
@@ -213,7 +213,7 @@ def _parse_oidc_provider_configs(config: JsonDict) -> Iterable["OidcProviderConf
def _parse_oidc_config_dict(
oidc_config: JsonDict, config_path: Tuple[str, ...]
oidc_config: JsonDict, config_path: tuple[str, ...]
) -> "OidcProviderConfig":
"""Take the configuration dict and parse it into an OidcProviderConfig
@@ -416,7 +416,7 @@ class OidcProviderConfig:
# Valid values are 'auto', 'always', and 'never'.
pkce_method: str
id_token_signing_alg_values_supported: Optional[List[str]]
id_token_signing_alg_values_supported: Optional[list[str]]
"""
List of the JWS signing algorithms (`alg` values) that are supported for signing the
`id_token`.
@@ -491,13 +491,13 @@ class OidcProviderConfig:
allow_existing_users: bool
# the class of the user mapping provider
user_mapping_provider_class: Type
user_mapping_provider_class: type
# the config of the user mapping provider
user_mapping_provider_config: Any
# attributes required in userinfo to allow login/registration
attribute_requirements: List[SsoAttributeRequirement]
attribute_requirements: list[SsoAttributeRequirement]
# Whether automatic registrations are enabled in the OIDC flow. Defaults to True
enable_registration: bool

@@ -19,7 +19,7 @@
#
#
from typing import Any, List, Tuple, Type
from typing import Any
from synapse.types import JsonDict
from synapse.util.module_loader import load_module
@@ -56,7 +56,7 @@ class PasswordAuthProviderConfig(Config):
for backwards compatibility.
"""
self.password_providers: List[Tuple[Type, Any]] = []
self.password_providers: list[tuple[type, Any]] = []
providers = []
# We want to be backwards compatible with the old `ldap_config`

@@ -19,7 +19,7 @@
#
#
from typing import Any, Dict, Optional, cast
from typing import Any, Optional, cast
import attr
@@ -37,9 +37,9 @@ class RatelimitSettings:
@classmethod
def parse(
cls,
config: Dict[str, Any],
config: dict[str, Any],
key: str,
defaults: Optional[Dict[str, float]] = None,
defaults: Optional[dict[str, float]] = None,
) -> "RatelimitSettings":
"""Parse config[key] as a new-style rate limiter config.
@@ -62,7 +62,7 @@ class RatelimitSettings:
# By this point we should have hit the rate limiter parameters.
# We don't actually check this though!
rl_config = cast(Dict[str, float], rl_config)
rl_config = cast(dict[str, float], rl_config)
return cls(
key=key,

@@ -20,7 +20,7 @@
#
#
import argparse
from typing import Any, Dict, Optional
from typing import Any, Optional
from synapse.api.constants import RoomCreationPreset
from synapse.config._base import Config, ConfigError, read_file
@@ -266,7 +266,7 @@ class RegistrationConfig(Config):
else:
return ""
def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None:
def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None:
# if 'registration_shared_secret_path' is specified, and the target file
# does not exist, generate it.
registration_shared_secret_path = config.get("registration_shared_secret_path")

@@ -21,7 +21,7 @@
import logging
import os
from typing import Any, Dict, List, Tuple
from typing import Any
import attr
@@ -80,8 +80,8 @@ class MediaStorageProviderConfig:
def parse_thumbnail_requirements(
thumbnail_sizes: List[JsonDict],
) -> Dict[str, Tuple[ThumbnailRequirement, ...]]:
thumbnail_sizes: list[JsonDict],
) -> dict[str, tuple[ThumbnailRequirement, ...]]:
"""Takes a list of dictionaries with "width", "height", and "method" keys
and creates a map from image media types to the thumbnail size, thumbnailing
method, and thumbnail media type to precalculate
@@ -92,7 +92,7 @@ def parse_thumbnail_requirements(
Returns:
Dictionary mapping from media type string to a tuple of ThumbnailRequirement.
"""
requirements: Dict[str, List[ThumbnailRequirement]] = {}
requirements: dict[str, list[ThumbnailRequirement]] = {}
for size in thumbnail_sizes:
width = size["width"]
height = size["height"]
@@ -206,7 +206,7 @@ class ContentRepositoryConfig(Config):
#
# We don't create the storage providers here as not all workers need
# them to be started.
self.media_storage_providers: List[tuple] = []
self.media_storage_providers: list[tuple] = []
for i, provider_config in enumerate(storage_providers):
# We special case the module "file_system" so as not to need to
@@ -298,7 +298,7 @@ class ContentRepositoryConfig(Config):
self.enable_authenticated_media = config.get("enable_authenticated_media", True)
self.media_upload_limits: List[MediaUploadLimit] = []
self.media_upload_limits: list[MediaUploadLimit] = []
for limit_config in config.get("media_upload_limits", []):
time_period_ms = self.parse_duration(limit_config["time_period"])
max_bytes = self.parse_size(limit_config["max_size"])
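For the `parse_thumbnail_requirements` helper changed earlier in this file, here is a hedged sketch of the shape of the transformation its docstring describes: a list of size dicts becomes a map from source media type to a tuple of requirements. The requirement class and the media-type list here are simplified assumptions:

```python
from dataclasses import dataclass

@dataclass(frozen=True)
class ThumbnailRequirement:  # simplified stand-in for the real attrs class
    width: int
    height: int
    method: str
    media_type: str

def parse_thumbnail_requirements(
    thumbnail_sizes: list[dict],
) -> dict[str, tuple[ThumbnailRequirement, ...]]:
    requirements: dict[str, list[ThumbnailRequirement]] = {}
    for size in thumbnail_sizes:
        for in_type, thumb_type in [("image/jpeg", "image/jpeg"), ("image/png", "image/png")]:
            requirements.setdefault(in_type, []).append(
                ThumbnailRequirement(size["width"], size["height"], size["method"], thumb_type)
            )
    return {media_type: tuple(reqs) for media_type, reqs in requirements.items()}

reqs = parse_thumbnail_requirements([{"width": 32, "height": 32, "method": "crop"}])
assert reqs["image/png"][0].method == "crop"
```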

@@ -20,7 +20,7 @@
#
import logging
from typing import Any, List, Optional
from typing import Any, Optional
import attr
@@ -119,7 +119,7 @@ class RetentionConfig(Config):
" greater than 'allowed_lifetime_max'"
)
self.retention_purge_jobs: List[RetentionPurgeJob] = []
self.retention_purge_jobs: list[RetentionPurgeJob] = []
for purge_job_config in retention_config.get("purge_jobs", []):
interval_config = purge_job_config.get("interval")

@@ -20,7 +20,7 @@
#
import logging
from typing import Any, List, Set
from typing import Any
from synapse.config.sso import SsoAttributeRequirement
from synapse.types import JsonDict
@@ -160,8 +160,11 @@ class SAML2Config(Config):
)
# Get the desired saml auth response attributes from the module
# type-ignore: the runtime checks above already verified that the provider class has
# the method being called, but mypy is not aware of that and reports an error now
# that the typehint of the provider class has changed from "typing.Type" to "type"
saml2_config_dict = self._default_saml_config_dict(
*self.saml2_user_mapping_provider_class.get_saml_attributes(
*self.saml2_user_mapping_provider_class.get_saml_attributes( # type: ignore[attr-defined]
self.saml2_user_mapping_provider_config
)
)
@@ -191,7 +194,7 @@ class SAML2Config(Config):
)
def _default_saml_config_dict(
self, required_attributes: Set[str], optional_attributes: Set[str]
self, required_attributes: set[str], optional_attributes: set[str]
) -> JsonDict:
"""Generate a configuration dictionary with required and optional attributes that
will be needed to process new user registration
@@ -239,7 +242,7 @@ ATTRIBUTE_REQUIREMENTS_SCHEMA = {
def _parse_attribute_requirements_def(
attribute_requirements: Any,
) -> List[SsoAttributeRequirement]:
) -> list[SsoAttributeRequirement]:
validate_config(
ATTRIBUTE_REQUIREMENTS_SCHEMA,
attribute_requirements,

View File

@@ -25,7 +25,7 @@ import logging
import os.path
import urllib.parse
from textwrap import indent
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, TypedDict, Union
from typing import Any, Iterable, Optional, TypedDict, Union
from urllib.request import getproxies_environment
import attr
@@ -213,7 +213,7 @@ KNOWN_RESOURCES = {
@attr.s(frozen=True)
class HttpResourceConfig:
names: List[str] = attr.ib(
names: list[str] = attr.ib(
factory=list,
validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),
)
@@ -228,8 +228,8 @@ class HttpListenerConfig:
"""Object describing the http-specific parts of the config of a listener"""
x_forwarded: bool = False
resources: List[HttpResourceConfig] = attr.Factory(list)
additional_resources: Dict[str, dict] = attr.Factory(dict)
resources: list[HttpResourceConfig] = attr.Factory(list)
additional_resources: dict[str, dict] = attr.Factory(dict)
tag: Optional[str] = None
request_id_header: Optional[str] = None
@@ -239,7 +239,7 @@ class TCPListenerConfig:
"""Object describing the configuration of a single TCP listener."""
port: int = attr.ib(validator=attr.validators.instance_of(int))
bind_addresses: List[str] = attr.ib(validator=attr.validators.instance_of(List))
bind_addresses: list[str] = attr.ib(validator=attr.validators.instance_of(list))
type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
tls: bool = False
@@ -344,7 +344,7 @@ class ProxyConfig:
"""
Proxy server to use for HTTPS requests.
"""
no_proxy_hosts: Optional[List[str]]
no_proxy_hosts: Optional[list[str]]
"""
List of hosts, IP addresses, or IP ranges in CIDR format which should not use the
proxy. Synapse will directly connect to these hosts.
@@ -864,11 +864,11 @@ class ServerConfig(Config):
)
# Whitelist of domain names that given next_link parameters must have
next_link_domain_whitelist: Optional[List[str]] = config.get(
next_link_domain_whitelist: Optional[list[str]] = config.get(
"next_link_domain_whitelist"
)
self.next_link_domain_whitelist: Optional[Set[str]] = None
self.next_link_domain_whitelist: Optional[set[str]] = None
if next_link_domain_whitelist is not None:
if not isinstance(next_link_domain_whitelist, list):
raise ConfigError("'next_link_domain_whitelist' must be a list")
@@ -892,7 +892,7 @@ class ServerConfig(Config):
config.get("use_account_validity_in_account_status") or False
)
self.rooms_to_exclude_from_sync: List[str] = (
self.rooms_to_exclude_from_sync: list[str] = (
config.get("exclude_rooms_from_sync") or []
)
@@ -927,7 +927,7 @@ class ServerConfig(Config):
data_dir_path: str,
server_name: str,
open_private_ports: bool,
listeners: Optional[List[dict]],
listeners: Optional[list[dict]],
**kwargs: Any,
) -> str:
_, bind_port = parse_and_validate_server_name(server_name)
@@ -1028,7 +1028,7 @@ class ServerConfig(Config):
help="Turn on the twisted telnet manhole service on the given port.",
)
def read_gc_intervals(self, durations: Any) -> Optional[Tuple[float, float, float]]:
def read_gc_intervals(self, durations: Any) -> Optional[tuple[float, float, float]]:
"""Reads the three durations for the GC min interval option, returning seconds."""
if durations is None:
return None
@@ -1048,7 +1048,7 @@ class ServerConfig(Config):
def is_threepid_reserved(
reserved_threepids: List[JsonDict], threepid: JsonDict
reserved_threepids: list[JsonDict], threepid: JsonDict
) -> bool:
"""Check the threepid against the reserved threepid config
Args:
@@ -1066,8 +1066,8 @@ def is_threepid_reserved(
def read_gc_thresholds(
thresholds: Optional[List[Any]],
) -> Optional[Tuple[int, int, int]]:
thresholds: Optional[list[Any]],
) -> Optional[tuple[int, int, int]]:
"""Reads the three integer thresholds for garbage collection. Ensures that
the thresholds are integers if thresholds are supplied.
"""

@@ -19,7 +19,7 @@
#
import logging
from typing import Any, Dict, List, Tuple
from typing import Any
from synapse.config import ConfigError
from synapse.types import JsonDict
@@ -41,7 +41,7 @@ class SpamCheckerConfig(Config):
section = "spamchecker"
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.spam_checkers: List[Tuple[Any, Dict]] = []
self.spam_checkers: list[tuple[Any, dict]] = []
spam_checkers = config.get("spam_checker") or []
if isinstance(spam_checkers, dict):

@@ -19,7 +19,7 @@
#
#
import logging
from typing import Any, Dict, List, Optional
from typing import Any, Optional
import attr
@@ -45,7 +45,7 @@ class SsoAttributeRequirement:
attribute: str
# If neither `value` nor `one_of` is given, the attribute must simply exist.
value: Optional[str] = None
one_of: Optional[List[str]] = None
one_of: Optional[list[str]] = None
JSON_SCHEMA = {
"type": "object",
@@ -64,7 +64,7 @@ class SSOConfig(Config):
section = "sso"
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
sso_config: Dict[str, Any] = config.get("sso") or {}
sso_config: dict[str, Any] = config.get("sso") or {}
# The sso-specific template_dir
self.sso_template_dir = sso_config.get("template_dir")

@@ -20,7 +20,7 @@
#
import logging
from typing import Any, List, Optional, Pattern
from typing import Any, Optional, Pattern
from matrix_common.regex import glob_to_regex
@@ -84,7 +84,7 @@ class TlsConfig(Config):
fed_whitelist_entries = []
# Support globs (*) in whitelist values
self.federation_certificate_verification_whitelist: List[Pattern] = []
self.federation_certificate_verification_whitelist: list[Pattern] = []
for entry in fed_whitelist_entries:
try:
entry_regex = glob_to_regex(entry.encode("ascii").decode("ascii"))
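The whitelist loop above leans on `matrix_common.regex.glob_to_regex`, which compiles a glob such as `*.example.org` into a compiled pattern. A small usage sketch (the exact matching semantics, e.g. case handling, are the library's, so the outputs shown are indicative):

```python
import re

from matrix_common.regex import glob_to_regex

whitelist: list[re.Pattern] = [glob_to_regex(e) for e in ["*.example.org", "matrix.org"]]

def is_whitelisted(server_name: str) -> bool:
    return any(pattern.match(server_name) for pattern in whitelist)

print(is_whitelisted("federation.example.org"))  # expected: True
print(is_whitelisted("example.com"))             # expected: False
```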

@@ -19,7 +19,7 @@
#
#
from typing import Any, List, Set
from typing import Any
from synapse.types import JsonDict
from synapse.util.check_dependencies import check_requirements
@@ -42,7 +42,7 @@ class TracerConfig(Config):
{"sampler": {"type": "const", "param": 1}, "logging": False},
)
self.force_tracing_for_users: Set[str] = set()
self.force_tracing_for_users: set[str] = set()
if not self.opentracer_enabled:
return
@@ -51,7 +51,7 @@ class TracerConfig(Config):
# The tracer is enabled so sanitize the config
self.opentracer_whitelist: List[str] = opentracing_config.get(
self.opentracer_whitelist: list[str] = opentracing_config.get(
"homeserver_whitelist", []
)
if not isinstance(self.opentracer_whitelist, list):

@@ -12,7 +12,7 @@
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
from typing import Any, List, Optional
from typing import Any, Optional
from synapse.api.constants import UserTypes
from synapse.types import JsonDict
@@ -29,9 +29,9 @@ class UserTypesConfig(Config):
self.default_user_type: Optional[str] = user_types.get(
"default_user_type", None
)
self.extra_user_types: List[str] = user_types.get("extra_user_types", [])
self.extra_user_types: list[str] = user_types.get("extra_user_types", [])
all_user_types: List[str] = []
all_user_types: list[str] = []
all_user_types.extend(UserTypes.ALL_BUILTIN_USER_TYPES)
all_user_types.extend(self.extra_user_types)

@@ -22,7 +22,7 @@
import argparse
import logging
from typing import Any, Dict, List, Optional, Union
from typing import Any, Optional, Union
import attr
@@ -79,7 +79,7 @@ MAIN_PROCESS_INSTANCE_MAP_NAME = "main"
logger = logging.getLogger(__name__)
def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]:
def _instance_to_list_converter(obj: Union[str, list[str]]) -> list[str]:
"""Helper for allowing parsing a string or list of strings to a config
option expecting a list of strings.
"""
@@ -142,39 +142,39 @@ class WriterLocations:
device_lists: The instances that write to the device list stream.
"""
events: List[str] = attr.ib(
events: list[str] = attr.ib(
default=[MAIN_PROCESS_INSTANCE_NAME],
converter=_instance_to_list_converter,
)
typing: List[str] = attr.ib(
typing: list[str] = attr.ib(
default=[MAIN_PROCESS_INSTANCE_NAME],
converter=_instance_to_list_converter,
)
to_device: List[str] = attr.ib(
to_device: list[str] = attr.ib(
default=[MAIN_PROCESS_INSTANCE_NAME],
converter=_instance_to_list_converter,
)
account_data: List[str] = attr.ib(
account_data: list[str] = attr.ib(
default=[MAIN_PROCESS_INSTANCE_NAME],
converter=_instance_to_list_converter,
)
receipts: List[str] = attr.ib(
receipts: list[str] = attr.ib(
default=[MAIN_PROCESS_INSTANCE_NAME],
converter=_instance_to_list_converter,
)
presence: List[str] = attr.ib(
presence: list[str] = attr.ib(
default=[MAIN_PROCESS_INSTANCE_NAME],
converter=_instance_to_list_converter,
)
push_rules: List[str] = attr.ib(
push_rules: list[str] = attr.ib(
default=[MAIN_PROCESS_INSTANCE_NAME],
converter=_instance_to_list_converter,
)
device_lists: List[str] = attr.ib(
device_lists: list[str] = attr.ib(
default=[MAIN_PROCESS_INSTANCE_NAME],
converter=_instance_to_list_converter,
)
thread_subscriptions: List[str] = attr.ib(
thread_subscriptions: list[str] = attr.ib(
default=["master"],
converter=_instance_to_list_converter,
)
@@ -190,8 +190,8 @@ class OutboundFederationRestrictedTo:
locations: list of instance locations to connect to proxy via.
"""
instances: Optional[List[str]]
locations: List[InstanceLocationConfig] = attr.Factory(list)
instances: Optional[list[str]]
locations: list[InstanceLocationConfig] = attr.Factory(list)
def __contains__(self, instance: str) -> bool:
# It feels a bit dirty to return `True` if `instances` is `None`, but it makes
@@ -295,7 +295,7 @@ class WorkerConfig(Config):
# A map from instance name to host/port of their HTTP replication endpoint.
# Check if the main process is declared. The main process itself doesn't need
# this data as it would never have to talk to itself.
instance_map: Dict[str, Any] = config.get("instance_map", {})
instance_map: dict[str, Any] = config.get("instance_map", {})
if self.instance_name is not MAIN_PROCESS_INSTANCE_NAME:
# TODO: The next 3 condition blocks can be deleted after some time has
@@ -342,7 +342,7 @@ class WorkerConfig(Config):
)
# type-ignore: the expression `Union[A, B]` is not a Type[Union[A, B]] currently
self.instance_map: Dict[str, InstanceLocationConfig] = (
self.instance_map: dict[str, InstanceLocationConfig] = (
parse_and_validate_mapping(
instance_map,
InstanceLocationConfig, # type: ignore[arg-type]
@@ -481,7 +481,7 @@ class WorkerConfig(Config):
def _should_this_worker_perform_duty(
self,
config: Dict[str, Any],
config: dict[str, Any],
legacy_master_option_name: str,
legacy_worker_app_name: str,
new_option_name: str,
@@ -574,11 +574,11 @@ class WorkerConfig(Config):
def _worker_names_performing_this_duty(
self,
config: Dict[str, Any],
config: dict[str, Any],
legacy_option_name: str,
legacy_app_name: str,
modern_instance_list_name: str,
) -> List[str]:
) -> list[str]:
"""
Retrieves the names of the workers handling a given duty, by either legacy
option or instance list.
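`_instance_to_list_converter`, whose signature changed near the top of this file, normalises the `writers`-style options: per its docstring, a bare instance name becomes a one-element list. A minimal sketch of that behaviour:

```python
from typing import Union

def instance_to_list(obj: Union[str, list[str]]) -> list[str]:
    # Accept either "worker1" or ["worker1", "worker2"].
    return [obj] if isinstance(obj, str) else obj

assert instance_to_list("worker1") == ["worker1"]
assert instance_to_list(["worker1", "worker2"]) == ["worker1", "worker2"]
```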

@@ -23,7 +23,7 @@
import collections.abc
import hashlib
import logging
from typing import Any, Callable, Dict, Tuple
from typing import Any, Callable
from canonicaljson import encode_canonical_json
from signedjson.sign import sign_json
@@ -80,8 +80,8 @@ def check_event_content_hash(
def compute_content_hash(
event_dict: Dict[str, Any], hash_algorithm: Hasher
) -> Tuple[str, bytes]:
event_dict: dict[str, Any], hash_algorithm: Hasher
) -> tuple[str, bytes]:
"""Compute the content hash of an event, which is the hash of the
unredacted event.
@@ -112,7 +112,7 @@ def compute_content_hash(
def compute_event_reference_hash(
event: EventBase, hash_algorithm: Hasher = hashlib.sha256
) -> Tuple[str, bytes]:
) -> tuple[str, bytes]:
"""Computes the event reference hash. This is the hash of the redacted
event.
@@ -139,7 +139,7 @@ def compute_event_signature(
event_dict: JsonDict,
signature_name: str,
signing_key: SigningKey,
) -> Dict[str, Dict[str, str]]:
) -> dict[str, dict[str, str]]:
"""Compute the signature of the event for the given name and key.
Args:
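To make the hashing functions above concrete: per the Matrix spec, the content hash covers the canonical-JSON encoding of the event with its `unsigned`, `signatures` and `hashes` members removed. A hedged sketch with the algorithm fixed to SHA-256 (the real function takes the hasher as a parameter and strips a few more transient fields):

```python
import hashlib

from canonicaljson import encode_canonical_json

def compute_content_hash(event_dict: dict) -> tuple[str, bytes]:
    event_dict = dict(event_dict)
    # The hash must not cover signatures, previously computed hashes, or unsigned data.
    for key in ("unsigned", "signatures", "hashes"):
        event_dict.pop(key, None)
    hashed = hashlib.sha256(encode_canonical_json(event_dict))
    return hashed.name, hashed.digest()

name, digest = compute_content_hash({"type": "m.room.message", "hashes": {}})
assert name == "sha256" and len(digest) == 32
```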

Some files were not shown because too many files have changed in this diff.