
Compare commits


1 Commit

Author SHA1 Message Date
Travis Ralston  887bb56613  MSC4291 dev  2025-07-14 15:17:52 -06:00
547 changed files with 5584 additions and 17628 deletions


@@ -120,7 +120,7 @@ jobs:
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Install Cosign
uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2
uses: sigstore/cosign-installer@398d4b0eeef1380460a10c8013a76f728fb906ac # v3.9.1
- name: Calculate docker image tag
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0


@@ -263,43 +263,6 @@ jobs:
- run: cargo clippy --all-features -- -D warnings
lint-rust:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
# Install like a normal project from source with all optional dependencies
extras: all
install-project: "true"
poetry-version: "2.1.1"
- name: Ensure `Cargo.lock` is up to date (no stray changes after install)
# The `::error::` syntax is using GitHub Actions' error annotations, see
# https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
run: |
if git diff --quiet Cargo.lock; then
  echo "Cargo.lock is up to date"
else
  echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
  git diff --exit-code Cargo.lock
  exit 1
fi
# This job is split from `lint-rust` because it requires a nightly Rust toolchain
# for some of the unstable options we use in `.rustfmt.toml`.
lint-rustfmt:
runs-on: ubuntu-latest
needs: changes
@@ -311,8 +274,7 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
# We use nightly so that we can use some unstable options that we use in
# `.rustfmt.toml`.
# We use nightly so that it correctly groups together imports
toolchain: nightly-2025-04-23
components: rustfmt
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
@@ -347,7 +309,6 @@ jobs:
- check-lockfile
- lint-clippy
- lint-clippy-nightly
- lint-rust
- lint-rustfmt
- lint-readme
runs-on: ubuntu-latest
@@ -366,14 +327,13 @@ jobs:
lint-pydantic
lint-clippy
lint-clippy-nightly
lint-rust
lint-rustfmt
lint-readme
calculate-test-jobs:
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
# needs: linting-done
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -393,7 +353,6 @@ jobs:
- changes
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
@@ -427,24 +386,7 @@ jobs:
if: ${{ matrix.job.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
- run: |
(
  while true; do
    echo "......."
    date
    df -h | grep root
    free -m
    sleep 10
  done
) &
MONITOR_PID=$!
poetry run trial --jobs=6 tests
STATUS=$?
kill $MONITOR_PID
exit $STATUS
- run: poetry run trial --jobs=6 tests
env:
SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
SYNAPSE_POSTGRES_HOST: /var/run/postgresql


@@ -16,10 +16,6 @@ jobs:
with:
project-url: "https://github.com/orgs/matrix-org/projects/67"
github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
# This action will error if the issue already exists on the project. Which is
# common as `X-Needs-Info` will often be added to issues that are already in
# the triage queue. Prevent the whole job from failing in this case.
continue-on-error: true
- name: Set status
env:
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}


@@ -1,6 +1 @@
# Unstable options are only available on a nightly toolchain and must be opted into
unstable_features = true
# `group_imports` is an unstable option that requires a nightly Rust toolchain. Tracked by
# https://github.com/rust-lang/rustfmt/issues/5083
group_imports = "StdExternalCrate"
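For illustration, a hedged sketch of the ordering that `group_imports = "StdExternalCrate"` enforces (the module names are invented, not taken from this repository):

```rust
// With `group_imports = "StdExternalCrate"`, rustfmt sorts imports into
// three blocks: standard library, external crates, then crate-local.
use std::collections::HashMap;

use serde::Deserialize;

use crate::config::Config;
```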


@@ -1,94 +1,3 @@
# Synapse 1.135.0 (2025-08-01)
No significant changes since 1.135.0rc2.
# Synapse 1.135.0rc2 (2025-07-30)
### Bugfixes
- Fix user failing to deactivate with MAS when `/_synapse/mas` is handled by a worker. ([\#18716](https://github.com/element-hq/synapse/issues/18716))
### Internal Changes
- Fix performance regression introduced in [#18238](https://github.com/element-hq/synapse/issues/18238) by adding a cache to `is_server_admin`. ([\#18747](https://github.com/element-hq/synapse/issues/18747))
# Synapse 1.135.0rc1 (2025-07-22)
### Features
- Add `recaptcha_private_key_path` and `recaptcha_public_key_path` config option. ([\#17984](https://github.com/element-hq/synapse/issues/17984), [\#18684](https://github.com/element-hq/synapse/issues/18684))
- Add plain-text handling for rich-text topics as per [MSC3765](https://github.com/matrix-org/matrix-spec-proposals/pull/3765). ([\#18195](https://github.com/element-hq/synapse/issues/18195))
- If enabled by the user, server admins will see [soft failed](https://spec.matrix.org/v1.13/server-server-api/#soft-failure) events over the Client-Server API. ([\#18238](https://github.com/element-hq/synapse/issues/18238))
- Add experimental support for [MSC4277: Harmonizing the reporting endpoints](https://github.com/matrix-org/matrix-spec-proposals/pull/4277). ([\#18263](https://github.com/element-hq/synapse/issues/18263))
- Add ability to limit amount of media uploaded by a user in a given time period. ([\#18527](https://github.com/element-hq/synapse/issues/18527))
- Enable workers to write directly to the device lists stream and handle device list updates, reducing load on the main process. ([\#18581](https://github.com/element-hq/synapse/issues/18581))
- Support arbitrary profile fields. Contributed by @clokep. ([\#18635](https://github.com/element-hq/synapse/issues/18635))
- Advertise support for Matrix v1.12. ([\#18647](https://github.com/element-hq/synapse/issues/18647))
- Add an option to issue redactions as an admin user via the [admin redaction endpoint](https://element-hq.github.io/synapse/latest/admin_api/user_admin_api.html#redact-all-the-events-of-a-user). ([\#18671](https://github.com/element-hq/synapse/issues/18671))
- Add experimental and incomplete support for [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/blob/rei/msc_thread_subscriptions/proposals/4306-thread-subscriptions.md). ([\#18674](https://github.com/element-hq/synapse/issues/18674))
- Include `event_id` when getting state with `?format=event`. Contributed by @tulir @ Beeper. ([\#18675](https://github.com/element-hq/synapse/issues/18675))
### Bugfixes
- Fix CPU and database spinning when retrying sending events to servers whilst at the same time purging those events. ([\#18499](https://github.com/element-hq/synapse/issues/18499))
- Don't allow creation of tags with names longer than 255 bytes, [as per the spec](https://spec.matrix.org/v1.15/client-server-api/#events-14). ([\#18660](https://github.com/element-hq/synapse/issues/18660))
- Fix `sliding_sync_connections`-related errors when porting from SQLite to Postgres. ([\#18677](https://github.com/element-hq/synapse/issues/18677))
- Fix the MAS integration not working when Synapse is started with `--daemonize` or using `synctl`. ([\#18691](https://github.com/element-hq/synapse/issues/18691))
### Improved Documentation
- Document that some config options for the user directory are in violation of the Matrix spec. ([\#18548](https://github.com/element-hq/synapse/issues/18548))
- Update `rc_delayed_event_mgmt` docs to the actual nesting level. Contributed by @HarHarLinks. ([\#18692](https://github.com/element-hq/synapse/issues/18692))
### Internal Changes
- Add a dedicated internal API for Matrix Authentication Service to Synapse communication. ([\#18520](https://github.com/element-hq/synapse/issues/18520))
- Allow user registrations to be done on workers. ([\#18552](https://github.com/element-hq/synapse/issues/18552))
- Remove unnecessary HTTP replication calls. ([\#18564](https://github.com/element-hq/synapse/issues/18564))
- Refactor `Measure` block metrics to be homeserver-scoped. ([\#18601](https://github.com/element-hq/synapse/issues/18601))
- Refactor cache metrics to be homeserver-scoped. ([\#18604](https://github.com/element-hq/synapse/issues/18604))
- Unbreak "Latest dependencies" workflow by using the `--without dev` poetry option instead of removed `--no-dev`. ([\#18617](https://github.com/element-hq/synapse/issues/18617))
- Update URL Preview code to work with `lxml` 6.0.0+. ([\#18622](https://github.com/element-hq/synapse/issues/18622))
- Use `markdown-it-py` instead of `commonmark` in the release script. ([\#18637](https://github.com/element-hq/synapse/issues/18637))
- Fix typing errors with upgraded mypy version. ([\#18653](https://github.com/element-hq/synapse/issues/18653))
- Add doc comment explaining that config files are shallowly merged. ([\#18664](https://github.com/element-hq/synapse/issues/18664))
- Minor speed up of insertion into `stream_positions` table. ([\#18672](https://github.com/element-hq/synapse/issues/18672))
- Remove unused `allow_no_prev_events` option when creating an event. ([\#18676](https://github.com/element-hq/synapse/issues/18676))
- Clean up `MetricsResource` and Prometheus hacks. ([\#18687](https://github.com/element-hq/synapse/issues/18687))
- Fix dirty `Cargo.lock` changes appearing after install (`base64`). ([\#18689](https://github.com/element-hq/synapse/issues/18689))
- Prevent dirty `Cargo.lock` changes from install. ([\#18693](https://github.com/element-hq/synapse/issues/18693))
- Correct spelling of 'Admin token used' log line. ([\#18697](https://github.com/element-hq/synapse/issues/18697))
- Reduce log spam when client stops downloading media while it is being streamed to them. ([\#18699](https://github.com/element-hq/synapse/issues/18699))
### Updates to locked dependencies
* Bump authlib from 1.6.0 to 1.6.1. ([\#18704](https://github.com/element-hq/synapse/issues/18704))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18666](https://github.com/element-hq/synapse/issues/18666))
* Bump jsonschema from 4.24.0 to 4.25.0. ([\#18707](https://github.com/element-hq/synapse/issues/18707))
* Bump lxml from 5.4.0 to 6.0.0. ([\#18631](https://github.com/element-hq/synapse/issues/18631))
* Bump mypy from 1.13.0 to 1.16.1. ([\#18653](https://github.com/element-hq/synapse/issues/18653))
* Bump once_cell from 1.19.0 to 1.21.3. ([\#18710](https://github.com/element-hq/synapse/issues/18710))
* Bump phonenumbers from 9.0.8 to 9.0.9. ([\#18681](https://github.com/element-hq/synapse/issues/18681))
* Bump ruff from 0.12.2 to 0.12.5. ([\#18683](https://github.com/element-hq/synapse/issues/18683), [\#18705](https://github.com/element-hq/synapse/issues/18705))
* Bump serde_json from 1.0.140 to 1.0.141. ([\#18709](https://github.com/element-hq/synapse/issues/18709))
* Bump sigstore/cosign-installer from 3.9.1 to 3.9.2. ([\#18708](https://github.com/element-hq/synapse/issues/18708))
* Bump types-jsonschema from 4.24.0.20250528 to 4.24.0.20250708. ([\#18682](https://github.com/element-hq/synapse/issues/18682))
# Synapse 1.134.0 (2025-07-15)
No significant changes since 1.134.0rc1.
# Synapse 1.134.0rc1 (2025-07-09)
### Features

Cargo.lock generated

File diff suppressed because it is too large.


@@ -8,7 +8,7 @@
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
implementation, written and maintained by `Element <https://element.io>`_.
`Matrix <https://github.com/matrix-org>`__ is the open standard for
secure and interoperable real-time communications. You can directly run
secure and interoperable real time communications. You can directly run
and manage the source code in this repository, available under an AGPL
license (or alternatively under a commercial license from Element).
There is no support provided by Element unless you have a
@@ -23,13 +23,13 @@ ESS builds on Synapse to offer a complete Matrix-based backend including the ful
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
giving admins the power to easily manage an organization-wide
deployment. It includes advanced identity management, auditing,
moderation and data retention options as well as Long-Term Support and
SLAs. ESS supports any Matrix-compatible client.
moderation and data retention options as well as Long Term Support and
SLAs. ESS can be used to support any Matrix-based frontend client.
.. contents::
🛠️ Installation and configuration
==================================
🛠️ Installing and configuration
===============================
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
@@ -133,7 +133,7 @@ connect from a client: see
An easy way to get started is to log in or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org``
and instead specify a homeserver URL of ``https://<server_name>:8448``
and instead specify a Homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/ecosystem/clients/>`_.
@@ -162,15 +162,16 @@ the public internet. Without it, anyone can freely register accounts on your hom
This can be exploited by attackers to create spambots targeting the rest of the Matrix
federation.
Your new Matrix ID will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account in the form of::
Your new user name will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account. Your name will take
the form of::
@localpart:my.domain.name
(pronounced "at localpart on my dot domain dot name").
As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'Username' box.
desired ``localpart`` in the 'User name' box.
🎯 Troubleshooting and support
==============================
@@ -208,10 +209,10 @@ Identity servers have the job of mapping email addresses and other 3rd Party
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
before creating that mapping.
**Identity servers do not store accounts or credentials - these are stored and managed on homeservers.
Identity Servers are just for mapping 3rd Party IDs to Matrix IDs.**
**They are not where accounts or credentials are stored - these live on home
servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**
This process is highly security-sensitive, as there is an obvious risk of spam if it
This process is very security-sensitive, as there is obvious risk of spam if it
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
term, we hope to create a decentralised system to manage it (`matrix-doc #712
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
@@ -237,9 +238,9 @@ email address.
We welcome contributions to Synapse from the community!
The best place to get started is our
`guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
This is part of our broader `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
information for Synapse developers as well as Synapse administrators.
This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
information for Synapse developers as well as Synapse administrators.
Developers might be particularly interested in:
* `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,


@@ -19,17 +19,17 @@ def build(setup_kwargs: Dict[str, Any]) -> None:
# This flag is a no-op in the latest versions. Instead, we need to
# specify this in the `bdist_wheel` config below.
py_limited_api=True,
# We always build in release mode, as we can't distinguish
# between using `poetry` in development vs production.
# We force always building in release mode, as we can't tell the
# difference between using `poetry` in development vs production.
debug=False,
)
setup_kwargs.setdefault("rust_extensions", []).append(extension)
setup_kwargs["zip_safe"] = False
# We look up the minimum supported Python version with
# `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first Python
# We lookup the minimum supported python version by looking at
# `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first python
# version that matches. We then convert that into the `py_limited_api` form,
# e.g. cp39 for Python 3.9.
# e.g. cp39 for python 3.9.
py_limited_api: str
python_bounds = SpecifierSet(setup_kwargs["python_requires"])
for minor_version in itertools.count(start=8):
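The hunk above is truncated, but the lookup it describes is simple enough to sketch. A hedged, self-contained illustration (assuming the `packaging` library; variable names mirror the snippet above):

```python
import itertools

from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Find the lowest Python 3.x minor version satisfying `python_requires`,
# then express it in `py_limited_api` form (e.g. ">=3.9.0,<4.0.0" -> "cp39").
python_bounds = SpecifierSet(">=3.9.0,<4.0.0")
for minor_version in itertools.count(start=8):
    if Version(f"3.{minor_version}.0") in python_bounds:
        py_limited_api = f"cp3{minor_version}"
        break

print(py_limited_api)  # -> "cp39"
```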


@@ -0,0 +1 @@
Add `recaptcha_private_key_path` and `recaptcha_public_key_path` config option.


@@ -0,0 +1 @@
Add plain-text handling for rich-text topics as per [MSC3765](https://github.com/matrix-org/matrix-spec-proposals/pull/3765).


@@ -0,0 +1 @@
If enabled by the user, server admins will see [soft failed](https://spec.matrix.org/v1.13/server-server-api/#soft-failure) events over the Client-Server API.


@@ -0,0 +1 @@
Add experimental support for [MSC4277](https://github.com/matrix-org/matrix-spec-proposals/pull/4277).


@@ -1 +0,0 @@
Add debug logging for HMAC digest verification failures when using the admin API to register users.


@@ -1 +0,0 @@
Add configurable rate limiting for the creation of rooms.


@@ -0,0 +1 @@
Add ability to limit amount uploaded by a user in a given time period.


@@ -1 +0,0 @@
Add support for [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) - Redact on Kick/Ban.

changelog.d/18552.misc Normal file

@@ -0,0 +1 @@
Allow user registrations to be done on workers.

changelog.d/18564.misc Normal file

@@ -0,0 +1 @@
Remove unnecessary HTTP replication calls.


@@ -1 +0,0 @@
Speed up upgrading a room with large numbers of banned users.


@@ -1 +0,0 @@
Fix config documentation generation script on Windows by enforcing UTF-8.


@@ -1 +0,0 @@
When admins enable themselves to see soft-failed events, they will also see if the cause is due to the policy server flagging them as spam via `unsigned`.

changelog.d/18617.misc Normal file

@@ -0,0 +1 @@
Unbreak "Latest dependencies" workflow by using the `--without dev` poetry option instead of removed `--no-dev`.

changelog.d/18637.misc Normal file

@@ -0,0 +1 @@
Use `markdown-it-py` instead of `commonmark` in the release script.


@@ -1 +0,0 @@
Refactor `Counter` metrics to be homeserver-scoped.

changelog.d/18664.misc Normal file

@@ -0,0 +1 @@
Add doc comment explaining that config files are shallowly merged.


@@ -1 +0,0 @@
Refactor background process metrics to be homeserver-scoped.

changelog.d/18672.misc Normal file

@@ -0,0 +1 @@
Minor speed up of insertion into `stream_positions` table.


@@ -0,0 +1 @@
Add support for MSC4291: Room IDs as hashes of create event.


@@ -1 +0,0 @@
Add ability to configure forward/outbound proxy via homeserver config instead of environment variables. See `http_proxy`, `https_proxy`, `no_proxy_hosts`.


@@ -1 +0,0 @@
Allow return code 403 (allowed by C2S Spec since v1.2) when fetching profiles via federation.


@@ -1 +0,0 @@
Minor improvements to README.


@@ -1 +0,0 @@
Refactor `LaterGauge` metrics to be homeserver-scoped.


@@ -1 +0,0 @@
Refactor `GaugeBucketCollector` metrics to be homeserver-scoped.


@@ -1 +0,0 @@
Reduce database usage in Sliding Sync by not querying for background update completion after the update is known to be complete.


@@ -1 +0,0 @@
Advertise experimental support for [MSC4306](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) through `/_matrix/clients/versions` if enabled.


@@ -1 +0,0 @@
Improve order of validation and ratelimiting in room creation.


@@ -1 +0,0 @@
Refactor `Histogram` metrics to be homeserver-scoped.


@@ -1 +0,0 @@
Refactor `Gauge` metrics to be homeserver-scoped.


@@ -1 +0,0 @@
Register the MSC4306 endpoints in the CS API when the experimental feature is enabled.


@@ -1 +0,0 @@
Bump minimum version bound on Twisted to 21.2.0.


@@ -1 +0,0 @@
Use `twisted.internet.testing` module in tests instead of deprecated `twisted.test.proto_helpers`.


@@ -1 +0,0 @@
Bump minimum version bound on Twisted to 21.2.0.


@@ -1 +0,0 @@
Remove obsolete `/send_event` replication endpoint.


@@ -1 +0,0 @@
Update metrics linting to be able to handle custom metrics.


@@ -1 +0,0 @@
Work around `twisted.protocols.amp.TooLong` error by reducing logging in some tests.


@@ -1 +0,0 @@
Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`. See the relevant section in the upgrade notes for more information.


@@ -1 +0,0 @@
Refactor cache metrics to be homeserver-scoped.


@@ -1 +0,0 @@
Fix a long-standing bug where suspended users could not have server notices sent to them (a 403 was returned to the admin).


@@ -1 +0,0 @@
Refactor `Histogram` metrics to be homeserver-scoped.


@@ -1 +0,0 @@
Prevent "Move labelled issues to correct projects" GitHub Actions workflow from failing when an issue is already on the project board.


@@ -1 +0,0 @@
Update implementation of [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to include automatic subscription conflict prevention as introduced in later drafts.


@@ -1 +0,0 @@
Bump minimum supported Rust version (MSRV) to 1.82.0. Missed in [#18553](https://github.com/element-hq/synapse/pull/18553) (released in Synapse 1.134.0).


@@ -1 +0,0 @@
Stable support for delegating authentication to [Matrix Authentication Service](https://github.com/element-hq/matrix-authentication-service/).


@@ -1 +0,0 @@
Document that there can be multiple workers handling the `receipts` stream.


@@ -1 +0,0 @@
Improve worker documentation for some device paths.


@@ -1 +0,0 @@
Implement the push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306).


@@ -1 +0,0 @@
Fix an issue that could cause logcontexts to be lost on rate-limited requests. Found by @realtyem.


@@ -1 +0,0 @@
Make `Clock.sleep(..)` return a coroutine, so that mypy can catch places where we don't await on it.


@@ -1 +0,0 @@
CI debugging.


@@ -4396,7 +4396,7 @@
"exemplar": false,
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
"instant": false,
"legendFormat": "{{origin_server_name}} ",
"legendFormat": "{{server_name}} ",
"range": true,
"refId": "A"
}
@@ -4518,7 +4518,7 @@
"exemplar": false,
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
"instant": false,
"legendFormat": "{{destination_server_name}}",
"legendFormat": "{{server_name}}",
"range": true,
"refId": "A"
}

debian/changelog vendored

@@ -1,27 +1,3 @@
matrix-synapse-py3 (1.135.0) stable; urgency=medium
* New Synapse release 1.135.0.
-- Synapse Packaging team <packages@matrix.org> Fri, 01 Aug 2025 13:12:28 +0100
matrix-synapse-py3 (1.135.0~rc2) stable; urgency=medium
* New Synapse release 1.135.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Jul 2025 12:19:14 +0100
matrix-synapse-py3 (1.135.0~rc1) stable; urgency=medium
* New Synapse release 1.135.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Jul 2025 12:08:37 +0100
matrix-synapse-py3 (1.134.0) stable; urgency=medium
* New Synapse release 1.134.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Jul 2025 14:22:50 +0100
matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium
* New Synapse release 1.134.0rc1.


@@ -54,6 +54,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
export SYNAPSE_WORKER_TYPES="\
event_persister:2, \
background_worker, \
frontend_proxy, \
event_creator, \
user_dir, \
media_repository, \
@@ -64,7 +65,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
client_reader, \
appservice, \
pusher, \
device_lists:2, \
stream_writers=account_data+presence+receipts+to_device+typing"
fi


@@ -98,10 +98,6 @@ rc_delayed_event_mgmt:
per_second: 9999
burst_count: 9999
rc_room_creation:
per_second: 9999
burst_count: 9999
federation_rr_transactions_per_room_per_second: 9999
allow_device_name_lookup_over_federation: true


@@ -178,9 +178,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/deactivate$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)",
"^/_matrix/client/(r0|v3)/delete_devices$",
"^/_matrix/client/versions$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
"^/_matrix/client/(r0|v3|unstable)/register$",
@@ -197,9 +194,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
"^/_matrix/client/(r0|v3|unstable)/capabilities$",
"^/_matrix/client/(r0|v3|unstable)/notifications$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
@@ -271,6 +265,13 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"frontend_proxy": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
"endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"account_data": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
@@ -305,13 +306,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"device_lists": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
"endpoint_patterns": [],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"typing": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
@@ -328,15 +322,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"thread_subscriptions": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
"endpoint_patterns": [
"^/_matrix/client/unstable/io.element.msc4306/.*",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
}
# Templates for sections that may be inserted multiple times in config files
@@ -427,18 +412,16 @@ def add_worker_roles_to_shared_config(
# streams
instance_map = shared_config.setdefault("instance_map", {})
# This is a list of the stream_writers.
stream_writers = {
# This is a list of the stream_writers that there can be only one of. Events can be
# sharded, and therefore don't belong here.
singular_stream_writers = [
"account_data",
"events",
"device_lists",
"presence",
"receipts",
"to_device",
"typing",
"push_rules",
"thread_subscriptions",
}
]
# Worker-type specific sharding config. Now a single worker can fulfill multiple
# roles, check each.
@@ -448,11 +431,28 @@ def add_worker_roles_to_shared_config(
if "federation_sender" in worker_types_set:
shared_config.setdefault("federation_sender_instances", []).append(worker_name)
if "event_persister" in worker_types_set:
# Event persisters write to the events stream, so we need to update
# the list of event stream writers
shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
worker_name
)
# Map of stream writer instance names to host/ports combos
if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
instance_map[worker_name] = {
"path": f"/run/worker.{worker_port}",
}
else:
instance_map[worker_name] = {
"host": "localhost",
"port": worker_port,
}
# Update the list of stream writers. It's convenient that the name of the worker
# type is the same as the stream to write. Iterate over the whole list in case there
# is more than one.
for worker in worker_types_set:
if worker in stream_writers:
if worker in singular_stream_writers:
shared_config.setdefault("stream_writers", {}).setdefault(
worker, []
).append(worker_name)
@@ -876,13 +876,6 @@ def generate_worker_files(
else:
healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
# Special case for event_persister: those are just workers that write to
# the `events` stream. For other workers, the worker name is the same as the
# name of the stream they write to, but for some reason it is not the
# case for event_persister.
if "event_persister" in worker_types_set:
worker_types_set.add("events")
# Update the shared config with sharding-related options if necessary
add_worker_roles_to_shared_config(
shared_config, worker_types_set, worker_name, worker_port


@@ -22,46 +22,4 @@ To receive soft failed events in APIs like `/sync` and `/messages`, set `return_
to `true` in the admin client config. When `false`, the normal behaviour of these endpoints is to
exclude soft failed events.
**Note**: If the policy server flagged the event as spam and that caused soft failure, that will be indicated
in the event's `unsigned` content like so:
```json
{
  "type": "m.room.message",
  "other": "event_fields_go_here",
  "unsigned": {
    "io.element.synapse.soft_failed": true,
    "io.element.synapse.policy_server_spammy": true
  }
}
```
Default: `false`
## See events marked spammy by policy servers
Learn more about policy servers from [MSC4284](https://github.com/matrix-org/matrix-spec-proposals/pull/4284).
Similar to `return_soft_failed_events`, clients logged in with admin accounts can see events which were
flagged by the policy server as spammy (and thus soft failed) by setting `return_policy_server_spammy_events`
to `true`.
`return_policy_server_spammy_events` may be `true` while `return_soft_failed_events` is `false` to only see
policy server-flagged events. When `return_soft_failed_events` is `true` however, `return_policy_server_spammy_events`
is always `true`.
Events which were flagged by the policy server will be marked as `io.element.synapse.policy_server_spammy` in the
event's `unsigned` content, like so:
```json
{
  "type": "m.room.message",
  "other": "event_fields_go_here",
  "unsigned": {
    "io.element.synapse.soft_failed": true,
    "io.element.synapse.policy_server_spammy": true
  }
}
```
Default: `true` if `return_soft_failed_events` is `true`, otherwise `false`


@@ -1227,7 +1227,7 @@ See also the
## Controlling whether a user is shadow-banned
Shadow-banning is a useful tool for moderating malicious or egregiously abusive users.
A shadow-banned user receives successful responses to their client-server API requests,
but the events are not propagated into rooms. This can be an effective tool as it
(hopefully) takes longer for the user to realise they are being moderated before
@@ -1464,11 +1464,8 @@ _Added in Synapse 1.72.0._
## Redact all the events of a user
This endpoint allows an admin to redact the events of a given user. There are no restrictions on
redactions for a local user. By default, we puppet the user who sent the message to redact it themselves.
Redactions for non-local users are issued using the admin user, and will fail in rooms where the
admin user is not admin/does not have the specified power level to issue redactions. An option
is provided to override the default and allow the admin to issue the redactions in all cases.
This endpoint allows an admin to redact the events of a given user. There are no restrictions on redactions for a
local user. By default, we puppet the user who sent the message to redact it themselves. Redactions for non-local users are issued using the admin user, and will fail in rooms where the admin user is not admin/does not have the specified power level to issue redactions.
The API is
```
@@ -1478,7 +1475,7 @@ POST /_synapse/admin/v1/user/$user_id/redact
"rooms": ["!roomid1", "!roomid2"]
}
```
If an empty list is provided as the key for `rooms`, all events in all the rooms the user is a member of will be redacted,
otherwise all the events in the rooms provided in the request will be redacted.
The API starts redaction process running, and returns immediately with a JSON body with
@@ -1504,10 +1501,7 @@ The following JSON body parameter must be provided:
The following JSON body parameters are optional:
- `reason` - Reason the redaction is being requested, ie "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided.
- `use_admin` - If set to `true`, the admin user is used to issue the redactions, rather than puppeting the user. Useful
when the admin is also the moderator of the rooms that require redactions. Note that the redactions will fail in rooms
where the admin does not have the sufficient power level to issue the redactions.
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided
_Added in Synapse 1.116.0._
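For concreteness, a hedged example of calling the endpoint documented above (the access token, homeserver address, and user ID are placeholders):

```
curl -X POST \
    --header "Authorization: Bearer <admin_access_token>" \
    --data '{"rooms": []}' \
    'http://localhost:8008/_synapse/admin/v1/user/@spammer:example.com/redact'
```

Passing an empty `rooms` list redacts the user's events in every room they are a member of, as described above.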


@@ -7,23 +7,8 @@ proxy is supported, not SOCKS proxy or anything else.
## Configure
The proxy settings can be configured in the homeserver configuration file via
[`http_proxy`](../usage/configuration/config_documentation.md#http_proxy),
[`https_proxy`](../usage/configuration/config_documentation.md#https_proxy), and
[`no_proxy_hosts`](../usage/configuration/config_documentation.md#no_proxy_hosts).
`homeserver.yaml` example:
```yaml
http_proxy: http://USERNAME:PASSWORD@10.0.1.1:8080/
https_proxy: http://USERNAME:PASSWORD@proxy.example.com:8080/
no_proxy_hosts:
  - master.hostname.example.com
  - 10.1.0.0/16
  - 172.30.0.0/16
```
The proxy settings can also be configured via the `http_proxy`, `https_proxy`,
`no_proxy` environment variables. The environment variable is not case sensitive.
The `http_proxy`, `https_proxy`, `no_proxy` environment variables are used to
specify proxy settings. The environment variable is not case sensitive.
- `http_proxy`: Proxy server to use for HTTP requests.
- `https_proxy`: Proxy server to use for HTTPS requests.
- `no_proxy`: Comma-separated list of hosts, IP addresses, or IP ranges in CIDR
@@ -59,7 +44,7 @@ The proxy will be **used** for:
- phone-home stats
- recaptcha validation
- CAS auth validation
- OpenID Connect (OIDC)
- OpenID Connect
- Outbound federation
- Federation (checking public key revocation)
- Fetching public keys of other servers
@@ -68,7 +53,7 @@ The proxy will be **used** for:
It will **not be used** for:
- Application Services
- Matrix Identity servers
- Identity servers
- In worker configurations
- connections between workers
- connections from workers to Redis
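Returning to the Configure section above: a hedged shell sketch of the environment-variable form, reusing the host values from the removed `homeserver.yaml` example (credentials and hostnames are placeholders):

```
# Lower-case variants shown; the docs note the variables are not case sensitive.
export http_proxy=http://USERNAME:PASSWORD@10.0.1.1:8080/
export https_proxy=http://USERNAME:PASSWORD@proxy.example.com:8080/
export no_proxy=master.hostname.example.com,10.1.0.0/16,172.30.0.0/16
```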


@@ -117,77 +117,6 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).
# Upgrading to v1.136.0
## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`
The `run_as_background_process` function is now a method of the `ModuleApi` class. If
you were using the function directly from the module API, it will continue to work fine
but the background process metrics will not include an accurate `server_name` label.
This kind of metric labeling isn't relevant for many use cases and is used to
differentiate Synapse instances running in the same Python process (relevant to Synapse
Pro: Small Hosts). We recommend updating your usage to use the new
`ModuleApi.run_as_background_process` method to stay on top of future changes.
<details>
<summary>Example <code>run_as_background_process</code> upgrade</summary>
Before:
```python
class MyModule:
    def __init__(self, module_api: ModuleApi) -> None:
        run_as_background_process(__name__ + ":setup_database", self.setup_database)
```
After:
```python
class MyModule:
    def __init__(self, module_api: ModuleApi) -> None:
        module_api.run_as_background_process(__name__ + ":setup_database", self.setup_database)
```
</details>
## Metric labels have changed on `synapse_federation_last_received_pdu_time` and `synapse_federation_last_sent_pdu_time`
Previously, the `synapse_federation_last_received_pdu_time` and
`synapse_federation_last_sent_pdu_time` metrics both used the `server_name` label to
differentiate between different servers that we send and receive events from.
Since we're now using the `server_name` label to differentiate between different Synapse
homeserver instances running in the same process, these metrics have been changed as follows:
- `synapse_federation_last_received_pdu_time` now uses the `origin_server_name` label
- `synapse_federation_last_sent_pdu_time` now uses the `destination_server_name` label
The Grafana dashboard JSON in `contrib/grafana/synapse.json` has been updated to reflect
this change but you will need to manually update your own existing Grafana dashboards
using these metrics.
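For reference, a hedged sketch of an updated dashboard panel fragment, mirroring the `contrib/grafana/synapse.json` hunks earlier in this compare (only the two fields that change are shown):

```json
{
  "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
  "legendFormat": "{{origin_server_name}}"
}
```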
## Stable integration with Matrix Authentication Service
Support for [Matrix Authentication Service (MAS)](https://github.com/element-hq/matrix-authentication-service) is now stable, with a simplified configuration.
This stable integration requires MAS 0.20.0 or later.
The existing `experimental_features.msc3861` configuration option is now deprecated and will be removed in Synapse v1.137.0.
Synapse deployments already using MAS should now use the new configuration options:
```yaml
matrix_authentication_service:
  # Enable the MAS integration
  enabled: true
  # The base URL where Synapse will contact MAS
  endpoint: http://localhost:8080
  # The shared secret used to authenticate MAS requests, must be the same as `matrix.secret` in the MAS configuration
  # See https://element-hq.github.io/matrix-authentication-service/reference/configuration.html#matrix
  secret: "asecurerandomsecretstring"
```
They must remove the `experimental_features.msc3861` configuration option from their configuration.
They can also remove the client previously used by Synapse [in the MAS configuration](https://element-hq.github.io/matrix-authentication-service/reference/configuration.html#clients) as it is no longer in use.
# Upgrading to v1.135.0
## `on_user_registration` module API callback may now run on any worker
@@ -208,10 +137,10 @@ native ICU library on your system is no longer required.
## Documented endpoint which can be delegated to a federation worker
The endpoint `^/_matrix/federation/v1/version$` can be delegated to a federation
worker. This is not new behaviour, but had not been documented yet. The
[list of delegatable endpoints](workers.md#synapseappgeneric_worker) has
been updated to include it. Make sure to check your reverse proxy rules if you
are using workers.
# Upgrading to v1.126.0


@@ -610,61 +610,6 @@ manhole_settings:
ssh_pub_key_path: CONFDIR/id_rsa.pub
```
---
### `http_proxy`
*(string|null)* Proxy server to use for HTTP requests.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md). There is no default for this option.
Example configuration:
```yaml
http_proxy: http://USERNAME:PASSWORD@10.0.1.1:8080/
```
---
### `https_proxy`
*(string|null)* Proxy server to use for HTTPS requests.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md). There is no default for this option.
Example configuration:
```yaml
https_proxy: http://USERNAME:PASSWORD@proxy.example.com:8080/
```
---
### `no_proxy_hosts`
*(array)* List of hosts, IP addresses, or IP ranges in CIDR format which should not use the proxy. Synapse will directly connect to these hosts.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md). There is no default for this option.
Example configuration:
```yaml
no_proxy_hosts:
  - master.hostname.example.com
  - 10.1.0.0/16
  - 172.30.0.0/16
```
---
### `matrix_authentication_service`
*(object)* The `matrix_authentication_service` setting configures integration with [Matrix Authentication Service (MAS)](https://github.com/element-hq/matrix-authentication-service).
This setting has the following sub-options:
* `enabled` (boolean): Whether or not to enable the MAS integration. If this is set to `false`, Synapse will use its legacy internal authentication API. Defaults to `false`.
* `endpoint` (string): The URL where Synapse can reach MAS. This *must* have the `discovery` and `oauth` resources mounted. Defaults to `"http://localhost:8080"`.
* `secret` (string|null): A shared secret that will be used to authenticate requests from and to MAS.
* `secret_path` (string|null): Alternative to `secret`, reading the shared secret from a file. The file should be a plain text file, containing only the secret. Synapse reads the secret from the given file once at startup.
Example configuration:
```yaml
matrix_authentication_service:
  enabled: true
  secret: someverysecuresecret
  endpoint: http://localhost:8080
```
---
### `dummy_events_threshold`
*(integer)* Forward extremities can build up in a room due to networking delays between homeservers. Once this happens in a large room, calculation of the state of that room can become quite expensive. To mitigate this, once the number of forward extremities reaches a given threshold, Synapse will send an `org.matrix.dummy_event` event, which will reduce the forward extremities in the room.
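For concreteness, a hedged example in the style of this page's other config entries (the value is illustrative, not the documented default):

Example configuration:
```yaml
dummy_events_threshold: 5
```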
@@ -1980,8 +1925,9 @@ This setting has the following sub-options:
Default configuration:
```yaml
rc_delayed_event_mgmt:
  per_second: 1.0
  burst_count: 5.0
  per_user:
    per_second: 1.0
    burst_count: 5.0
```
Example configuration:
@@ -2018,31 +1964,6 @@ rc_reports:
burst_count: 20.0
```
---
### `rc_room_creation`
*(object)* Sets rate limits for how often users are able to create rooms.
This setting has the following sub-options:
* `per_second` (number): Maximum number of requests a client can send per second.
* `burst_count` (number): Maximum number of requests a client can send before being throttled.
Default configuration:
```yaml
rc_room_creation:
  per_user:
    per_second: 0.016
    burst_count: 10.0
```
Example configuration:
```yaml
rc_room_creation:
  per_second: 1.0
  burst_count: 5.0
```
---
### `federation_rr_transactions_per_room_per_second`
*(integer)* Sets outgoing federation transaction frequency for sending read-receipts, per-room.
@@ -2442,7 +2363,7 @@ recaptcha_public_key: YOUR_PUBLIC_KEY
The file should be a plain text file, containing only the public key. Synapse reads the public key from the given file once at startup.
_Added in Synapse 1.135.0._
_Added in Synapse 1.134.0._
Defaults to `null`.
@@ -2466,7 +2387,7 @@ recaptcha_private_key: YOUR_PRIVATE_KEY
The file should be a plain text file, containing only the private key. Synapse reads the private key from the given file once at startup.
_Added in Synapse 1.135.0._
_Added in Synapse 1.134.0._
Defaults to `null`.
@@ -3887,11 +3808,7 @@ encryption_enabled_by_default_for_room_type: invite
This setting has the following sub-options:
* `enabled` (boolean): Defines whether users can search the user directory. If `false` then empty responses are returned to all queries.
*Warning: While the homeserver may determine which subset of users are searched, the Matrix specification requires homeservers to include (at minimum) users visible in public rooms and users sharing a room with the requester. Using `false` improves performance but violates this requirement.*
Defaults to `true`.
* `enabled` (boolean): Defines whether users can search the user directory. If false then empty responses are returned to all queries. Defaults to `true`.
* `search_all_users` (boolean): Defines whether to search all users visible to your homeserver at the time the search is performed. If set to true, will return all users known to the homeserver matching the search query. If false, search results will only contain users visible in public rooms and users sharing a room with the requester.
@@ -4421,8 +4338,6 @@ This setting has the following sub-options:
* `push_rules` (string): Name of a worker assigned to the `push_rules` stream.
* `device_lists` (string): Name of a worker assigned to the `device_lists` stream.
Example configuration:
```yaml
stream_writers:


@@ -238,9 +238,7 @@ information.
^/_matrix/client/unstable/im.nheko.summary/summary/.*$
^/_matrix/client/(r0|v3|unstable)/account/3pid$
^/_matrix/client/(r0|v3|unstable)/account/whoami$
^/_matrix/client/(r0|v3|unstable)/account/deactivate$
^/_matrix/client/(r0|v3)/delete_devices$
^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)
^/_matrix/client/(r0|v3|unstable)/devices$
^/_matrix/client/versions$
^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$
^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event/
@@ -259,9 +257,7 @@ information.
^/_matrix/client/(r0|v3|unstable)/keys/changes$
^/_matrix/client/(r0|v3|unstable)/keys/claim$
^/_matrix/client/(r0|v3|unstable)/room_keys/
^/_matrix/client/(r0|v3|unstable)/keys/upload
^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$
^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$
^/_matrix/client/(r0|v3|unstable)/keys/upload$
# Registration/login requests
^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -286,6 +282,7 @@ Additionally, the following REST endpoints can be handled for GET requests:
^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
^/_matrix/client/(api/v1|r0|v3|unstable)/devices/
# Account data requests
^/_matrix/client/(r0|v3|unstable)/.*/tags
@@ -332,6 +329,7 @@ set to `true`), the following endpoints can be handled by the worker:
^/_synapse/admin/v2/users/[^/]+$
^/_synapse/admin/v1/username_available$
^/_synapse/admin/v1/users/[^/]+/_allow_cross_signing_replacement_without_uia$
# Only the GET method:
^/_synapse/admin/v1/users/[^/]+/devices$
Note that a [HTTP listener](usage/configuration/config_documentation.md#listeners)
@@ -532,9 +530,8 @@ the stream writer for the `account_data` stream:
##### The `receipts` stream
The `receipts` stream supports multiple writers. The following endpoints
can be handled by any worker, but should be routed directly to one of the workers
configured as stream writer for the `receipts` stream:
The following endpoints should be routed directly to the worker configured as
the stream writer for the `receipts` stream:
^/_matrix/client/(r0|v3|unstable)/rooms/.*/receipt
^/_matrix/client/(r0|v3|unstable)/rooms/.*/read_markers
@@ -553,18 +550,6 @@ the stream writer for the `push_rules` stream:
^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
##### The `device_lists` stream
The `device_lists` stream supports multiple writers. The following endpoints
can be handled by any worker, but should be routed directly to one of the workers
configured as stream writer for the `device_lists` stream:
^/_matrix/client/(r0|v3)/delete_devices$
^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)
^/_matrix/client/(r0|v3|unstable)/keys/upload
^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$
^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$
#### Restrict outbound federation traffic to a specific set of workers
The


@@ -1,17 +1,6 @@
[mypy]
namespace_packages = True
# Our custom mypy plugin should remain first in this list.
#
# mypy has a limitation where it only chooses the first plugin that returns a non-None
# value for each hook (known-limitation, c.f.
# https://github.com/python/mypy/issues/19524). We work around this by putting our custom
# plugin first in the plugin order and then manually calling any other conflicting
# plugin hooks in our own plugin followed by our own checks.
#
# If you add a new plugin, make sure to check whether the hooks being used conflict with
# our custom plugin hooks and if so, manually call the other plugin's hooks in our
# custom plugin. (also applies to if the plugin is updated in the future)
plugins = scripts-dev/mypy_synapse_plugin.py, pydantic.mypy, mypy_zope:plugin
plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
follow_imports = normal
show_error_codes = True
show_traceback = True
@@ -110,6 +99,3 @@ ignore_missing_imports = True
[mypy-multipart.*]
ignore_missing_imports = True
[mypy-mypy_zope.*]
ignore_missing_imports = True

poetry.lock generated

@@ -34,15 +34,15 @@ tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" a
[[package]]
name = "authlib"
version = "1.6.1"
version = "1.6.0"
description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\""
files = [
{file = "authlib-1.6.1-py2.py3-none-any.whl", hash = "sha256:e9d2031c34c6309373ab845afc24168fe9e93dc52d252631f52642f21f5ed06e"},
{file = "authlib-1.6.1.tar.gz", hash = "sha256:4dffdbb1460ba6ec8c17981a4c67af7d8af131231b5a36a88a1e8c80c111cdfd"},
{file = "authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d"},
{file = "authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210"},
]
[package.dependencies]
@@ -435,7 +435,7 @@ description = "XML bomb protection for Python stdlib modules"
optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
{file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
@@ -478,7 +478,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"},
{file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"},
@@ -504,19 +504,18 @@ smmap = ">=3.0.1,<6"
[[package]]
name = "gitpython"
version = "3.1.45"
version = "3.1.44"
description = "GitPython is a Python library used to interact with Git repositories"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77"},
{file = "gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c"},
{file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"},
{file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"},
]
[package.dependencies]
gitdb = ">=4.0.1,<5"
typing-extensions = {version = ">=3.10.0.2", markers = "python_version < \"3.10\""}
[package.extras]
doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"]
@@ -529,7 +528,7 @@ description = "Python wrapper for hiredis"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"redis\""
markers = "extra == \"redis\" or extra == \"all\""
files = [
{file = "hiredis-3.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:add17efcbae46c5a6a13b244ff0b4a8fa079602ceb62290095c941b42e9d5dec"},
{file = "hiredis-3.2.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:5fe955cc4f66c57df1ae8e5caf4de2925d43b5efab4e40859662311d1bcc5f54"},
@@ -866,7 +865,7 @@ description = "Jaeger Python OpenTracing Tracer implementation"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
]
@@ -937,14 +936,14 @@ i18n = ["Babel (>=2.7)"]
[[package]]
name = "jsonschema"
version = "4.25.0"
version = "4.24.0"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716"},
{file = "jsonschema-4.25.0.tar.gz", hash = "sha256:e63acf5c11762c0e6672ffb61482bdf57f0876684d8d249c0fe2d730d48bc55f"},
{file = "jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"},
{file = "jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"},
]
[package.dependencies]
@@ -955,7 +954,7 @@ rpds-py = ">=0.7.1"
[package.extras]
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"]
[[package]]
name = "jsonschema-specifications"
@@ -1004,7 +1003,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
files = [
{file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
{file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},
@@ -1015,107 +1014,145 @@ pyasn1 = ">=0.4.6"
[[package]]
name = "lxml"
version = "6.0.0"
version = "5.4.0"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
optional = true
python-versions = ">=3.8"
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"all\" or extra == \"url-preview\""
markers = "extra == \"url-preview\" or extra == \"all\""
files = [
{file = "lxml-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:35bc626eec405f745199200ccb5c6b36f202675d204aa29bb52e27ba2b71dea8"},
{file = "lxml-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:246b40f8a4aec341cbbf52617cad8ab7c888d944bfe12a6abd2b1f6cfb6f6082"},
{file = "lxml-6.0.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2793a627e95d119e9f1e19720730472f5543a6d84c50ea33313ce328d870f2dd"},
{file = "lxml-6.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:46b9ed911f36bfeb6338e0b482e7fe7c27d362c52fde29f221fddbc9ee2227e7"},
{file = "lxml-6.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b4790b558bee331a933e08883c423f65bbcd07e278f91b2272489e31ab1e2b4"},
{file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2030956cf4886b10be9a0285c6802e078ec2391e1dd7ff3eb509c2c95a69b76"},
{file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23854ecf381ab1facc8f353dcd9adeddef3652268ee75297c1164c987c11dc"},
{file = "lxml-6.0.0-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:43fe5af2d590bf4691531b1d9a2495d7aab2090547eaacd224a3afec95706d76"},
{file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74e748012f8c19b47f7d6321ac929a9a94ee92ef12bc4298c47e8b7219b26541"},
{file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:43cfbb7db02b30ad3926e8fceaef260ba2fb7df787e38fa2df890c1ca7966c3b"},
{file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:34190a1ec4f1e84af256495436b2d196529c3f2094f0af80202947567fdbf2e7"},
{file = "lxml-6.0.0-cp310-cp310-win32.whl", hash = "sha256:5967fe415b1920a3877a4195e9a2b779249630ee49ece22021c690320ff07452"},
{file = "lxml-6.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:f3389924581d9a770c6caa4df4e74b606180869043b9073e2cec324bad6e306e"},
{file = "lxml-6.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:522fe7abb41309e9543b0d9b8b434f2b630c5fdaf6482bee642b34c8c70079c8"},
{file = "lxml-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ee56288d0df919e4aac43b539dd0e34bb55d6a12a6562038e8d6f3ed07f9e36"},
{file = "lxml-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8dd6dd0e9c1992613ccda2bcb74fc9d49159dbe0f0ca4753f37527749885c25"},
{file = "lxml-6.0.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d7ae472f74afcc47320238b5dbfd363aba111a525943c8a34a1b657c6be934c3"},
{file = "lxml-6.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5592401cdf3dc682194727c1ddaa8aa0f3ddc57ca64fd03226a430b955eab6f6"},
{file = "lxml-6.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58ffd35bd5425c3c3b9692d078bf7ab851441434531a7e517c4984d5634cd65b"},
{file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f720a14aa102a38907c6d5030e3d66b3b680c3e6f6bc95473931ea3c00c59967"},
{file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2a5e8d207311a0170aca0eb6b160af91adc29ec121832e4ac151a57743a1e1e"},
{file = "lxml-6.0.0-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:2dd1cc3ea7e60bfb31ff32cafe07e24839df573a5e7c2d33304082a5019bcd58"},
{file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cfcf84f1defed7e5798ef4f88aa25fcc52d279be731ce904789aa7ccfb7e8d2"},
{file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a52a4704811e2623b0324a18d41ad4b9fabf43ce5ff99b14e40a520e2190c851"},
{file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c16304bba98f48a28ae10e32a8e75c349dd742c45156f297e16eeb1ba9287a1f"},
{file = "lxml-6.0.0-cp311-cp311-win32.whl", hash = "sha256:f8d19565ae3eb956d84da3ef367aa7def14a2735d05bd275cd54c0301f0d0d6c"},
{file = "lxml-6.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2d71cdefda9424adff9a3607ba5bbfc60ee972d73c21c7e3c19e71037574816"},
{file = "lxml-6.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:8a2e76efbf8772add72d002d67a4c3d0958638696f541734304c7f28217a9cab"},
{file = "lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108"},
{file = "lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be"},
{file = "lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab"},
{file = "lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563"},
{file = "lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7"},
{file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7"},
{file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991"},
{file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da"},
{file = "lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e"},
{file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741"},
{file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3"},
{file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16"},
{file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0"},
{file = "lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a"},
{file = "lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3"},
{file = "lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb"},
{file = "lxml-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6da7cd4f405fd7db56e51e96bff0865b9853ae70df0e6720624049da76bde2da"},
{file = "lxml-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b34339898bb556a2351a1830f88f751679f343eabf9cf05841c95b165152c9e7"},
{file = "lxml-6.0.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:51a5e4c61a4541bd1cd3ba74766d0c9b6c12d6a1a4964ef60026832aac8e79b3"},
{file = "lxml-6.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d18a25b19ca7307045581b18b3ec9ead2b1db5ccd8719c291f0cd0a5cec6cb81"},
{file = "lxml-6.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d4f0c66df4386b75d2ab1e20a489f30dc7fd9a06a896d64980541506086be1f1"},
{file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f4b481b6cc3a897adb4279216695150bbe7a44c03daba3c894f49d2037e0a24"},
{file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a78d6c9168f5bcb20971bf3329c2b83078611fbe1f807baadc64afc70523b3a"},
{file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae06fbab4f1bb7db4f7c8ca9897dc8db4447d1a2b9bee78474ad403437bcc29"},
{file = "lxml-6.0.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:1fa377b827ca2023244a06554c6e7dc6828a10aaf74ca41965c5d8a4925aebb4"},
{file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1676b56d48048a62ef77a250428d1f31f610763636e0784ba67a9740823988ca"},
{file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:0e32698462aacc5c1cf6bdfebc9c781821b7e74c79f13e5ffc8bfe27c42b1abf"},
{file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4d6036c3a296707357efb375cfc24bb64cd955b9ec731abf11ebb1e40063949f"},
{file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7488a43033c958637b1a08cddc9188eb06d3ad36582cebc7d4815980b47e27ef"},
{file = "lxml-6.0.0-cp313-cp313-win32.whl", hash = "sha256:5fcd7d3b1d8ecb91445bd71b9c88bdbeae528fefee4f379895becfc72298d181"},
{file = "lxml-6.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:2f34687222b78fff795feeb799a7d44eca2477c3d9d3a46ce17d51a4f383e32e"},
{file = "lxml-6.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:21db1ec5525780fd07251636eb5f7acb84003e9382c72c18c542a87c416ade03"},
{file = "lxml-6.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4eb114a0754fd00075c12648d991ec7a4357f9cb873042cc9a77bf3a7e30c9db"},
{file = "lxml-6.0.0-cp38-cp38-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:7da298e1659e45d151b4028ad5c7974917e108afb48731f4ed785d02b6818994"},
{file = "lxml-6.0.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7bf61bc4345c1895221357af8f3e89f8c103d93156ef326532d35c707e2fb19d"},
{file = "lxml-6.0.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63b634facdfbad421d4b61c90735688465d4ab3a8853ac22c76ccac2baf98d97"},
{file = "lxml-6.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e380e85b93f148ad28ac15f8117e2fd8e5437aa7732d65e260134f83ce67911b"},
{file = "lxml-6.0.0-cp38-cp38-win32.whl", hash = "sha256:185efc2fed89cdd97552585c624d3c908f0464090f4b91f7d92f8ed2f3b18f54"},
{file = "lxml-6.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:f97487996a39cb18278ca33f7be98198f278d0bc3c5d0fd4d7b3d63646ca3c8a"},
{file = "lxml-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85b14a4689d5cff426c12eefe750738648706ea2753b20c2f973b2a000d3d261"},
{file = "lxml-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f64ccf593916e93b8d36ed55401bb7fe9c7d5de3180ce2e10b08f82a8f397316"},
{file = "lxml-6.0.0-cp39-cp39-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:b372d10d17a701b0945f67be58fae4664fd056b85e0ff0fbc1e6c951cdbc0512"},
{file = "lxml-6.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a674c0948789e9136d69065cc28009c1b1874c6ea340253db58be7622ce6398f"},
{file = "lxml-6.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:edf6e4c8fe14dfe316939711e3ece3f9a20760aabf686051b537a7562f4da91a"},
{file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:048a930eb4572829604982e39a0c7289ab5dc8abc7fc9f5aabd6fbc08c154e93"},
{file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0b5fa5eda84057a4f1bbb4bb77a8c28ff20ae7ce211588d698ae453e13c6281"},
{file = "lxml-6.0.0-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:c352fc8f36f7e9727db17adbf93f82499457b3d7e5511368569b4c5bd155a922"},
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8db5dc617cb937ae17ff3403c3a70a7de9df4852a046f93e71edaec678f721d0"},
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:2181e4b1d07dde53986023482673c0f1fba5178ef800f9ab95ad791e8bdded6a"},
{file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3c98d5b24c6095e89e03d65d5c574705be3d49c0d8ca10c17a8a4b5201b72f5"},
{file = "lxml-6.0.0-cp39-cp39-win32.whl", hash = "sha256:04d67ceee6db4bcb92987ccb16e53bef6b42ced872509f333c04fb58a3315256"},
{file = "lxml-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e0b1520ef900e9ef62e392dd3d7ae4f5fa224d1dd62897a792cf353eb20b6cae"},
{file = "lxml-6.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:e35e8aaaf3981489f42884b59726693de32dabfc438ac10ef4eb3409961fd402"},
{file = "lxml-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:dbdd7679a6f4f08152818043dbb39491d1af3332128b3752c3ec5cebc0011a72"},
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40442e2a4456e9910875ac12951476d36c0870dcb38a68719f8c4686609897c4"},
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db0efd6bae1c4730b9c863fc4f5f3c0fa3e8f05cae2c44ae141cb9dfc7d091dc"},
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ab542c91f5a47aaa58abdd8ea84b498e8e49fe4b883d67800017757a3eb78e8"},
{file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:013090383863b72c62a702d07678b658fa2567aa58d373d963cca245b017e065"},
{file = "lxml-6.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c86df1c9af35d903d2b52d22ea3e66db8058d21dc0f59842ca5deb0595921141"},
{file = "lxml-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4337e4aec93b7c011f7ee2e357b0d30562edd1955620fdd4aeab6aacd90d43c5"},
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ae74f7c762270196d2dda56f8dd7309411f08a4084ff2dfcc0b095a218df2e06"},
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:059c4cbf3973a621b62ea3132934ae737da2c132a788e6cfb9b08d63a0ef73f9"},
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f090a9bc0ce8da51a5632092f98a7e7f84bca26f33d161a98b57f7fb0004ca"},
{file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9da022c14baeec36edfcc8daf0e281e2f55b950249a455776f0d1adeeada4734"},
{file = "lxml-6.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a55da151d0b0c6ab176b4e761670ac0e2667817a1e0dadd04a01d0561a219349"},
{file = "lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72"},
{file = "lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c"},
{file = "lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7"},
{file = "lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf"},
{file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28"},
{file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609"},
{file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4"},
{file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7"},
{file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f"},
{file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997"},
{file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c"},
{file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b"},
{file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b"},
{file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563"},
{file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5"},
{file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776"},
{file = "lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7"},
{file = "lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250"},
{file = "lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9"},
{file = "lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7"},
{file = "lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa"},
{file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df"},
{file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e"},
{file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44"},
{file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba"},
{file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba"},
{file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c"},
{file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8"},
{file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86"},
{file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056"},
{file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7"},
{file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd"},
{file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751"},
{file = "lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4"},
{file = "lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539"},
{file = "lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4"},
{file = "lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d"},
{file = "lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779"},
{file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e"},
{file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9"},
{file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5"},
{file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5"},
{file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4"},
{file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e"},
{file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7"},
{file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079"},
{file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20"},
{file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8"},
{file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f"},
{file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc"},
{file = "lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f"},
{file = "lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2"},
{file = "lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0"},
{file = "lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de"},
{file = "lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76"},
{file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d"},
{file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422"},
{file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551"},
{file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c"},
{file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff"},
{file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60"},
{file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8"},
{file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982"},
{file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61"},
{file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54"},
{file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b"},
{file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a"},
{file = "lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82"},
{file = "lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f"},
{file = "lxml-5.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7be701c24e7f843e6788353c055d806e8bd8466b52907bafe5d13ec6a6dbaecd"},
{file = "lxml-5.4.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb54f7c6bafaa808f27166569b1511fc42701a7713858dddc08afdde9746849e"},
{file = "lxml-5.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97dac543661e84a284502e0cf8a67b5c711b0ad5fb661d1bd505c02f8cf716d7"},
{file = "lxml-5.4.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:c70e93fba207106cb16bf852e421c37bbded92acd5964390aad07cb50d60f5cf"},
{file = "lxml-5.4.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9c886b481aefdf818ad44846145f6eaf373a20d200b5ce1a5c8e1bc2d8745410"},
{file = "lxml-5.4.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:fa0e294046de09acd6146be0ed6727d1f42ded4ce3ea1e9a19c11b6774eea27c"},
{file = "lxml-5.4.0-cp36-cp36m-win32.whl", hash = "sha256:61c7bbf432f09ee44b1ccaa24896d21075e533cd01477966a5ff5a71d88b2f56"},
{file = "lxml-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121"},
{file = "lxml-5.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872"},
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e"},
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:891f7f991a68d20c75cb13c5c9142b2a3f9eb161f1f12a9489c82172d1f133c0"},
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92"},
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ac7ba71f9561cd7d7b55e1ea5511543c0282e2b6450f122672a2694621d63b7e"},
{file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5"},
{file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:ce31158630a6ac85bddd6b830cffd46085ff90498b397bd0a259f59d27a12188"},
{file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6"},
{file = "lxml-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063"},
{file = "lxml-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49"},
{file = "lxml-5.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eaf24066ad0b30917186420d51e2e3edf4b0e2ea68d8cd885b14dc8afdcf6556"},
{file = "lxml-5.4.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b31a3a77501d86d8ade128abb01082724c0dfd9524f542f2f07d693c9f1175f"},
{file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e108352e203c7afd0eb91d782582f00a0b16a948d204d4dec8565024fafeea5"},
{file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11a96c3b3f7551c8a8109aa65e8594e551d5a84c76bf950da33d0fb6dfafab7"},
{file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ca755eebf0d9e62d6cb013f1261e510317a41bf4650f22963474a663fdfe02aa"},
{file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4cd915c0fb1bed47b5e6d6edd424ac25856252f09120e3e8ba5154b6b921860e"},
{file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:226046e386556a45ebc787871d6d2467b32c37ce76c2680f5c608e25823ffc84"},
{file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b108134b9667bcd71236c5a02aad5ddd073e372fb5d48ea74853e009fe38acb6"},
{file = "lxml-5.4.0-cp38-cp38-win32.whl", hash = "sha256:1320091caa89805df7dcb9e908add28166113dcd062590668514dbd510798c88"},
{file = "lxml-5.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:073eb6dcdf1f587d9b88c8c93528b57eccda40209cf9be549d469b942b41d70b"},
{file = "lxml-5.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bda3ea44c39eb74e2488297bb39d47186ed01342f0022c8ff407c250ac3f498e"},
{file = "lxml-5.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9ceaf423b50ecfc23ca00b7f50b64baba85fb3fb91c53e2c9d00bc86150c7e40"},
{file = "lxml-5.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:664cdc733bc87449fe781dbb1f309090966c11cc0c0cd7b84af956a02a8a4729"},
{file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67ed8a40665b84d161bae3181aa2763beea3747f748bca5874b4af4d75998f87"},
{file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b4a3bd174cc9cdaa1afbc4620c049038b441d6ba07629d89a83b408e54c35cd"},
{file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b0989737a3ba6cf2a16efb857fb0dfa20bc5c542737fddb6d893fde48be45433"},
{file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:dc0af80267edc68adf85f2a5d9be1cdf062f973db6790c1d065e45025fa26140"},
{file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:639978bccb04c42677db43c79bdaa23785dc7f9b83bfd87570da8207872f1ce5"},
{file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a99d86351f9c15e4a901fc56404b485b1462039db59288b203f8c629260a142"},
{file = "lxml-5.4.0-cp39-cp39-win32.whl", hash = "sha256:3e6d5557989cdc3ebb5302bbdc42b439733a841891762ded9514e74f60319ad6"},
{file = "lxml-5.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8c9b7f16b63e65bbba889acb436a1034a82d34fa09752d754f88d708eca80e1"},
{file = "lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55"},
{file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740"},
{file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5"},
{file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37"},
{file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571"},
{file = "lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4"},
{file = "lxml-5.4.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f11a1526ebd0dee85e7b1e39e39a0cc0d9d03fb527f56d8457f6df48a10dc0c"},
{file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b4afaf38bf79109bb060d9016fad014a9a48fb244e11b94f74ae366a64d252"},
{file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de6f6bb8a7840c7bf216fb83eec4e2f79f7325eca8858167b68708b929ab2172"},
{file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5cca36a194a4eb4e2ed6be36923d3cffd03dcdf477515dea687185506583d4c9"},
{file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b7c86884ad23d61b025989d99bfdd92a7351de956e01c61307cb87035960bcb1"},
{file = "lxml-5.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:53d9469ab5460402c19553b56c3648746774ecd0681b1b27ea74d5d8a3ef5590"},
{file = "lxml-5.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:56dbdbab0551532bb26c19c914848d7251d73edb507c3079d6805fa8bba5b706"},
{file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14479c2ad1cb08b62bb941ba8e0e05938524ee3c3114644df905d2331c76cd57"},
{file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32697d2ea994e0db19c1df9e40275ffe84973e4232b5c274f47e7c1ec9763cdd"},
{file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24f6df5f24fc3385f622c0c9d63fe34604893bc1a5bdbb2dbf5870f85f9a404a"},
{file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:151d6c40bc9db11e960619d2bf2ec5829f0aaffb10b41dcf6ad2ce0f3c0b2325"},
{file = "lxml-5.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4025bf2884ac4370a3243c5aa8d66d3cb9e15d3ddd0af2d796eccc5f0244390e"},
{file = "lxml-5.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9459e6892f59ecea2e2584ee1058f5d8f629446eab52ba2305ae13a32a059530"},
{file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47fb24cc0f052f0576ea382872b3fc7e1f7e3028e53299ea751839418ade92a6"},
{file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50441c9de951a153c698b9b99992e806b71c1f36d14b154592580ff4a9d0d877"},
{file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ab339536aa798b1e17750733663d272038bf28069761d5be57cb4a9b0137b4f8"},
{file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9776af1aad5a4b4a1317242ee2bea51da54b2a7b7b48674be736d463c999f37d"},
{file = "lxml-5.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:63e7968ff83da2eb6fdda967483a7a023aa497d85ad8f05c3ad9b1f2e8c84987"},
{file = "lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd"},
]

[package.extras]
@@ -1123,6 +1160,7 @@ cssselect = ["cssselect (>=0.7)"]
html-clean = ["lxml_html_clean"]
html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
source = ["Cython (>=3.0.11,<3.1.0)"]

[[package]]
name = "lxml-stubs"
@@ -1261,7 +1299,7 @@ description = "An LDAP3 auth provider for Synapse"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
files = [
{file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"},
{file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"},
@@ -1387,51 +1425,50 @@ docs = ["sphinx (>=8,<9)", "sphinx-autobuild"]

[[package]]
name = "mypy"
version = "1.16.1"
version = "1.13.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.9"
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mypy-1.16.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4f0fed1022a63c6fec38f28b7fc77fca47fd490445c69d0a66266c59dd0b88a"},
{file = "mypy-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86042bbf9f5a05ea000d3203cf87aa9d0ccf9a01f73f71c58979eb9249f46d72"},
{file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea7469ee5902c95542bea7ee545f7006508c65c8c54b06dc2c92676ce526f3ea"},
{file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:352025753ef6a83cb9e7f2427319bb7875d1fdda8439d1e23de12ab164179574"},
{file = "mypy-1.16.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff9fa5b16e4c1364eb89a4d16bcda9987f05d39604e1e6c35378a2987c1aac2d"},
{file = "mypy-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:1256688e284632382f8f3b9e2123df7d279f603c561f099758e66dd6ed4e8bd6"},
{file = "mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc"},
{file = "mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782"},
{file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507"},
{file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca"},
{file = "mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4"},
{file = "mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6"},
{file = "mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d"},
{file = "mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9"},
{file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79"},
{file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15"},
{file = "mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd"},
{file = "mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b"},
{file = "mypy-1.16.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ddc91eb318c8751c69ddb200a5937f1232ee8efb4e64e9f4bc475a33719de438"},
{file = "mypy-1.16.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:87ff2c13d58bdc4bbe7dc0dedfe622c0f04e2cb2a492269f3b418df2de05c536"},
{file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a7cfb0fe29fe5a9841b7c8ee6dffb52382c45acdf68f032145b75620acfbd6f"},
{file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:051e1677689c9d9578b9c7f4d206d763f9bbd95723cd1416fad50db49d52f359"},
{file = "mypy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d5d2309511cc56c021b4b4e462907c2b12f669b2dbeb68300110ec27723971be"},
{file = "mypy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:4f58ac32771341e38a853c5d0ec0dfe27e18e27da9cdb8bbc882d2249c71a3ee"},
{file = "mypy-1.16.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7fc688329af6a287567f45cc1cefb9db662defeb14625213a5b7da6e692e2069"},
{file = "mypy-1.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e198ab3f55924c03ead626ff424cad1732d0d391478dfbf7bb97b34602395da"},
{file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09aa4f91ada245f0a45dbc47e548fd94e0dd5a8433e0114917dc3b526912a30c"},
{file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13c7cd5b1cb2909aa318a90fd1b7e31f17c50b242953e7dd58345b2a814f6383"},
{file = "mypy-1.16.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:58e07fb958bc5d752a280da0e890c538f1515b79a65757bbdc54252ba82e0b40"},
{file = "mypy-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:f895078594d918f93337a505f8add9bd654d1a24962b4c6ed9390e12531eb31b"},
{file = "mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37"},
{file = "mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab"},
{file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"},
{file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"},
{file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"},
{file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"},
{file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"},
{file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"},
{file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"},
{file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"},
{file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"},
{file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"},
{file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"},
{file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"},
{file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"},
{file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"},
{file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"},
{file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"},
{file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"},
{file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"},
{file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"},
{file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"},
{file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"},
{file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"},
{file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"},
{file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"},
{file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"},
{file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"},
{file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"},
{file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"},
{file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"},
{file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"},
{file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"},
{file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"},
]

[package.dependencies]
mypy_extensions = ">=1.0.0"
pathspec = ">=0.9.0"
mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing_extensions = ">=4.6.0"
typing-extensions = ">=4.6.0"

[package.extras]
dmypy = ["psutil (>=4.0)"]
@@ -1454,18 +1491,18 @@ files = [

[[package]]
name = "mypy-zope"
version = "1.0.13"
version = "1.0.12"
description = "Plugin for mypy to support zope interfaces"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "mypy_zope-1.0.13-py3-none-any.whl", hash = "sha256:13740c4cbc910cca2c143c6709e1c483c991abeeeb7b629ad6f73d8ac1edad15"},
{file = "mypy_zope-1.0.13.tar.gz", hash = "sha256:63fb4d035ea874baf280dc69e714dcde4bd2a4a4837a0fd8d90ce91bea510f99"},
{file = "mypy_zope-1.0.12-py3-none-any.whl", hash = "sha256:f2ecf169f886fbc266e9339db0c2f3818528a7536b9bb4f5ece1d5854dc2f27c"},
{file = "mypy_zope-1.0.12.tar.gz", hash = "sha256:d6f8f99eb5644885553b4ec7afc8d68f5daf412c9bf238ec3c36b65d97df6cbe"},
]

[package.dependencies]
mypy = ">=1.0.0,<1.18.0"
mypy = ">=1.0.0,<1.17.0"
"zope.interface" = "*"
"zope.schema" = "*"
@@ -1494,7 +1531,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
]
@@ -1529,28 +1566,16 @@ files = [
[package.extras]
dev = ["jinja2"]

[[package]]
name = "pathspec"
version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]

[[package]]
name = "phonenumbers"
version = "9.0.10"
version = "9.0.8"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "phonenumbers-9.0.10-py2.py3-none-any.whl", hash = "sha256:13b12d269be1f2b363c9bc2868656a7e2e8b50f1a1cef629c75005da6c374c6b"},
{file = "phonenumbers-9.0.10.tar.gz", hash = "sha256:c2d15a6a9d0534b14a7764f51246ada99563e263f65b80b0251d1a760ac4a1ba"},
{file = "phonenumbers-9.0.8-py2.py3-none-any.whl", hash = "sha256:53d357111c0ead0d6408ae443613b18d3a053431ca1ddf7e881457c0969afcf9"},
{file = "phonenumbers-9.0.8.tar.gz", hash = "sha256:16f03f2cf65b5eee99ed25827d810febcab92b5d76f977e425fcd2e4ca6d4865"},
]

[[package]]
@@ -1563,8 +1588,6 @@ groups = ["main"]
files = [
{file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"},
{file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"},
{file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"},
{file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"},
{file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"},
{file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"},
{file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"},
@@ -1574,8 +1597,6 @@ files = [
{file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"},
{file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"},
{file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"},
{file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"},
{file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"},
{file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"},
{file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"},
{file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"},
@@ -1585,8 +1606,6 @@ files = [
{file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"},
{file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"},
{file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"},
{file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"},
{file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"},
{file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"},
{file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"},
{file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"},
@@ -1599,8 +1618,6 @@ files = [
{file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"},
{file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"},
{file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"},
{file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"},
{file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"},
{file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"},
{file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"},
{file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"},
@@ -1610,8 +1627,6 @@ files = [
{file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"},
{file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"},
{file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"},
{file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"},
@@ -1621,8 +1636,6 @@ files = [
{file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"},
{file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"},
{file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"},
{file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"},
{file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"},
{file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"},
{file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"},
{file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"},
@@ -1632,8 +1645,6 @@ files = [
{file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"},
{file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"},
{file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"},
{file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"},
@@ -1643,8 +1654,6 @@ files = [
{file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"},
{file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"},
{file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"},
{file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"},
{file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"},
{file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"},
{file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"},
{file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"},
@@ -1654,15 +1663,11 @@ files = [
{file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"},
@@ -1700,7 +1705,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"postgres\""
markers = "extra == \"postgres\" or extra == \"all\""
files = [
{file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"},
{file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"},
@@ -1721,7 +1726,7 @@ description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas
optional = true
python-versions = "*"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
files = [
{file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
]
@@ -1737,7 +1742,7 @@ description = "A Simple library to enable psycopg2 compatability"
optional = true
python-versions = "*"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
files = [
{file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"},
]
@@ -1997,7 +2002,7 @@ description = "A development tool to measure, monitor and analyze the memory beh
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"all\" or extra == \"cache-memory\""
markers = "extra == \"cache-memory\" or extra == \"all\""
files = [
{file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"},
{file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"},
@@ -2057,7 +2062,7 @@ description = "Python implementation of SAML Version 2 Standard"
optional = true
python-versions = ">=3.9,<4.0"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"},
{file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"},
@@ -2082,7 +2087,7 @@ description = "Extensions to the standard Python datetime module"
optional = true
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
@@ -2110,7 +2115,7 @@ description = "World timezone definitions, modern and historical"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
{file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
@@ -2409,30 +2414,30 @@ files = [
[[package]]
name = "ruff"
version = "0.12.7"
version = "0.12.2"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303"},
{file = "ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb"},
{file = "ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e"},
{file = "ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606"},
{file = "ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8"},
{file = "ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa"},
{file = "ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5"},
{file = "ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4"},
{file = "ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77"},
{file = "ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f"},
{file = "ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69"},
{file = "ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71"},
{file = "ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be"},
{file = "ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e"},
{file = "ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342"},
{file = "ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a"},
{file = "ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639"},
{file = "ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12"},
{file = "ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e"},
]
[[package]]
@@ -2470,15 +2475,15 @@ doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]]
name = "sentry-sdk"
version = "2.34.1"
version = "2.32.0"
description = "Python client for Sentry (https://sentry.io)"
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"all\" or extra == \"sentry\""
markers = "extra == \"sentry\" or extra == \"all\""
files = [
{file = "sentry_sdk-2.34.1-py2.py3-none-any.whl", hash = "sha256:b7a072e1cdc5abc48101d5146e1ae680fa81fe886d8d95aaa25a0b450c818d32"},
{file = "sentry_sdk-2.34.1.tar.gz", hash = "sha256:69274eb8c5c38562a544c3e9f68b5be0a43be4b697f5fd385bf98e4fbe672687"},
{file = "sentry_sdk-2.32.0-py2.py3-none-any.whl", hash = "sha256:6cf51521b099562d7ce3606da928c473643abe99b00ce4cb5626ea735f4ec345"},
{file = "sentry_sdk-2.32.0.tar.gz", hash = "sha256:9016c75d9316b0f6921ac14c8cd4fb938f26002430ac5be9945ab280f78bec6b"},
]
[package.dependencies]
@@ -2663,7 +2668,7 @@ description = "Tornado IOLoop Backed Concurrent Futures"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"},
{file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"},
@@ -2679,7 +2684,7 @@ description = "Python bindings for the Apache Thrift RPC system"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
]
@@ -2741,7 +2746,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"},
{file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"},
@@ -2878,7 +2883,7 @@ description = "non-blocking redis client for python"
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"redis\""
markers = "extra == \"redis\" or extra == \"all\""
files = [
{file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"},
{file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"},
@@ -2932,14 +2937,14 @@ files = [
[[package]]
name = "types-jsonschema"
version = "4.25.0.20250720"
version = "4.24.0.20250528"
description = "Typing stubs for jsonschema"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_jsonschema-4.25.0.20250720-py3-none-any.whl", hash = "sha256:7d7897c715310d8bf9ae27a2cedba78bbb09e4cad83ce06d2aa79b73a88941df"},
{file = "types_jsonschema-4.25.0.20250720.tar.gz", hash = "sha256:765a3b6144798fe3161fd8cbe570a756ed3e8c0e5adb7c09693eb49faad39dbd"},
{file = "types_jsonschema-4.24.0.20250528-py3-none-any.whl", hash = "sha256:6a906b5ff73ac11c8d1e0b6c30a9693e1e4e1ab56c56c932b3a7e081b86d187b"},
{file = "types_jsonschema-4.24.0.20250528.tar.gz", hash = "sha256:7e28c64e0ae7980eeb158105b20663fc6a6b8f81d5f86ea6614aa0014417bd1e"},
]
[package.dependencies]
@@ -2983,14 +2988,14 @@ files = [
[[package]]
name = "types-psycopg2"
version = "2.9.21.20250718"
version = "2.9.21.20250516"
description = "Typing stubs for psycopg2"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_psycopg2-2.9.21.20250718-py3-none-any.whl", hash = "sha256:bcf085d4293bda48f5943a46dadf0389b2f98f7e8007722f7e1c12ee0f541858"},
{file = "types_psycopg2-2.9.21.20250718.tar.gz", hash = "sha256:dc09a97272ef67e739e57b9f4740b761208f4514257e311c0b05c8c7a37d04b4"},
{file = "types_psycopg2-2.9.21.20250516-py3-none-any.whl", hash = "sha256:2a9212d1e5e507017b31486ce8147634d06b85d652769d7a2d91d53cb4edbd41"},
{file = "types_psycopg2-2.9.21.20250516.tar.gz", hash = "sha256:6721018279175cce10b9582202e2a2b4a0da667857ccf82a97691bdb5ecd610f"},
]
[[package]]
@@ -3209,7 +3214,7 @@ description = "An XML Schema validator and decoder"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"},
{file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"},
@@ -3353,4 +3358,4 @@ url-preview = ["lxml"]
[metadata]
lock-version = "2.1"
python-versions = "^3.9.0"
content-hash = "600a349d08dde732df251583094a121b5385eb43ae0c6ceff10dcf9749359446"
content-hash = "1629969968eaaf20a8f49ee3cc0075b25e5dee7e81503fff66042ece72793103"

View File

@@ -101,7 +101,7 @@ module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.135.0"
version = "1.134.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
@@ -178,13 +178,8 @@ signedjson = "^1.1.0"
service-identity = ">=18.1.0"
# Twisted 18.9 introduces some logger improvements that the structured
# logger utilises
# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
# Twisted 21.2.0 introduces contextvar support.
# We could likely bump this to 22.1 without making distro packagers'
# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
# RHEL 9: 22.10)
Twisted = {extras = ["tls"], version = ">=21.2.0"}
treq = ">=21.5.0"
Twisted = {extras = ["tls"], version = ">=18.9.0"}
treq = ">=15.1"
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
pyOpenSSL = ">=16.0.0"
PyYAML = ">=5.3"
@@ -200,9 +195,7 @@ pymacaroons = ">=0.13.0"
msgpack = ">=0.5.2"
phonenumbers = ">=8.2.0"
# we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
prometheus-client = ">=0.6.0"
prometheus-client = ">=0.4.0"
# we use `order`, which arrived in attrs 19.2.0.
# Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
attrs = ">=19.2.0,!=21.1.0"
@@ -324,7 +317,7 @@ all = [
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.12.7"
ruff = "0.12.2"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"

View File

@@ -7,7 +7,7 @@ name = "synapse"
version = "0.1.0"
edition = "2021"
rust-version = "1.82.0"
rust-version = "1.81.0"
[lib]
name = "synapse"
@@ -23,7 +23,7 @@ name = "synapse.synapse_rust"
[dependencies]
anyhow = "1.0.63"
base64 = "0.22.1"
base64 = "0.21.7"
bytes = "1.6.0"
headers = "0.4.0"
http = "1.1.0"
@@ -52,7 +52,6 @@ reqwest = { version = "0.12.15", default-features = false, features = [
http-body-util = "0.1.3"
futures = "0.3.31"
tokio = { version = "1.44.2", features = ["rt", "rt-multi-thread"] }
once_cell = "1.18.0"
[features]
extension-module = ["pyo3/extension-module"]

View File

@@ -61,7 +61,6 @@ fn bench_match_exact(b: &mut Bencher) {
vec![],
false,
false,
false,
)
.unwrap();
@@ -72,10 +71,10 @@ fn bench_match_exact(b: &mut Bencher) {
},
));
let matched = eval.match_condition(&condition, None, None, None).unwrap();
let matched = eval.match_condition(&condition, None, None).unwrap();
assert!(matched, "Didn't match");
b.iter(|| eval.match_condition(&condition, None, None, None).unwrap());
b.iter(|| eval.match_condition(&condition, None, None).unwrap());
}
#[bench]
@@ -108,7 +107,6 @@ fn bench_match_word(b: &mut Bencher) {
vec![],
false,
false,
false,
)
.unwrap();
@@ -119,10 +117,10 @@ fn bench_match_word(b: &mut Bencher) {
},
));
let matched = eval.match_condition(&condition, None, None, None).unwrap();
let matched = eval.match_condition(&condition, None, None).unwrap();
assert!(matched, "Didn't match");
b.iter(|| eval.match_condition(&condition, None, None, None).unwrap());
b.iter(|| eval.match_condition(&condition, None, None).unwrap());
}
#[bench]
@@ -155,7 +153,6 @@ fn bench_match_word_miss(b: &mut Bencher) {
vec![],
false,
false,
false,
)
.unwrap();
@@ -166,10 +163,10 @@ fn bench_match_word_miss(b: &mut Bencher) {
},
));
let matched = eval.match_condition(&condition, None, None, None).unwrap();
let matched = eval.match_condition(&condition, None, None).unwrap();
assert!(!matched, "Didn't match");
b.iter(|| eval.match_condition(&condition, None, None, None).unwrap());
b.iter(|| eval.match_condition(&condition, None, None).unwrap());
}
#[bench]
@@ -202,7 +199,6 @@ fn bench_eval_message(b: &mut Bencher) {
vec![],
false,
false,
false,
)
.unwrap();
@@ -214,8 +210,7 @@ fn bench_eval_message(b: &mut Bencher) {
false,
false,
false,
false,
);
b.iter(|| eval.run(&rules, Some("bob"), Some("person"), None));
b.iter(|| eval.run(&rules, Some("bob"), Some("person")));
}

View File

@@ -54,7 +54,6 @@ enum EventInternalMetadataData {
RecheckRedaction(bool),
SoftFailed(bool),
ProactivelySend(bool),
PolicyServerSpammy(bool),
Redacted(bool),
TxnId(Box<str>),
TokenId(i64),
@@ -97,13 +96,6 @@ impl EventInternalMetadataData {
.to_owned()
.into_any(),
),
EventInternalMetadataData::PolicyServerSpammy(o) => (
pyo3::intern!(py, "policy_server_spammy"),
o.into_pyobject(py)
.unwrap_infallible()
.to_owned()
.into_any(),
),
EventInternalMetadataData::Redacted(o) => (
pyo3::intern!(py, "redacted"),
o.into_pyobject(py)
@@ -163,11 +155,6 @@ impl EventInternalMetadataData {
.extract()
.with_context(|| format!("'{key_str}' has invalid type"))?,
),
"policy_server_spammy" => EventInternalMetadataData::PolicyServerSpammy(
value
.extract()
.with_context(|| format!("'{key_str}' has invalid type"))?,
),
"redacted" => EventInternalMetadataData::Redacted(
value
.extract()
@@ -440,17 +427,6 @@ impl EventInternalMetadata {
set_property!(self, ProactivelySend, obj);
}
#[getter]
fn get_policy_server_spammy(&self) -> PyResult<bool> {
Ok(get_property_opt!(self, PolicyServerSpammy)
.copied()
.unwrap_or(false))
}
#[setter]
fn set_policy_server_spammy(&mut self, obj: bool) {
set_property!(self, PolicyServerSpammy, obj);
}
#[getter]
fn get_redacted(&self) -> PyResult<bool> {
let bool = get_property!(self, Redacted)?;

View File

@@ -12,149 +12,58 @@
* <https://www.gnu.org/licenses/agpl-3.0.html>.
*/
use std::{collections::HashMap, future::Future};
use std::{collections::HashMap, future::Future, panic::AssertUnwindSafe, sync::LazyLock};
use anyhow::Context;
use futures::TryStreamExt;
use once_cell::sync::OnceCell;
use pyo3::{create_exception, exceptions::PyException, prelude::*};
use futures::{FutureExt, TryStreamExt};
use pyo3::{exceptions::PyException, prelude::*, types::PyString};
use reqwest::RequestBuilder;
use tokio::runtime::Runtime;
use crate::errors::HttpResponseException;
create_exception!(
synapse.synapse_rust.http_client,
RustPanicError,
PyException,
"A panic which happened in a Rust future"
);
/// The tokio runtime that we're using to run async Rust libs.
static RUNTIME: LazyLock<Runtime> = LazyLock::new(|| {
tokio::runtime::Builder::new_multi_thread()
.worker_threads(4)
.enable_all()
.build()
.unwrap()
});
impl RustPanicError {
fn from_panic(panic_err: &(dyn std::any::Any + Send + 'static)) -> PyErr {
// Apparently this is how you extract the panic message from a panic
let panic_message = if let Some(str_slice) = panic_err.downcast_ref::<&str>() {
str_slice
} else if let Some(string) = panic_err.downcast_ref::<String>() {
string
} else {
"unknown error"
};
Self::new_err(panic_message.to_owned())
}
}
/// A reference to the `Deferred` python class.
static DEFERRED_CLASS: LazyLock<PyObject> = LazyLock::new(|| {
Python::with_gil(|py| {
py.import("twisted.internet.defer")
.expect("module 'twisted.internet.defer' should be importable")
.getattr("Deferred")
.expect("module 'twisted.internet.defer' should have a 'Deferred' class")
.unbind()
})
});
/// This is the name of the attribute where we store the runtime on the reactor
static TOKIO_RUNTIME_ATTR: &str = "__synapse_rust_tokio_runtime";
/// A Python wrapper around a Tokio runtime.
///
/// This allows us to 'store' the runtime on the reactor instance, starting it
/// when the reactor starts, and stopping it when the reactor shuts down.
#[pyclass]
struct PyTokioRuntime {
runtime: Option<Runtime>,
}
#[pymethods]
impl PyTokioRuntime {
fn start(&mut self) -> PyResult<()> {
// TODO: allow customization of the runtime like the number of threads
let runtime = tokio::runtime::Builder::new_multi_thread()
.worker_threads(4)
.enable_all()
.build()?;
self.runtime = Some(runtime);
Ok(())
}
fn shutdown(&mut self) -> PyResult<()> {
let runtime = self
.runtime
.take()
.context("Runtime was already shutdown")?;
// Dropping the runtime will shut it down
drop(runtime);
Ok(())
}
}
impl PyTokioRuntime {
/// Get the handle to the Tokio runtime, if it is running.
fn handle(&self) -> PyResult<&tokio::runtime::Handle> {
let handle = self
.runtime
.as_ref()
.context("Tokio runtime is not running")?
.handle();
Ok(handle)
}
}
/// Get a handle to the Tokio runtime stored on the reactor instance, or create
/// a new one.
fn runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRuntime>> {
if !reactor.hasattr(TOKIO_RUNTIME_ATTR)? {
install_runtime(reactor)?;
}
get_runtime(reactor)
}
/// Install a new Tokio runtime on the reactor instance.
fn install_runtime(reactor: &Bound<PyAny>) -> PyResult<()> {
let py = reactor.py();
let runtime = PyTokioRuntime { runtime: None };
let runtime = runtime.into_pyobject(py)?;
// Attach the runtime to the reactor, starting it when the reactor is
// running, stopping it when the reactor is shutting down
reactor.call_method1("callWhenRunning", (runtime.getattr("start")?,))?;
reactor.call_method1(
"addSystemEventTrigger",
("after", "shutdown", runtime.getattr("shutdown")?),
)?;
reactor.setattr(TOKIO_RUNTIME_ATTR, runtime)?;
Ok(())
}
/// Get a reference to a Tokio runtime handle stored on the reactor instance.
fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRuntime>> {
// This will raise if `TOKIO_RUNTIME_ATTR` is not set or if it is
// not a `Runtime`. Note that this could happen if the user sets it
// manually, or if multiple versions of `pyo3-twisted` are used!
let runtime: Bound<PyTokioRuntime> = reactor.getattr(TOKIO_RUNTIME_ATTR)?.extract()?;
Ok(runtime.borrow())
}
/// A reference to the `twisted.internet.defer` module.
static DEFER: OnceCell<PyObject> = OnceCell::new();
/// Access to the `twisted.internet.defer` module.
fn defer(py: Python<'_>) -> PyResult<&Bound<PyAny>> {
Ok(DEFER
.get_or_try_init(|| py.import("twisted.internet.defer").map(Into::into))?
.bind(py))
}
/// A reference to the twisted `reactor`.
static TWISTED_REACTOR: LazyLock<Py<PyModule>> = LazyLock::new(|| {
Python::with_gil(|py| {
py.import("twisted.internet.reactor")
.expect("module 'twisted.internet.reactor' should be importable")
.unbind()
})
});
/// Called when registering modules with python.
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
let child_module: Bound<'_, PyModule> = PyModule::new(py, "http_client")?;
child_module.add_class::<HttpClient>()?;
// Make sure we fail early if we can't load some modules
defer(py)?;
// Make sure we fail early if we can't build the lazy statics.
LazyLock::force(&RUNTIME);
LazyLock::force(&DEFERRED_CLASS);
m.add_submodule(&child_module)?;
// We need to manually add the module to sys.modules to make `from
// synapse.synapse_rust import http_client` work.
// synapse.synapse_rust import http_client` work.
py.import("sys")?
.getattr("modules")?
.set_item("synapse.synapse_rust.http_client", child_module)?;
@@ -163,24 +72,26 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
}
#[pyclass]
#[derive(Clone)]
struct HttpClient {
client: reqwest::Client,
reactor: PyObject,
}
#[pymethods]
impl HttpClient {
#[new]
pub fn py_new(reactor: Bound<PyAny>, user_agent: &str) -> PyResult<HttpClient> {
// Make sure the runtime gets installed
let _ = runtime(&reactor)?;
pub fn py_new(user_agent: &str) -> PyResult<HttpClient> {
// The twisted reactor can only be imported after Synapse has been
// imported, to allow Synapse to change the twisted reactor. If we try
// and import the reactor too early twisted installs a default reactor,
// which can't be replaced.
LazyLock::force(&TWISTED_REACTOR);
Ok(HttpClient {
client: reqwest::Client::builder()
.user_agent(user_agent)
.build()
.context("building reqwest client")?,
reactor: reactor.unbind(),
})
}
@@ -218,7 +129,7 @@ impl HttpClient {
builder: RequestBuilder,
response_limit: usize,
) -> PyResult<Bound<'a, PyAny>> {
create_deferred(py, self.reactor.bind(py), async move {
create_deferred(py, async move {
let response = builder.send().await.context("sending request")?;
let status = response.status();
@@ -248,51 +159,43 @@ impl HttpClient {
/// tokio runtime.
///
/// Does not handle deferred cancellation or contextvars.
fn create_deferred<'py, F, O>(
py: Python<'py>,
reactor: &Bound<'py, PyAny>,
fut: F,
) -> PyResult<Bound<'py, PyAny>>
fn create_deferred<F, O>(py: Python, fut: F) -> PyResult<Bound<'_, PyAny>>
where
F: Future<Output = PyResult<O>> + Send + 'static,
for<'a> O: IntoPyObject<'a> + Send + 'static,
for<'a> O: IntoPyObject<'a>,
{
let deferred = defer(py)?.call_method0("Deferred")?;
let deferred = DEFERRED_CLASS.bind(py).call0()?;
let deferred_callback = deferred.getattr("callback")?.unbind();
let deferred_errback = deferred.getattr("errback")?.unbind();
let rt = runtime(reactor)?;
let handle = rt.handle()?;
let task = handle.spawn(fut);
// Unbind the reactor so that we can pass it to the task
let reactor = reactor.clone().unbind();
handle.spawn(async move {
let res = task.await;
RUNTIME.spawn(async move {
// TODO: Is it safe to assert unwind safety here? I think so, as we
// don't use anything that could be tainted by the panic afterwards.
// Note that `.spawn(..)` asserts unwind safety on the future too.
let res = AssertUnwindSafe(fut).catch_unwind().await;
Python::with_gil(move |py| {
// Flatten the panic into standard python error
let res = match res {
Ok(r) => r,
Err(join_err) => match join_err.try_into_panic() {
Ok(panic_err) => Err(RustPanicError::from_panic(&panic_err)),
Err(err) => Err(PyException::new_err(format!("Task cancelled: {err}"))),
},
Err(panic_err) => {
let panic_message = get_panic_message(&panic_err);
Err(PyException::new_err(
PyString::new(py, panic_message).unbind(),
))
}
};
// Re-bind the reactor
let reactor = reactor.bind(py);
// Send the result to the deferred, via `.callback(..)` or `.errback(..)`
match res {
Ok(obj) => {
reactor
.call_method("callFromThread", (deferred_callback, obj), None)
TWISTED_REACTOR
.call_method(py, "callFromThread", (deferred_callback, obj), None)
.expect("callFromThread should not fail"); // There's nothing we can really do with errors here
}
Err(err) => {
reactor
.call_method("callFromThread", (deferred_errback, err), None)
TWISTED_REACTOR
.call_method(py, "callFromThread", (deferred_errback, err), None)
.expect("callFromThread should not fail"); // There's nothing we can really do with errors here
}
}
@@ -301,3 +204,15 @@ where
Ok(deferred)
}
/// Try and get the panic message out of the panic
fn get_panic_message<'a>(panic_err: &'a (dyn std::any::Any + Send + 'static)) -> &'a str {
// Apparently this is how you extract the panic message from a panic
if let Some(str_slice) = panic_err.downcast_ref::<&str>() {
str_slice
} else if let Some(string) = panic_err.downcast_ref::<String>() {
string
} else {
"unknown error"
}
}
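For orientation, here is a rough pure-Python analogue of the bridge `create_deferred` implements: run work off the reactor thread, then hand the result back through the thread-safe `callFromThread`. This is an illustrative sketch only; it ignores panic translation and contextvars, as the comments above note:

import threading

from twisted.internet import defer, reactor

def create_deferred_py(work):
    """Run `work()` on another thread and resolve a Deferred with its result."""
    d = defer.Deferred()

    def run():
        try:
            result = work()
        except Exception as e:
            # callFromThread is the only thread-safe way back into the reactor.
            reactor.callFromThread(d.errback, e)
        else:
            reactor.callFromThread(d.callback, result)

    threading.Thread(target=run, daemon=True).start()
    return d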

View File

@@ -290,26 +290,6 @@ pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
}];
pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
PushRule {
rule_id: Cow::Borrowed("global/content/.io.element.msc4306.rule.unsubscribed_thread"),
priority_class: 1,
conditions: Cow::Borrowed(&[Condition::Known(
KnownCondition::Msc4306ThreadSubscription { subscribed: false },
)]),
actions: Cow::Borrowed(&[]),
default: true,
default_enabled: true,
},
PushRule {
rule_id: Cow::Borrowed("global/content/.io.element.msc4306.rule.subscribed_thread"),
priority_class: 1,
conditions: Cow::Borrowed(&[Condition::Known(
KnownCondition::Msc4306ThreadSubscription { subscribed: true },
)]),
actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
default: true,
default_enabled: true,
},
PushRule {
rule_id: Cow::Borrowed("global/underride/.m.rule.call"),
priority_class: 1,
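In client-API terms, the removed `subscribed_thread` rule would render roughly as follows. This is a hand-written approximation as a Python dict, assuming `SOUND_ACTION` is the usual default-sound tweak:

subscribed_thread_rule = {
    "rule_id": ".io.element.msc4306.rule.subscribed_thread",
    "default": True,
    "enabled": True,
    "conditions": [
        {"kind": "io.element.msc4306.thread_subscription", "subscribed": True}
    ],
    # Action::Notify plus SOUND_ACTION, assumed to be the standard
    # {"set_tweak": "sound", "value": "default"} tweak.
    "actions": ["notify", {"set_tweak": "sound", "value": "default"}],
}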

View File

@@ -106,11 +106,8 @@ pub struct PushRuleEvaluator {
/// flag as MSC1767 (extensible events core).
msc3931_enabled: bool,
/// If MSC4210 (remove legacy mentions) is enabled.
// If MSC4210 (remove legacy mentions) is enabled.
msc4210_enabled: bool,
/// If MSC4306 (thread subscriptions) is enabled.
msc4306_enabled: bool,
}
#[pymethods]
@@ -129,7 +126,6 @@ impl PushRuleEvaluator {
room_version_feature_flags,
msc3931_enabled,
msc4210_enabled,
msc4306_enabled,
))]
pub fn py_new(
flattened_keys: BTreeMap<String, JsonValue>,
@@ -142,7 +138,6 @@ impl PushRuleEvaluator {
room_version_feature_flags: Vec<String>,
msc3931_enabled: bool,
msc4210_enabled: bool,
msc4306_enabled: bool,
) -> Result<Self, Error> {
let body = match flattened_keys.get("content.body") {
Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone().into_owned(),
@@ -161,7 +156,6 @@ impl PushRuleEvaluator {
room_version_feature_flags,
msc3931_enabled,
msc4210_enabled,
msc4306_enabled,
})
}
@@ -173,19 +167,12 @@ impl PushRuleEvaluator {
///
/// Returns the set of actions, if any, that match (filtering out any
/// `dont_notify` and `coalesce` actions).
///
/// msc4306_thread_subscription_state: (Only populated if MSC4306 is enabled)
/// The thread subscription state corresponding to the thread containing this event.
/// - `None` if the event is not in a thread, or if MSC4306 is disabled.
/// - `Some(true)` if the event is in a thread and the user has a subscription for that thread
/// - `Some(false)` if the event is in a thread and the user does NOT have a subscription for that thread
#[pyo3(signature = (push_rules, user_id=None, display_name=None, msc4306_thread_subscription_state=None))]
#[pyo3(signature = (push_rules, user_id=None, display_name=None))]
pub fn run(
&self,
push_rules: &FilteredPushRules,
user_id: Option<&str>,
display_name: Option<&str>,
msc4306_thread_subscription_state: Option<bool>,
) -> Vec<Action> {
'outer: for (push_rule, enabled) in push_rules.iter() {
if !enabled {
@@ -217,12 +204,7 @@ impl PushRuleEvaluator {
Condition::Known(KnownCondition::RoomVersionSupports { feature: _ }),
);
match self.match_condition(
condition,
user_id,
display_name,
msc4306_thread_subscription_state,
) {
match self.match_condition(condition, user_id, display_name) {
Ok(true) => {}
Ok(false) => continue 'outer,
Err(err) => {
@@ -255,20 +237,14 @@ impl PushRuleEvaluator {
}
/// Check if the given condition matches.
#[pyo3(signature = (condition, user_id=None, display_name=None, msc4306_thread_subscription_state=None))]
#[pyo3(signature = (condition, user_id=None, display_name=None))]
fn matches(
&self,
condition: Condition,
user_id: Option<&str>,
display_name: Option<&str>,
msc4306_thread_subscription_state: Option<bool>,
) -> bool {
match self.match_condition(
&condition,
user_id,
display_name,
msc4306_thread_subscription_state,
) {
match self.match_condition(&condition, user_id, display_name) {
Ok(true) => true,
Ok(false) => false,
Err(err) => {
@@ -286,7 +262,6 @@ impl PushRuleEvaluator {
condition: &Condition,
user_id: Option<&str>,
display_name: Option<&str>,
msc4306_thread_subscription_state: Option<bool>,
) -> Result<bool, Error> {
let known_condition = match condition {
Condition::Known(known) => known,
@@ -418,13 +393,6 @@ impl PushRuleEvaluator {
&& self.room_version_feature_flags.contains(&flag)
}
}
KnownCondition::Msc4306ThreadSubscription { subscribed } => {
if !self.msc4306_enabled {
false
} else {
msc4306_thread_subscription_state == Some(*subscribed)
}
}
};
Ok(result)
@@ -568,11 +536,10 @@ fn push_rule_evaluator() {
vec![],
true,
false,
false,
)
.unwrap();
let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"), None);
let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"));
assert_eq!(result.len(), 3);
}
@@ -599,7 +566,6 @@ fn test_requires_room_version_supports_condition() {
flags,
true,
false,
false,
)
.unwrap();
@@ -609,7 +575,6 @@ fn test_requires_room_version_supports_condition() {
&FilteredPushRules::default(),
Some("@bob:example.org"),
None,
None,
);
assert_eq!(result.len(), 3);
@@ -628,17 +593,7 @@ fn test_requires_room_version_supports_condition() {
};
let rules = PushRules::new(vec![custom_rule]);
result = evaluator.run(
&FilteredPushRules::py_new(
rules,
BTreeMap::new(),
true,
false,
true,
false,
false,
false,
),
None,
&FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false, false),
None,
None,
);
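From Python, the base-version signature (the side of this hunk that still carries MSC4306) threads the subscription state through as a fourth argument. A hedged usage sketch, where `evaluator` and `filtered_rules` are assumed to be already-constructed `PushRuleEvaluator` and `FilteredPushRules` objects:

# Only meaningful when MSC4306 is enabled; None means "not in a thread".
actions = evaluator.run(
    filtered_rules,
    user_id="@alice:example.org",
    display_name="alice",
    msc4306_thread_subscription_state=True,  # in a thread, and subscribed
)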

View File

@@ -369,10 +369,6 @@ pub enum KnownCondition {
RoomVersionSupports {
feature: Cow<'static, str>,
},
#[serde(rename = "io.element.msc4306.thread_subscription")]
Msc4306ThreadSubscription {
subscribed: bool,
},
}
impl<'source> IntoPyObject<'source> for Condition {
@@ -551,13 +547,11 @@ pub struct FilteredPushRules {
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
msc4210_enabled: bool,
msc4306_enabled: bool,
}
#[pymethods]
impl FilteredPushRules {
#[new]
#[allow(clippy::too_many_arguments)]
pub fn py_new(
push_rules: PushRules,
enabled_map: BTreeMap<String, bool>,
@@ -566,7 +560,6 @@ impl FilteredPushRules {
msc3664_enabled: bool,
msc4028_push_encrypted_events: bool,
msc4210_enabled: bool,
msc4306_enabled: bool,
) -> Self {
Self {
push_rules,
@@ -576,7 +569,6 @@ impl FilteredPushRules {
msc3664_enabled,
msc4028_push_encrypted_events,
msc4210_enabled,
msc4306_enabled,
}
}
@@ -627,10 +619,6 @@ impl FilteredPushRules {
return false;
}
if !self.msc4306_enabled && rule.rule_id.contains("/.io.element.msc4306.rule.") {
return false;
}
true
})
.map(|r| {

View File

@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.135/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.134/synapse-config.schema.json
type: object
properties:
modules:
@@ -629,70 +629,6 @@ properties:
password: mypassword
ssh_priv_key_path: CONFDIR/id_rsa
ssh_pub_key_path: CONFDIR/id_rsa.pub
http_proxy:
type: ["string", "null"]
description: >-
Proxy server to use for HTTP requests.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md).
examples:
- "http://USERNAME:PASSWORD@10.0.1.1:8080/"
https_proxy:
type: ["string", "null"]
description: >-
Proxy server to use for HTTPS requests.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md).
examples:
- "http://USERNAME:PASSWORD@proxy.example.com:8080/"
no_proxy_hosts:
type: array
description: >-
List of hosts, IP addresses, or IP ranges in CIDR format which should not use the
proxy. Synapse will directly connect to these hosts.
For more details, see the [forward proxy documentation](../../setup/forward_proxy.md).
examples:
- - master.hostname.example.com
- 10.1.0.0/16
- 172.30.0.0/16
matrix_authentication_service:
type: object
description: >-
The `matrix_authentication_service` setting configures integration with
[Matrix Authentication Service (MAS)](https://github.com/element-hq/matrix-authentication-service).
properties:
enabled:
type: boolean
description: >-
Whether or not to enable the MAS integration. If this is set to
`false`, Synapse will use its legacy internal authentication API.
default: false
endpoint:
type: string
format: uri
description: >-
The URL where Synapse can reach MAS. This *must* have the `discovery`
and `oauth` resources mounted.
default: http://localhost:8080
secret:
type: ["string", "null"]
description: >-
A shared secret that will be used to authenticate requests from and to MAS.
secret_path:
type: ["string", "null"]
description: >-
Alternative to `secret`, reading the shared secret from a file.
The file should be a plain text file, containing only the secret.
Synapse reads the secret from the given file once at startup.
examples:
- enabled: true
secret: someverysecuresecret
endpoint: http://localhost:8080
dummy_events_threshold:
type: integer
description: >-
@@ -2243,8 +2179,9 @@ properties:
with a short timeout, or restarting several different delayed events all
at once) without the risk of being ratelimited.
default:
per_second: 1.0
burst_count: 5.0
per_user:
per_second: 1.0
burst_count: 5.0
examples:
- per_second: 2.0
burst_count: 20.0
@@ -2265,17 +2202,6 @@ properties:
examples:
- per_second: 2.0
burst_count: 20.0
rc_room_creation:
$ref: "#/$defs/rc"
description: >-
Sets rate limits for how often users are able to create rooms.
default:
per_user:
per_second: 0.016
burst_count: 10.0
examples:
- per_second: 1.0
burst_count: 5.0
federation_rr_transactions_per_room_per_second:
type: integer
description: >-
@@ -2777,7 +2703,7 @@ properties:
Synapse reads the public key from the given file once at startup.
_Added in Synapse 1.135.0._
_Added in Synapse 1.134.0._
default: null
examples:
- /path/to/key/file
@@ -2800,7 +2726,7 @@ properties:
Synapse reads the private key from the given file once at startup.
_Added in Synapse 1.135.0._
_Added in Synapse 1.134.0._
default: null
examples:
- /path/to/key/file
@@ -4793,15 +4719,8 @@ properties:
enabled:
type: boolean
description: >-
Defines whether users can search the user directory. If `false` then
Defines whether users can search the user directory. If false then
empty responses are returned to all queries.
*Warning: While the homeserver may determine which subset of users are
searched, the Matrix specification requires homeservers to include (at
minimum) users visible in public rooms and users sharing a room with
the requester. Using `false` improves performance but violates this
requirement.*
default: true
search_all_users:
type: boolean
@@ -5458,9 +5377,6 @@ properties:
push_rules:
type: string
description: Name of a worker assigned to the `push_rules` stream.
device_lists:
type: string
description: Name of a worker assigned to the `device_lists` stream.
default: {}
examples:
- events: worker1

View File

@@ -473,10 +473,6 @@ def section(prop: str, values: dict) -> str:
def main() -> None:
# For Windows: reconfigure the terminal to be UTF-8 for `print()` calls.
if sys.platform == "win32":
sys.stdout.reconfigure(encoding="utf-8")
def usage(err_msg: str) -> int:
script_name = (sys.argv[:1] or ["__main__.py"])[0]
print(err_msg, file=sys.stderr)
@@ -489,10 +485,7 @@ def main() -> None:
exit(usage("Too many arguments."))
if not (filepath := (sys.argv[1:] or [""])[0]):
exit(usage("No schema file provided."))
with open(filepath, "r", encoding="utf-8") as f:
# Note: Windows requires that we specify the encoding; otherwise it uses
# things like CP-1251, which can cause explosions.
# See https://github.com/yaml/pyyaml/issues/123 for more info.
with open(filepath) as f:
return yaml.safe_load(f)
schema = read_json_file_arg()

View File

@@ -23,195 +23,28 @@
can crop up, e.g. the cache descriptors.
"""
import enum
from typing import Callable, Mapping, Optional, Tuple, Type, Union
from typing import Callable, Optional, Tuple, Type, Union
import attr
import mypy.types
from mypy.erasetype import remove_instance_last_known_values
from mypy.errorcodes import ErrorCode
from mypy.nodes import ARG_NAMED_OPT, ListExpr, NameExpr, TempNode, TupleExpr, Var
from mypy.plugin import (
ClassDefContext,
Context,
FunctionLike,
FunctionSigContext,
MethodSigContext,
MypyFile,
Plugin,
)
from mypy.nodes import ARG_NAMED_OPT, TempNode, Var
from mypy.plugin import FunctionSigContext, MethodSigContext, Plugin
from mypy.typeops import bind_self
from mypy.types import (
AnyType,
CallableType,
Instance,
NoneType,
Options,
TupleType,
TypeAliasType,
TypeVarType,
UninhabitedType,
UnionType,
)
from mypy_zope import plugin as mypy_zope_plugin
from pydantic.mypy import plugin as mypy_pydantic_plugin
PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL = ErrorCode(
"missing-server-name-label",
"`SERVER_NAME_LABEL` required in metric",
category="per-homeserver-tenant-metrics",
)
PROMETHEUS_METRIC_MISSING_FROM_LIST_TO_CHECK = ErrorCode(
"metric-type-missing-from-list",
"Every Prometheus metric type must be included in the `prometheus_metric_fullname_to_label_arg_map`.",
category="per-homeserver-tenant-metrics",
)
class Sentinel(enum.Enum):
# defining a sentinel in this way allows mypy to correctly handle the
# type of a dictionary lookup and subsequent type narrowing.
UNSET_SENTINEL = object()
@attr.s(auto_attribs=True)
class ArgLocation:
keyword_name: str
"""
The keyword argument name for this argument
"""
position: int
"""
The 0-based positional index of this argument
"""
prometheus_metric_fullname_to_label_arg_map: Mapping[str, Optional[ArgLocation]] = {
# `Collector` subclasses:
"prometheus_client.metrics.MetricWrapperBase": ArgLocation("labelnames", 2),
"prometheus_client.metrics.Counter": ArgLocation("labelnames", 2),
"prometheus_client.metrics.Histogram": ArgLocation("labelnames", 2),
"prometheus_client.metrics.Gauge": ArgLocation("labelnames", 2),
"prometheus_client.metrics.Summary": ArgLocation("labelnames", 2),
"prometheus_client.metrics.Info": ArgLocation("labelnames", 2),
"prometheus_client.metrics.Enum": ArgLocation("labelnames", 2),
"synapse.metrics.LaterGauge": ArgLocation("labelnames", 2),
"synapse.metrics.InFlightGauge": ArgLocation("labels", 2),
"synapse.metrics.GaugeBucketCollector": ArgLocation("labelnames", 2),
"prometheus_client.registry.Collector": None,
"prometheus_client.registry._EmptyCollector": None,
"prometheus_client.registry.CollectorRegistry": None,
"prometheus_client.process_collector.ProcessCollector": None,
"prometheus_client.platform_collector.PlatformCollector": None,
"prometheus_client.gc_collector.GCCollector": None,
"synapse.metrics._gc.GCCounts": None,
"synapse.metrics._gc.PyPyGCStats": None,
"synapse.metrics._reactor_metrics.ReactorLastSeenMetric": None,
"synapse.metrics.CPUMetrics": None,
"synapse.metrics.jemalloc.JemallocCollector": None,
"synapse.util.metrics.DynamicCollectorRegistry": None,
"synapse.metrics.background_process_metrics._Collector": None,
#
# `Metric` subclasses:
"prometheus_client.metrics_core.Metric": None,
"prometheus_client.metrics_core.UnknownMetricFamily": ArgLocation("labels", 3),
"prometheus_client.metrics_core.CounterMetricFamily": ArgLocation("labels", 3),
"prometheus_client.metrics_core.GaugeMetricFamily": ArgLocation("labels", 3),
"prometheus_client.metrics_core.SummaryMetricFamily": ArgLocation("labels", 3),
"prometheus_client.metrics_core.InfoMetricFamily": ArgLocation("labels", 3),
"prometheus_client.metrics_core.HistogramMetricFamily": ArgLocation("labels", 3),
"prometheus_client.metrics_core.GaugeHistogramMetricFamily": ArgLocation(
"labels", 4
),
"prometheus_client.metrics_core.StateSetMetricFamily": ArgLocation("labels", 3),
"synapse.metrics.GaugeHistogramMetricFamilyWithLabels": ArgLocation(
"labelnames", 4
),
}
"""
Map from the fullname of the Prometheus `Metric`/`Collector` classes to the keyword
argument name and positional index of the label names. This map is useful because
different metrics have different signatures for passing in label names and we just need
to know where to look.
This map should include any metrics that we collect with Prometheus, which corresponds
to anything that inherits from `prometheus_client.registry.Collector`
(`synapse.metrics._types.Collector`) or `prometheus_client.metrics_core.Metric`. The
exhaustiveness of this list is enforced by `analyze_prometheus_metric_classes`.
The entries with `None` always fail the lint because they don't have a `labelnames`
argument (therefore, no `SERVER_NAME_LABEL`), but we include them here so that people
can notice and manually allow via a type ignore comment, as the source of truth
should be in the source code.
"""
# Unbound at this point because we don't know the mypy version yet.
# This is set in the `plugin(...)` function below.
MypyPydanticPluginClass: Type[Plugin]
MypyZopePluginClass: Type[Plugin]
class SynapsePlugin(Plugin):
def __init__(self, options: Options):
super().__init__(options)
self.mypy_pydantic_plugin = MypyPydanticPluginClass(options)
self.mypy_zope_plugin = MypyZopePluginClass(options)
def set_modules(self, modules: dict[str, MypyFile]) -> None:
"""
This is called by mypy internals. We have to override this to ensure it's also
called for any other plugins that we're manually handling.
Here is how mypy describes it:
> [`self._modules`] can't be set in `__init__` because it is executed too soon
> in `build.py`. Therefore, `build.py` *must* set it later before graph processing
> starts by calling `set_modules()`.
"""
super().set_modules(modules)
self.mypy_pydantic_plugin.set_modules(modules)
self.mypy_zope_plugin.set_modules(modules)
def get_base_class_hook(
self, fullname: str
) -> Optional[Callable[[ClassDefContext], None]]:
def _get_base_class_hook(ctx: ClassDefContext) -> None:
# Run any `get_base_class_hook` checks from other plugins first.
#
# Unfortunately, because mypy only chooses the first plugin that returns a
# non-None value (known-limitation, c.f.
# https://github.com/python/mypy/issues/19524), we workaround this by
# putting our custom plugin first in the plugin order and then calling the
# other plugin's hook manually followed by our own checks.
if callback := self.mypy_pydantic_plugin.get_base_class_hook(fullname):
callback(ctx)
if callback := self.mypy_zope_plugin.get_base_class_hook(fullname):
callback(ctx)
# Now run our own checks
analyze_prometheus_metric_classes(ctx)
return _get_base_class_hook
def get_function_signature_hook(
self, fullname: str
) -> Optional[Callable[[FunctionSigContext], FunctionLike]]:
# Strip off the unique identifier for classes that are dynamically created inside
# functions. ex. `synapse.metrics.jemalloc.JemallocCollector@185` (this is the line
# number)
if "@" in fullname:
fullname = fullname.split("@", 1)[0]
# Look for any Prometheus metrics to make sure they have the `SERVER_NAME_LABEL`
# label.
if fullname in prometheus_metric_fullname_to_label_arg_map.keys():
# Because it's difficult to determine the `fullname` of the function in the
# callback, let's just pass it in while we have it.
return lambda ctx: check_prometheus_metric_instantiation(ctx, fullname)
return None
def get_method_signature_hook(
self, fullname: str
) -> Optional[Callable[[MethodSigContext], CallableType]]:
@@ -232,157 +65,6 @@ class SynapsePlugin(Plugin):
return None
def analyze_prometheus_metric_classes(ctx: ClassDefContext) -> None:
"""
Cross-check the list of Prometheus metric classes against the
`prometheus_metric_fullname_to_label_arg_map` to ensure the list is exhaustive and
up-to-date.
"""
fullname = ctx.cls.fullname
# Strip off the unique identifier for classes that are dynamically created inside
# functions. ex. `synapse.metrics.jemalloc.JemallocCollector@185` (this is the line
# number)
if "@" in fullname:
fullname = fullname.split("@", 1)[0]
if any(
ancestor_type.fullname
in (
# All of the Prometheus metric classes inherit from the `Collector`.
"prometheus_client.registry.Collector",
"synapse.metrics._types.Collector",
# And custom metrics that inherit from `Metric`.
"prometheus_client.metrics_core.Metric",
)
for ancestor_type in ctx.cls.info.mro
):
if fullname not in prometheus_metric_fullname_to_label_arg_map:
ctx.api.fail(
f"Expected {fullname} to be in `prometheus_metric_fullname_to_label_arg_map`, "
f"but it was not found. This is a problem with our custom mypy plugin. "
f"Please add it to the map.",
Context(),
code=PROMETHEUS_METRIC_MISSING_FROM_LIST_TO_CHECK,
)
def check_prometheus_metric_instantiation(
ctx: FunctionSigContext, fullname: str
) -> CallableType:
"""
Ensure that the `prometheus_client` metrics include the `SERVER_NAME_LABEL` label
when instantiated.
This is important because we support multiple Synapse instances running in the same
process, where all metrics share a single global `REGISTRY`. The `server_name` label
ensures metrics are correctly separated by homeserver.
There are also some metrics that apply at the process level, such as CPU usage,
Python garbage collection, and Twisted reactor tick time, which shouldn't have the
`SERVER_NAME_LABEL`. In those cases, use a type ignore comment to disable the
check, e.g. `# type: ignore[missing-server-name-label]`.
Args:
ctx: The `FunctionSigContext` from mypy.
fullname: The fully qualified name of the function being called,
e.g. `"prometheus_client.metrics.Counter"`
"""
# The true signature, this isn't being modified so this is what will be returned.
signature = ctx.default_signature
# Find where the label names argument is in the function signature.
arg_location = prometheus_metric_fullname_to_label_arg_map.get(
fullname, Sentinel.UNSET_SENTINEL
)
assert arg_location is not Sentinel.UNSET_SENTINEL, (
f"Expected to find {fullname} in `prometheus_metric_fullname_to_label_arg_map`, "
f"but it was not found. This is a problem with our custom mypy plugin. "
f"Please add it to the map. Context: {ctx.context}"
)
# People should be using `# type: ignore[missing-server-name-label]` for
# process-level metrics that should not have the `SERVER_NAME_LABEL`.
if arg_location is None:
ctx.api.fail(
f"{signature.name} does not have a `labelnames`/`labels` argument "
"(if this is untrue, update `prometheus_metric_fullname_to_label_arg_map` "
"in our custom mypy plugin) and should probably have a type ignore comment, "
"e.g. `# type: ignore[missing-server-name-label]`. The reason we don't "
"automatically ignore this is the source of truth should be in the source code.",
ctx.context,
code=PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL,
)
return signature
# Sanity check the arguments are still as expected in this version of
# `prometheus_client`. ex. `Counter(name, documentation, labelnames, ...)`
#
# `signature.arg_names` should be: ["name", "documentation", "labelnames", ...]
if (
len(signature.arg_names) < (arg_location.position + 1)
or signature.arg_names[arg_location.position] != arg_location.keyword_name
):
ctx.api.fail(
f"Expected argument number {arg_location.position + 1} of {signature.name} to be `labelnames`/`labels`, "
f"but got {signature.arg_names[arg_location.position]}",
ctx.context,
)
return signature
# Ensure mypy is passing the correct number of arguments because we are doing some
# dirty indexing into `ctx.args` later on.
assert len(ctx.args) == len(signature.arg_names), (
f"Expected the list of arguments in the {signature.name} signature ({len(signature.arg_names)})"
f"to match the number of arguments from the function signature context ({len(ctx.args)})"
)
# Check if the `labelnames` argument includes `SERVER_NAME_LABEL`
#
# `ctx.args` should look like this:
# ```
# [
# [StrExpr("name")],
# [StrExpr("documentation")],
# [ListExpr([StrExpr("label1"), StrExpr("label2")])]
# ...
# ]
# ```
labelnames_arg_expression = (
ctx.args[arg_location.position][0]
if len(ctx.args[arg_location.position]) > 0
else None
)
if isinstance(labelnames_arg_expression, (ListExpr, TupleExpr)):
# Check if the `labelnames` argument includes the `server_name` label (`SERVER_NAME_LABEL`).
for labelname_expression in labelnames_arg_expression.items:
if (
isinstance(labelname_expression, NameExpr)
and labelname_expression.fullname == "synapse.metrics.SERVER_NAME_LABEL"
):
# Found the `SERVER_NAME_LABEL`, all good!
break
else:
ctx.api.fail(
f"Expected {signature.name} to include `SERVER_NAME_LABEL` in the list of labels. "
"If this is a process-level metric (vs homeserver-level), use a type ignore comment "
"to disable this check.",
ctx.context,
code=PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL,
)
else:
ctx.api.fail(
f"Expected the `labelnames` argument of {signature.name} to be a list of label names "
f"(including `SERVER_NAME_LABEL`), but got {labelnames_arg_expression}. "
"If this is a process-level metric (vs homeserver-level), use a type ignore comment "
"to disable this check.",
ctx.context,
code=PROMETHEUS_METRIC_MISSING_SERVER_NAME_LABEL,
)
return signature
return signature
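Concretely, the check above accepts and rejects code along these lines (a sketch: the metric names are made up, while `SERVER_NAME_LABEL` is the real constant from `synapse.metrics`):

from prometheus_client import Counter

from synapse.metrics import SERVER_NAME_LABEL

# Passes: the label list names SERVER_NAME_LABEL explicitly.
http_requests = Counter(
    "example_http_requests",  # hypothetical metric name
    "Number of handled HTTP requests",
    labelnames=["method", SERVER_NAME_LABEL],
)

# Process-level metric: no per-homeserver label, so the check must be
# silenced at the call site rather than in the plugin's map.
gc_pauses = Counter(  # type: ignore[missing-server-name-label]
    "example_gc_pauses",  # hypothetical metric name
    "Number of observed GC pauses",
)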
def _get_true_return_type(signature: CallableType) -> mypy.types.Type:
"""
Get the "final" return type of a callable which might return an Awaitable/Deferred.
@@ -690,13 +372,10 @@ def is_cacheable(
def plugin(version: str) -> Type[SynapsePlugin]:
global MypyPydanticPluginClass, MypyZopePluginClass
# This is the entry point of the plugin, and lets us deal with the fact
# that the mypy plugin interface is *not* stable by looking at the version
# string.
#
# However, since we pin the version of mypy Synapse uses in CI, we don't
# really care.
MypyPydanticPluginClass = mypy_pydantic_plugin(version)
MypyZopePluginClass = mypy_zope_plugin(version)
return SynapsePlugin

View File

@@ -45,6 +45,16 @@ if py_version < (3, 9):
# Allow using the asyncio reactor via env var.
if strtobool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", "0")):
from incremental import Version
import twisted
# We need a bugfix that is included in Twisted 21.2.0:
# https://twistedmatrix.com/trac/ticket/9787
if twisted.version < Version("Twisted", 21, 2, 0):
print("Using asyncio reactor requires Twisted>=21.2.0")
sys.exit(1)
import asyncio
from twisted.internet import asyncioreactor
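# Illustrative sketch (not part of this diff): once the version check passes,
# the asyncio reactor must be installed before anything imports
# `twisted.internet.reactor`, along these lines:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
asyncioreactor.install(loop)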

View File

@@ -34,11 +34,9 @@ HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2
if TYPE_CHECKING or HAS_PYDANTIC_V2:
from pydantic.v1 import (
AnyHttpUrl,
BaseModel,
Extra,
Field,
FilePath,
MissingError,
PydanticValueError,
StrictBool,
@@ -50,18 +48,15 @@ if TYPE_CHECKING or HAS_PYDANTIC_V2:
conint,
constr,
parse_obj_as,
root_validator,
validator,
)
from pydantic.v1.error_wrappers import ErrorWrapper
from pydantic.v1.typing import get_args
else:
from pydantic import (
AnyHttpUrl,
BaseModel,
Extra,
Field,
FilePath,
MissingError,
PydanticValueError,
StrictBool,
@@ -73,7 +68,6 @@ else:
conint,
constr,
parse_obj_as,
root_validator,
validator,
)
from pydantic.error_wrappers import ErrorWrapper
@@ -81,7 +75,6 @@ else:
__all__ = (
"HAS_PYDANTIC_V2",
"AnyHttpUrl",
"BaseModel",
"constr",
"conbytes",
@@ -90,7 +83,6 @@ __all__ = (
"ErrorWrapper",
"Extra",
"Field",
"FilePath",
"get_args",
"MissingError",
"parse_obj_as",
@@ -100,5 +92,4 @@ __all__ = (
"StrictStr",
"ValidationError",
"validator",
"root_validator",
)
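# Illustrative usage sketch (not part of this diff), from a caller's point of
# view: importing through this shim keeps call sites identical on pydantic v1
# and v2 (where the v1 API lives under `pydantic.v1`).
from synapse._pydantic_compat import StrictStr, parse_obj_as
value: str = parse_obj_as(StrictStr, "example.com")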

View File

@@ -29,21 +29,19 @@ import attr
from synapse.config._base import (
Config,
ConfigError,
RootConfig,
find_config_files,
read_config_files,
)
from synapse.config.database import DatabaseConfig
from synapse.config.server import ServerConfig
from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
from synapse.storage.engines import create_engine
class ReviewConfig(RootConfig):
"A config class that just pulls out the server and database config"
"A config class that just pulls out the database config"
config_classes = [ServerConfig, DatabaseConfig]
config_classes = [DatabaseConfig]
@attr.s(auto_attribs=True)
@@ -150,10 +148,6 @@ def main() -> None:
config_dict = read_config_files(config_files)
config.parse_config_dict(config_dict, "", "")
server_name = config.server.server_name
if not isinstance(server_name, str):
raise ConfigError("Must be a string", ("server_name",))
since_ms = time.time() * 1000 - Config.parse_duration(config_args.since)
exclude_users_with_email = config_args.exclude_emails
exclude_users_with_appservice = config_args.exclude_app_service
@@ -165,12 +159,7 @@ def main() -> None:
engine = create_engine(database_config.config)
with make_conn(
db_config=database_config,
engine=engine,
default_txn_name="review_recent_signups",
server_name=server_name,
) as db_conn:
with make_conn(database_config, engine, "review_recent_signups") as db_conn:
# This generates a type of Cursor, not LoggingTransaction.
user_infos = get_recent_users(
db_conn.cursor(),

View File

@@ -136,7 +136,6 @@ BOOLEAN_COLUMNS = {
"has_known_state",
"is_encrypted",
],
"thread_subscriptions": ["subscribed", "automatic"],
"users": ["shadow_banned", "approved", "locked", "suspended"],
"un_partial_stated_event_stream": ["rejection_status_changed"],
"users_who_share_rooms": ["share_private"],
@@ -191,18 +190,13 @@ APPEND_ONLY_TABLES = [
"users",
]
# These tables declare their id column with "PRIMARY KEY AUTOINCREMENT" on sqlite side
# and with "PRIMARY KEY GENERATED ALWAYS AS IDENTITY" on postgres side. This creates an
# implicit sequence that needs its value to be migrated separately. Additionally,
# inserting on postgres side needs to use the "OVERRIDING SYSTEM VALUE" modifier.
AUTOINCREMENT_TABLES = {
"sliding_sync_connections",
"sliding_sync_connection_positions",
"sliding_sync_connection_required_state",
"state_groups_pending_deletion",
}
IGNORED_TABLES = {
# Porting the auto generated sequence in this table is non-trivial.
# None of the entries in this list are mandatory for Synapse to keep working.
# If state group disk space is an issue after the port, the
# `mark_unreferenced_state_groups_for_deletion_bg_update` background task can be run again.
"state_groups_pending_deletion",
# We don't port these tables, as they're a faff and we can regenerate
# them anyway.
"user_directory",
@@ -290,17 +284,11 @@ class Store(
return self.db_pool.runInteraction("execute_sql", r)
def insert_many_txn(
self,
txn: LoggingTransaction,
table: str,
headers: List[str],
rows: List[Tuple],
override_system_value: bool = False,
self, txn: LoggingTransaction, table: str, headers: List[str], rows: List[Tuple]
) -> None:
sql = "INSERT INTO %s (%s) %s VALUES (%s)" % (
sql = "INSERT INTO %s (%s) VALUES (%s)" % (
table,
", ".join(k for k in headers),
"OVERRIDING SYSTEM VALUE" if override_system_value else "",
", ".join("%s" for _ in headers),
)
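# Illustrative sketch (not part of this diff): for a table in
# `AUTOINCREMENT_TABLES` the template above expands as follows (the column
# names here are assumptions, for illustration):
headers = ["connection_key", "user_id"]
sql = "INSERT INTO %s (%s) %s VALUES (%s)" % (
"sliding_sync_connections",
", ".join(headers),
"OVERRIDING SYSTEM VALUE",
", ".join("%s" for _ in headers),
)
# -> INSERT INTO sliding_sync_connections (connection_key, user_id)
#    OVERRIDING SYSTEM VALUE VALUES (%s, %s)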
@@ -544,13 +532,7 @@ class Porter:
def insert(txn: LoggingTransaction) -> None:
assert headers is not None
self.postgres_store.insert_many_txn(
txn,
table,
headers[1:],
rows,
override_system_value=table in AUTOINCREMENT_TABLES,
)
self.postgres_store.insert_many_txn(txn, table, headers[1:], rows)
self.postgres_store.db_pool.simple_update_one_txn(
txn,
@@ -672,14 +654,8 @@ class Porter:
engine = create_engine(db_config.config)
hs = MockHomeserver(self.hs_config)
server_name = hs.hostname
with make_conn(
db_config=db_config,
engine=engine,
default_txn_name="portdb",
server_name=server_name,
) as db_conn:
with make_conn(db_config, engine, "portdb") as db_conn:
engine.check_database(
db_conn, allow_outdated_version=allow_outdated_version
)
@@ -908,19 +884,6 @@ class Porter:
],
)
await self._setup_autoincrement_sequence(
"sliding_sync_connection_positions", "connection_position"
)
await self._setup_autoincrement_sequence(
"sliding_sync_connection_required_state", "required_state_id"
)
await self._setup_autoincrement_sequence(
"sliding_sync_connections", "connection_key"
)
await self._setup_autoincrement_sequence(
"state_groups_pending_deletion", "sequence_number"
)
# Step 3. Get tables.
self.progress.set_state("Fetching tables")
sqlite_tables = await self.sqlite_store.db_pool.simple_select_onecol(
@@ -1253,49 +1216,6 @@ class Porter:
"_setup_%s" % (sequence_name,), r
)
async def _setup_autoincrement_sequence(
self,
sqlite_table_name: str,
sqlite_id_column_name: str,
) -> None:
"""Set a sequence to the correct value. Use where id column was declared with PRIMARY KEY AUTOINCREMENT."""
seq_name = await self._pg_get_serial_sequence(
sqlite_table_name, sqlite_id_column_name
)
if seq_name is None:
raise Exception(
"implicit sequence not found for table " + sqlite_table_name
)
seq_value = await self.sqlite_store.db_pool.simple_select_one_onecol(
table="sqlite_sequence",
keyvalues={"name": sqlite_table_name},
retcol="seq",
allow_none=True,
)
if seq_value is None:
return
def r(txn: LoggingTransaction) -> None:
sql = "ALTER SEQUENCE %s RESTART WITH" % (seq_name,)
txn.execute(sql + " %s", (seq_value + 1,))
await self.postgres_store.db_pool.runInteraction("_setup_%s" % (seq_name,), r)
async def _pg_get_serial_sequence(self, table: str, column: str) -> Optional[str]:
"""Returns the name of the postgres sequence associated with a column, or NULL."""
def r(txn: LoggingTransaction) -> Optional[str]:
txn.execute("SELECT pg_get_serial_sequence('%s', '%s')" % (table, column))
result = txn.fetchone()
if not result:
return None
return result[0]
return await self.postgres_store.db_pool.runInteraction(
"_pg_get_serial_sequence", r
)
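# Illustrative sketch (not part of this diff): the two helpers above boil down
# to raw SQL along these lines (table and column names are assumptions):
#
#     SELECT pg_get_serial_sequence('sliding_sync_connections', 'connection_key');
#     ALTER SEQUENCE <name returned above> RESTART WITH <sqlite_sequence.seq + 1>;
#
# so the first id generated on postgres continues where sqlite left off.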
async def _setup_auth_chain_sequence(self) -> None:
curr_chain_id: Optional[
int

View File

@@ -53,7 +53,6 @@ class MockHomeserver(HomeServer):
def run_background_updates(hs: HomeServer) -> None:
server_name = hs.hostname
main = hs.get_datastores().main
state = hs.get_datastores().state
@@ -67,11 +66,7 @@ def run_background_updates(hs: HomeServer) -> None:
def run() -> None:
# Apply all background updates on the database.
defer.ensureDeferred(
run_as_background_process(
"background_updates",
server_name,
run_background_updates,
)
run_as_background_process("background_updates", run_background_updates)
)
reactor.callWhenRunning(run)

View File

@@ -20,13 +20,10 @@
#
from typing import TYPE_CHECKING, Optional, Protocol, Tuple
from prometheus_client import Histogram
from twisted.web.server import Request
from synapse.appservice import ApplicationService
from synapse.http.site import SynapseRequest
from synapse.metrics import SERVER_NAME_LABEL
from synapse.types import Requester
if TYPE_CHECKING:
@@ -36,13 +33,6 @@ if TYPE_CHECKING:
GUEST_DEVICE_ID = "guest_device"
introspection_response_timer = Histogram(
"synapse_api_auth_delegated_introspection_response",
"Time taken to get a response for an introspection request",
labelnames=["code", SERVER_NAME_LABEL],
)
class Auth(Protocol):
"""The interface that an auth provider must implement."""

View File

@@ -172,7 +172,7 @@ class BaseAuth:
"""
# It's ok if the app service is trying to use the sender from their registration
if app_service.sender.to_string() == user_id:
if app_service.sender == user_id:
pass
# Check to make sure the app service is allowed to control the user
elif not app_service.is_interested_in_user(user_id):

View File

@@ -1,432 +0,0 @@
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright (C) 2025 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
#
import logging
from typing import TYPE_CHECKING, Optional
from urllib.parse import urlencode
from synapse._pydantic_compat import (
BaseModel,
Extra,
StrictBool,
StrictInt,
StrictStr,
ValidationError,
)
from synapse.api.auth.base import BaseAuth
from synapse.api.errors import (
AuthError,
HttpResponseException,
InvalidClientTokenError,
SynapseError,
UnrecognizedRequestError,
)
from synapse.http.site import SynapseRequest
from synapse.logging.context import PreserveLoggingContext
from synapse.logging.opentracing import (
active_span,
force_tracing,
inject_request_headers,
start_active_span,
)
from synapse.metrics import SERVER_NAME_LABEL
from synapse.synapse_rust.http_client import HttpClient
from synapse.types import JsonDict, Requester, UserID, create_requester
from synapse.util import json_decoder
from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
from . import introspection_response_timer
if TYPE_CHECKING:
from synapse.rest.admin.experimental_features import ExperimentalFeature
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
# Scope as defined by MSC2967
# https://github.com/matrix-org/matrix-spec-proposals/pull/2967
SCOPE_MATRIX_API = "urn:matrix:org.matrix.msc2967.client:api:*"
SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:"
class ServerMetadata(BaseModel):
class Config:
extra = Extra.allow
issuer: StrictStr
account_management_uri: StrictStr
class IntrospectionResponse(BaseModel):
retrieved_at_ms: StrictInt
active: StrictBool
scope: Optional[StrictStr]
username: Optional[StrictStr]
sub: Optional[StrictStr]
device_id: Optional[StrictStr]
expires_in: Optional[StrictInt]
class Config:
extra = Extra.allow
def get_scope_set(self) -> set[str]:
if not self.scope:
return set()
return {token for token in self.scope.split(" ") if token}
def is_active(self, now_ms: int) -> bool:
if not self.active:
return False
# Compatibility tokens don't expire and don't have an 'expires_in' field
if self.expires_in is None:
return True
absolute_expiry_ms = self.expires_in * 1000 + self.retrieved_at_ms
return now_ms < absolute_expiry_ms
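# Worked example (not part of this diff) of the arithmetic above: a token
# introspected at 1_000_000 ms with `expires_in=120` stays active for exactly
# two minutes after retrieval.
resp = IntrospectionResponse(retrieved_at_ms=1_000_000, active=True, expires_in=120)
assert resp.is_active(now_ms=1_000_000 + 119_999)
assert not resp.is_active(now_ms=1_000_000 + 120_000)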
class MasDelegatedAuth(BaseAuth):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.server_name = hs.hostname
self._clock = hs.get_clock()
self._config = hs.config.mas
self._http_client = hs.get_proxied_http_client()
self._rust_http_client = HttpClient(
reactor=hs.get_reactor(),
user_agent=self._http_client.user_agent.decode("utf8"),
)
self._server_metadata = RetryOnExceptionCachedCall[ServerMetadata](
self._load_metadata
)
self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users
# # Token Introspection Cache
# This remembers what users/devices are represented by which access tokens,
# in order to reduce overall system load:
# - on Synapse (as requests are relatively expensive)
# - on the network
# - on MAS
#
# Since there is no invalidation mechanism currently,
# the entries expire after 2 minutes.
# This does mean tokens can be treated as valid by Synapse
# for longer than reality.
#
# Ideally, tokens should logically be invalidated in the following circumstances:
# - If a session logout happens.
# In this case, MAS will delete the device within Synapse
# anyway and this is good enough as an invalidation.
# - If the client refreshes their token in MAS.
# In this case, the device still exists and it's not the end of the world for
# the old access token to continue working for a short time.
self._introspection_cache: ResponseCache[str] = ResponseCache(
clock=self._clock,
name="mas_token_introspection",
server_name=self.server_name,
timeout_ms=120_000,
# don't log because the keys are access tokens
enable_logging=False,
)
@property
def _metadata_url(self) -> str:
return f"{self._config.endpoint.rstrip('/')}/.well-known/openid-configuration"
@property
def _introspection_endpoint(self) -> str:
return f"{self._config.endpoint.rstrip('/')}/oauth2/introspect"
async def _load_metadata(self) -> ServerMetadata:
response = await self._http_client.get_json(self._metadata_url)
metadata = ServerMetadata(**response)
return metadata
async def issuer(self) -> str:
metadata = await self._server_metadata.get()
return metadata.issuer
async def account_management_url(self) -> str:
metadata = await self._server_metadata.get()
return metadata.account_management_uri
async def auth_metadata(self) -> JsonDict:
metadata = await self._server_metadata.get()
return metadata.dict()
def is_request_using_the_shared_secret(self, request: SynapseRequest) -> bool:
"""
Check if the request is using the shared secret.
Args:
request: The request to check.
Returns:
True if the request is using the shared secret, False otherwise.
"""
access_token = self.get_access_token_from_request(request)
shared_secret = self._config.secret()
if not shared_secret:
return False
return access_token == shared_secret
async def _introspect_token(
self, token: str, cache_context: ResponseCacheContext[str]
) -> IntrospectionResponse:
"""
Send a token to the introspection endpoint and return the introspection response
Parameters:
token: The token to introspect
Raises:
HttpResponseException: If the introspection endpoint returns a non-2xx response
ValueError: If the introspection endpoint returns an invalid JSON response
JSONDecodeError: If the introspection endpoint returns a non-JSON response
Exception: If the HTTP request fails
Returns:
The introspection response
"""
# By default, we shouldn't cache the result unless we know it's valid
cache_context.should_cache = False
raw_headers: dict[str, str] = {
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "application/json",
"Authorization": f"Bearer {self._config.secret()}",
# Tell MAS that we support reading the device ID as an explicit
# value, not encoded in the scope. This is supported by MAS 0.15+
"X-MAS-Supports-Device-Id": "1",
}
args = {"token": token, "token_type_hint": "access_token"}
body = urlencode(args, True)
# Do the actual request
logger.debug("Fetching token from MAS")
start_time = self._clock.time()
try:
with start_active_span("mas-introspect-token"):
inject_request_headers(raw_headers)
with PreserveLoggingContext():
resp_body = await self._rust_http_client.post(
url=self._introspection_endpoint,
response_limit=1 * 1024 * 1024,
headers=raw_headers,
request_body=body,
)
except HttpResponseException as e:
end_time = self._clock.time()
introspection_response_timer.labels(
code=e.code, **{SERVER_NAME_LABEL: self.server_name}
).observe(end_time - start_time)
raise
except Exception:
end_time = self._clock.time()
introspection_response_timer.labels(
code="ERR", **{SERVER_NAME_LABEL: self.server_name}
).observe(end_time - start_time)
raise
logger.debug("Fetched token from MAS")
end_time = self._clock.time()
introspection_response_timer.labels(
code=200, **{SERVER_NAME_LABEL: self.server_name}
).observe(end_time - start_time)
raw_response = json_decoder.decode(resp_body.decode("utf-8"))
try:
response = IntrospectionResponse(
retrieved_at_ms=self._clock.time_msec(),
**raw_response,
)
except ValidationError as e:
raise ValueError(
"The introspection endpoint returned an invalid JSON response"
) from e
# We had a valid response, so we can cache it
cache_context.should_cache = True
return response
async def is_server_admin(self, requester: Requester) -> bool:
return "urn:synapse:admin:*" in requester.scope
async def get_user_by_req(
self,
request: SynapseRequest,
allow_guest: bool = False,
allow_expired: bool = False,
allow_locked: bool = False,
) -> Requester:
parent_span = active_span()
with start_active_span("get_user_by_req"):
access_token = self.get_access_token_from_request(request)
requester = await self.get_appservice_user(request, access_token)
if not requester:
requester = await self.get_user_by_access_token(
token=access_token,
allow_expired=allow_expired,
)
await self._record_request(request, requester)
request.requester = requester
if parent_span:
if requester.authenticated_entity in self._force_tracing_for_users:
# request tracing is enabled for this user, so we need to force it
# tracing on for the parent span (which will be the servlet span).
#
# It's too late for the get_user_by_req span to inherit the setting,
# so we also force it on for that.
force_tracing()
force_tracing(parent_span)
parent_span.set_tag(
"authenticated_entity", requester.authenticated_entity
)
parent_span.set_tag("user_id", requester.user.to_string())
if requester.device_id is not None:
parent_span.set_tag("device_id", requester.device_id)
if requester.app_service is not None:
parent_span.set_tag("appservice_id", requester.app_service.id)
return requester
async def get_user_by_access_token(
self,
token: str,
allow_expired: bool = False,
) -> Requester:
try:
introspection_result = await self._introspection_cache.wrap(
token, self._introspect_token, token, cache_context=True
)
except Exception:
logger.exception("Failed to introspect token")
raise SynapseError(503, "Unable to introspect the access token")
logger.debug("Introspection result: %r", introspection_result)
if not introspection_result.is_active(self._clock.time_msec()):
raise InvalidClientTokenError("Token is not active")
# Let's look at the scope
scope = introspection_result.get_scope_set()
# Determine type of user based on presence of particular scopes
if SCOPE_MATRIX_API not in scope:
raise InvalidClientTokenError(
"Token doesn't grant access to the Matrix C-S API"
)
if introspection_result.username is None:
raise AuthError(
500,
"Invalid username claim in the introspection result",
)
user_id = UserID(
localpart=introspection_result.username,
domain=self.server_name,
)
# Try to find a user from the username claim
user_info = await self.store.get_user_by_id(user_id=user_id.to_string())
if user_info is None:
raise AuthError(
500,
"User not found",
)
# MAS will give us the device ID as an explicit value for *compatibility* sessions
# If present, we get it from here; if not, we get it from the scope for next-gen sessions
device_id = introspection_result.device_id
if device_id is None:
# Find device_ids in scope
# We only allow a single device_id in the scope, so we find them all in the
# scope list, and raise if there are more than one. The OIDC server should be
# the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
device_ids = [
tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
for tok in scope
if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
]
if len(device_ids) > 1:
raise AuthError(
500,
"Multiple device IDs in scope",
)
device_id = device_ids[0] if device_ids else None
if device_id is not None:
# Sanity check the device_id
if len(device_id) > 255 or len(device_id) < 1:
raise AuthError(
500,
"Invalid device ID in introspection result",
)
# Make sure the device exists. This helps with introspection cache
# invalidation: if we log out, the device gets deleted by MAS
device = await self.store.get_device(
user_id=user_id.to_string(),
device_id=device_id,
)
if device is None:
# Invalidate the introspection cache, the device was deleted
self._introspection_cache.unset(token)
raise InvalidClientTokenError("Token is not active")
return create_requester(
user_id=user_id,
device_id=device_id,
scope=scope,
)
async def get_user_by_req_experimental_feature(
self,
request: SynapseRequest,
feature: "ExperimentalFeature",
allow_guest: bool = False,
allow_expired: bool = False,
allow_locked: bool = False,
) -> Requester:
try:
requester = await self.get_user_by_req(
request,
allow_guest=allow_guest,
allow_expired=allow_expired,
allow_locked=allow_locked,
)
if await self.store.is_feature_enabled(requester.user.to_string(), feature):
return requester
raise UnrecognizedRequestError(code=404)
except (AuthError, InvalidClientTokenError):
if feature.is_globally_enabled(self.hs.config):
# If its globally enabled then return the auth error
raise
raise UnrecognizedRequestError(code=404)

View File

@@ -28,6 +28,7 @@ from authlib.oauth2.auth import encode_client_secret_basic, encode_client_secret
from authlib.oauth2.rfc7523 import ClientSecretJWT, PrivateKeyJWT, private_key_jwt_sign
from authlib.oauth2.rfc7662 import IntrospectionToken
from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url
from prometheus_client import Histogram
from synapse.api.auth.base import BaseAuth
from synapse.api.errors import (
@@ -46,21 +47,25 @@ from synapse.logging.opentracing import (
inject_request_headers,
start_active_span,
)
from synapse.metrics import SERVER_NAME_LABEL
from synapse.synapse_rust.http_client import HttpClient
from synapse.types import Requester, UserID, create_requester
from synapse.util import json_decoder
from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
from . import introspection_response_timer
if TYPE_CHECKING:
from synapse.rest.admin.experimental_features import ExperimentalFeature
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
introspection_response_timer = Histogram(
"synapse_api_auth_delegated_introspection_response",
"Time taken to get a response for an introspection request",
["code"],
)
# Scope as defined by MSC2967
# https://github.com/matrix-org/matrix-spec-proposals/pull/2967
SCOPE_MATRIX_API = "urn:matrix:org.matrix.msc2967.client:api:*"
@@ -171,7 +176,6 @@ class MSC3861DelegatedAuth(BaseAuth):
assert self._config.client_id, "No client_id provided"
assert auth_method is not None, "Invalid client_auth_method provided"
self.server_name = hs.hostname
self._clock = hs.get_clock()
self._http_client = hs.get_proxied_http_client()
self._hostname = hs.hostname
@@ -179,8 +183,7 @@ class MSC3861DelegatedAuth(BaseAuth):
self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users
self._rust_http_client = HttpClient(
reactor=hs.get_reactor(),
user_agent=self._http_client.user_agent.decode("utf8"),
user_agent=self._http_client.user_agent.decode("utf8")
)
# # Token Introspection Cache
@@ -203,9 +206,8 @@ class MSC3861DelegatedAuth(BaseAuth):
# In this case, the device still exists and it's not the end of the world for
# the old access token to continue working for a short time.
self._introspection_cache: ResponseCache[str] = ResponseCache(
clock=self._clock,
name="token_introspection",
server_name=self.server_name,
self._clock,
"token_introspection",
timeout_ms=120_000,
# don't log because the keys are access tokens
enable_logging=False,
@@ -336,23 +338,17 @@ class MSC3861DelegatedAuth(BaseAuth):
)
except HttpResponseException as e:
end_time = self._clock.time()
introspection_response_timer.labels(
code=e.code, **{SERVER_NAME_LABEL: self.server_name}
).observe(end_time - start_time)
introspection_response_timer.labels(e.code).observe(end_time - start_time)
raise
except Exception:
end_time = self._clock.time()
introspection_response_timer.labels(
code="ERR", **{SERVER_NAME_LABEL: self.server_name}
).observe(end_time - start_time)
introspection_response_timer.labels("ERR").observe(end_time - start_time)
raise
logger.debug("Fetched token from MAS")
end_time = self._clock.time()
introspection_response_timer.labels(
code=200, **{SERVER_NAME_LABEL: self.server_name}
).observe(end_time - start_time)
introspection_response_timer.labels(200).observe(end_time - start_time)
resp = json_decoder.decode(resp_body.decode("utf-8"))
@@ -370,12 +366,6 @@ class MSC3861DelegatedAuth(BaseAuth):
async def is_server_admin(self, requester: Requester) -> bool:
return "urn:synapse:admin:*" in requester.scope
def _is_access_token_the_admin_token(self, token: str) -> bool:
admin_token = self._admin_token()
if admin_token is None:
return False
return token == admin_token
async def get_user_by_req(
self,
request: SynapseRequest,
@@ -441,7 +431,7 @@ class MSC3861DelegatedAuth(BaseAuth):
requester = await self.get_user_by_access_token(access_token, allow_expired)
# Do not record requests from MAS using the virtual `__oidc_admin` user.
if not self._is_access_token_the_admin_token(access_token):
if access_token != self._admin_token():
await self._record_request(request, requester)
if not allow_guest and requester.is_guest:
@@ -477,29 +467,17 @@ class MSC3861DelegatedAuth(BaseAuth):
raise UnrecognizedRequestError(code=404)
def is_request_using_the_admin_token(self, request: SynapseRequest) -> bool:
"""
Check if the request is using the admin token.
Args:
request: The request to check.
Returns:
True if the request is using the admin token, False otherwise.
"""
access_token = self.get_access_token_from_request(request)
return self._is_access_token_the_admin_token(access_token)
async def get_user_by_access_token(
self,
token: str,
allow_expired: bool = False,
) -> Requester:
if self._is_access_token_the_admin_token(token):
admin_token = self._admin_token()
if admin_token is not None and token == admin_token:
# XXX: This is a temporary solution so that the admin API can be called by
# the OIDC provider. This will be removed once we have OIDC client
# credentials grant support in matrix-authentication-service.
logger.info("Admin token used")
logger.info("Admin toked used")
# XXX: that user doesn't exist and won't be provisioned.
# This is mostly fine for admin calls, but we should also think about doing
# requesters without a user_id.

View File

@@ -140,12 +140,6 @@ class Codes(str, Enum):
# Part of MSC4155
INVITE_BLOCKED = "ORG.MATRIX.MSC4155.M_INVITE_BLOCKED"
# Part of MSC4306: Thread Subscriptions
MSC4306_CONFLICTING_UNSUBSCRIPTION = (
"IO.ELEMENT.MSC4306.M_CONFLICTING_UNSUBSCRIPTION"
)
MSC4306_NOT_IN_THREAD = "IO.ELEMENT.MSC4306.M_NOT_IN_THREAD"
class CodeMessageException(RuntimeError):
"""An exception with integer code, a message string attributes and optional headers.

View File

@@ -36,12 +36,14 @@ class EventFormatVersions:
ROOM_V1_V2 = 1 # $id:server event id format: used for room v1 and v2
ROOM_V3 = 2 # MSC1659-style $hash event id format: used for room v3
ROOM_V4_PLUS = 3 # MSC1884-style $hash format: introduced for room v4
ROOM_V11_PLUS_MSC4291 = 4 # MSC4291 room IDs as hashes: introduced for room version MSC4291v11
KNOWN_EVENT_FORMAT_VERSIONS = {
EventFormatVersions.ROOM_V1_V2,
EventFormatVersions.ROOM_V3,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.ROOM_V11_PLUS_MSC4291,
}
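# Illustrative sketch (not part of this diff): the new format version is
# reachable through the room-version registry (defined below) like any other:
#
#     room_version = KNOWN_ROOM_VERSIONS["org.matrix.msc4291.11"]
#     assert room_version.event_format == EventFormatVersions.ROOM_V11_PLUS_MSC4291
#     assert room_version.msc4291_room_ids_as_hashes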
@@ -109,6 +111,8 @@ class RoomVersion:
msc3931_push_features: Tuple[str, ...] # values from PushRuleRoomFlag
# MSC3757: Restricting who can overwrite a state event
msc3757_enabled: bool
# MSC4291: Room IDs as hashes of the create event
msc4291_room_ids_as_hashes: bool
class RoomVersions:
@@ -131,6 +135,7 @@ class RoomVersions:
enforce_int_power_levels=False,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
V2 = RoomVersion(
"2",
@@ -151,6 +156,7 @@ class RoomVersions:
enforce_int_power_levels=False,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
V3 = RoomVersion(
"3",
@@ -171,6 +177,7 @@ class RoomVersions:
enforce_int_power_levels=False,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
V4 = RoomVersion(
"4",
@@ -191,6 +198,7 @@ class RoomVersions:
enforce_int_power_levels=False,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
V5 = RoomVersion(
"5",
@@ -211,6 +219,7 @@ class RoomVersions:
enforce_int_power_levels=False,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
V6 = RoomVersion(
"6",
@@ -231,6 +240,7 @@ class RoomVersions:
enforce_int_power_levels=False,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
V7 = RoomVersion(
"7",
@@ -251,6 +261,7 @@ class RoomVersions:
enforce_int_power_levels=False,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
V8 = RoomVersion(
"8",
@@ -271,6 +282,7 @@ class RoomVersions:
enforce_int_power_levels=False,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
V9 = RoomVersion(
"9",
@@ -291,6 +303,7 @@ class RoomVersions:
enforce_int_power_levels=False,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
V10 = RoomVersion(
"10",
@@ -311,6 +324,7 @@ class RoomVersions:
enforce_int_power_levels=True,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
MSC1767v10 = RoomVersion(
# MSC1767 (Extensible Events) based on room version "10"
@@ -332,6 +346,7 @@ class RoomVersions:
enforce_int_power_levels=True,
msc3931_push_features=(PushRuleRoomFlag.EXTENSIBLE_EVENTS,),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
MSC3757v10 = RoomVersion(
# MSC3757 (Restricting who can overwrite a state event) based on room version "10"
@@ -353,6 +368,7 @@ class RoomVersions:
enforce_int_power_levels=True,
msc3931_push_features=(),
msc3757_enabled=True,
msc4291_room_ids_as_hashes=False,
)
V11 = RoomVersion(
"11",
@@ -373,6 +389,7 @@ class RoomVersions:
enforce_int_power_levels=True,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=False,
)
MSC3757v11 = RoomVersion(
# MSC3757 (Restricting who can overwrite a state event) based on room version "11"
@@ -394,6 +411,28 @@ class RoomVersions:
enforce_int_power_levels=True,
msc3931_push_features=(),
msc3757_enabled=True,
msc4291_room_ids_as_hashes=False,
)
MSC4291v11 = RoomVersion(
"org.matrix.msc4291.11",
RoomDisposition.UNSTABLE,
EventFormatVersions.ROOM_V11_PLUS_MSC4291,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
strict_canonicaljson=True,
limit_notifications_power_levels=True,
implicit_room_creator=True, # Used by MSC3820
updated_redaction_rules=True, # Used by MSC3820
restricted_join_rule=True,
restricted_join_rule_fix=True,
knock_join_rule=True,
msc3389_relation_redactions=False,
knock_restricted_join_rule=True,
enforce_int_power_levels=True,
msc3931_push_features=(),
msc3757_enabled=False,
msc4291_room_ids_as_hashes=True,
)
@@ -413,6 +452,7 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
RoomVersions.V11,
RoomVersions.MSC3757v10,
RoomVersions.MSC3757v11,
RoomVersions.MSC4291v11,
)
}

View File

@@ -75,7 +75,7 @@ from synapse.http.site import SynapseSite
from synapse.logging.context import PreserveLoggingContext
from synapse.logging.opentracing import init_tracer
from synapse.metrics import install_gc_manager, register_threadpool
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.metrics.jemalloc import setup_jemalloc_stats
from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers
from synapse.module_api.callbacks.third_party_event_rules_callbacks import (
@@ -286,16 +286,6 @@ def register_start(
def listen_metrics(bind_addresses: StrCollection, port: int) -> None:
"""
Start Prometheus metrics server.
This method runs the metrics server on a different port, in a different thread to
Synapse. This can make it more resilient to heavy load in Synapse causing metric
requests to be slow or time out.
Even though `start_http_server_prometheus(...)` uses `threading.Thread` behind the
scenes (where all threads share the GIL and only one thread can execute Python
bytecode at a time), this still works because the metrics thread can preempt the
Twisted reactor thread between bytecode boundaries and the metrics thread gets
scheduled with roughly equal priority to the Twisted reactor thread.
"""
from prometheus_client import start_http_server as start_http_server_prometheus
@@ -303,9 +293,32 @@ def listen_metrics(bind_addresses: StrCollection, port: int) -> None:
for host in bind_addresses:
logger.info("Starting metrics listener on %s:%d", host, port)
_set_prometheus_client_use_created_metrics(False)
start_http_server_prometheus(port, addr=host, registry=RegistryProxy)
def _set_prometheus_client_use_created_metrics(new_value: bool) -> None:
"""
Sets whether prometheus_client should expose `_created`-suffixed metrics for
all gauges, histograms and summaries.
There is no programmatic way to disable this without poking at internals;
the proper way is to use an environment variable which prometheus_client
loads at import time.
The motivation for disabling these `_created` metrics is that they're
not useful and needlessly take up space in Prometheus.
"""
import prometheus_client.metrics
if hasattr(prometheus_client.metrics, "_use_created"):
prometheus_client.metrics._use_created = new_value
else:
logger.error(
"Can't disable `_created` metrics in prometheus_client (brittle hack broken?)"
)
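# Illustrative alternative (not part of this diff): recent prometheus_client
# versions also honour an environment variable read at import time, which
# avoids poking at internals, provided it is set before the first import:
import os
os.environ["PROMETHEUS_DISABLE_CREATED_SERIES"] = "True"
import prometheus_client  # noqa: E402  # must come after the env var is set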
def listen_manhole(
bind_addresses: StrCollection,
port: int,
@@ -512,7 +525,6 @@ async def start(hs: "HomeServer") -> None:
Args:
hs: homeserver instance
"""
server_name = hs.hostname
reactor = hs.get_reactor()
# We want to use a separate thread pool for the resolver so that large
@@ -525,34 +537,22 @@ async def start(hs: "HomeServer") -> None:
)
# Register the threadpools with our metrics.
register_threadpool(
name="default", server_name=server_name, threadpool=reactor.getThreadPool()
)
register_threadpool(
name="gai_resolver", server_name=server_name, threadpool=resolver_threadpool
)
register_threadpool("default", reactor.getThreadPool())
register_threadpool("gai_resolver", resolver_threadpool)
# Set up the SIGHUP machinery.
if hasattr(signal, "SIGHUP"):
def handle_sighup(*args: Any, **kwargs: Any) -> "defer.Deferred[None]":
async def _handle_sighup(*args: Any, **kwargs: Any) -> None:
# Tell systemd our state, if we're using it. This will silently fail if
# we're not using systemd.
sdnotify(b"RELOADING=1")
@wrap_as_background_process("sighup")
async def handle_sighup(*args: Any, **kwargs: Any) -> None:
# Tell systemd our state, if we're using it. This will silently fail if
# we're not using systemd.
sdnotify(b"RELOADING=1")
for i, args, kwargs in _sighup_callbacks:
i(*args, **kwargs)
for i, args, kwargs in _sighup_callbacks:
i(*args, **kwargs)
sdnotify(b"READY=1")
return run_as_background_process(
"sighup",
server_name,
_handle_sighup,
*args,
**kwargs,
)
sdnotify(b"READY=1")
# We defer running the sighup handlers until next reactor tick. This
# is so that we're in a sane state, e.g. flushing the logs may fail
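# Illustrative sketch (not part of this diff) of the two shapes the hunk above
# moves between: the decorator bakes the metric name in, while the explicit
# call also threads a `server_name` through for metrics labelling.
@wrap_as_background_process("sighup")
async def handle_sighup_decorated(*args: Any, **kwargs: Any) -> None:
...  # body runs as a named background process
def handle_sighup_explicit(*args: Any, **kwargs: Any) -> "defer.Deferred[None]":
async def _inner(*args: Any, **kwargs: Any) -> None:
...
return run_as_background_process("sighup", server_name, _inner, *args, **kwargs)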

View File

@@ -104,9 +104,6 @@ from synapse.storage.databases.main.stats import StatsStore
from synapse.storage.databases.main.stream import StreamWorkerStore
from synapse.storage.databases.main.tags import TagsWorkerStore
from synapse.storage.databases.main.task_scheduler import TaskSchedulerWorkerStore
from synapse.storage.databases.main.thread_subscriptions import (
ThreadSubscriptionsWorkerStore,
)
from synapse.storage.databases.main.transactions import TransactionWorkerStore
from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore
from synapse.storage.databases.main.user_directory import UserDirectoryStore
@@ -135,7 +132,6 @@ class GenericWorkerStore(
KeyStore,
RoomWorkerStore,
DirectoryWorkerStore,
ThreadSubscriptionsWorkerStore,
PushRulesWorkerStore,
ApplicationServiceTransactionWorkerStore,
ApplicationServiceWorkerStore,

View File

@@ -26,12 +26,7 @@ from typing import TYPE_CHECKING, List, Mapping, Sized, Tuple
from prometheus_client import Gauge
from twisted.internet import defer
from synapse.metrics import SERVER_NAME_LABEL
from synapse.metrics.background_process_metrics import (
run_as_background_process,
)
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.types import JsonDict
from synapse.util.constants import ONE_HOUR_SECONDS, ONE_MINUTE_SECONDS
@@ -58,158 +53,138 @@ Phone home stats are sent every 3 hours
_stats_process: List[Tuple[int, "resource.struct_rusage"]] = []
# Gauges to expose monthly active user control metrics
current_mau_gauge = Gauge(
"synapse_admin_mau_current",
"Current MAU",
labelnames=[SERVER_NAME_LABEL],
)
current_mau_gauge = Gauge("synapse_admin_mau_current", "Current MAU")
current_mau_by_service_gauge = Gauge(
"synapse_admin_mau_current_mau_by_service",
"Current MAU by service",
labelnames=["app_service", SERVER_NAME_LABEL],
)
max_mau_gauge = Gauge(
"synapse_admin_mau_max",
"MAU Limit",
labelnames=[SERVER_NAME_LABEL],
["app_service"],
)
max_mau_gauge = Gauge("synapse_admin_mau_max", "MAU Limit")
registered_reserved_users_mau_gauge = Gauge(
"synapse_admin_mau_registered_reserved_users",
"Registered users with reserved threepids",
labelnames=[SERVER_NAME_LABEL],
)
def phone_stats_home(
@wrap_as_background_process("phone_stats_home")
async def phone_stats_home(
hs: "HomeServer",
stats: JsonDict,
stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process,
) -> "defer.Deferred[None]":
server_name = hs.hostname
) -> None:
"""Collect usage statistics and send them to the configured endpoint.
async def _phone_stats_home(
hs: "HomeServer",
stats: JsonDict,
stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process,
) -> None:
"""Collect usage statistics and send them to the configured endpoint.
Args:
hs: the HomeServer object to use for gathering usage data.
stats: the dict in which to store the statistics sent to the configured
endpoint. Mostly used in tests to figure out the data that is supposed to
be sent.
stats_process: statistics about resource usage of the process.
"""
Args:
hs: the HomeServer object to use for gathering usage data.
stats: the dict in which to store the statistics sent to the configured
endpoint. Mostly used in tests to figure out the data that is supposed to
be sent.
stats_process: statistics about resource usage of the process.
"""
logger.info("Gathering stats for reporting")
now = int(hs.get_clock().time())
# Ensure the homeserver has started.
assert hs.start_time is not None
uptime = int(now - hs.start_time)
if uptime < 0:
uptime = 0
logger.info("Gathering stats for reporting")
now = int(hs.get_clock().time())
# Ensure the homeserver has started.
assert hs.start_time is not None
uptime = int(now - hs.start_time)
if uptime < 0:
uptime = 0
#
# Performance statistics. Keep this early in the function to maintain the reliability of the `test_performance_100` test.
#
old = stats_process[0]
new = (now, resource.getrusage(resource.RUSAGE_SELF))
stats_process[0] = new
#
# Performance statistics. Keep this early in the function to maintain the reliability of the `test_performance_100` test.
#
old = stats_process[0]
new = (now, resource.getrusage(resource.RUSAGE_SELF))
stats_process[0] = new
# Get RSS in bytes
stats["memory_rss"] = new[1].ru_maxrss
# Get RSS in bytes
stats["memory_rss"] = new[1].ru_maxrss
# Get CPU time in % of a single core, not % of all cores
used_cpu_time = (new[1].ru_utime + new[1].ru_stime) - (
old[1].ru_utime + old[1].ru_stime
)
if used_cpu_time == 0 or new[0] == old[0]:
stats["cpu_average"] = 0
else:
stats["cpu_average"] = math.floor(used_cpu_time / (new[0] - old[0]) * 100)
#
# General statistics
#
store = hs.get_datastores().main
common_metrics = await hs.get_common_usage_metrics_manager().get_metrics()
stats["homeserver"] = hs.config.server.server_name
stats["server_context"] = hs.config.server.server_context
stats["timestamp"] = now
stats["uptime_seconds"] = uptime
version = sys.version_info
stats["python_version"] = "{}.{}.{}".format(
version.major, version.minor, version.micro
)
stats["total_users"] = await store.count_all_users()
total_nonbridged_users = await store.count_nonbridged_users()
stats["total_nonbridged_users"] = total_nonbridged_users
daily_user_type_results = await store.count_daily_user_type()
for name, count in daily_user_type_results.items():
stats["daily_user_type_" + name] = count
room_count = await store.get_room_count()
stats["total_room_count"] = room_count
stats["daily_active_users"] = common_metrics.daily_active_users
stats["monthly_active_users"] = await store.count_monthly_users()
daily_active_e2ee_rooms = await store.count_daily_active_e2ee_rooms()
stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms
stats["daily_e2ee_messages"] = await store.count_daily_e2ee_messages()
daily_sent_e2ee_messages = await store.count_daily_sent_e2ee_messages()
stats["daily_sent_e2ee_messages"] = daily_sent_e2ee_messages
stats["daily_active_rooms"] = await store.count_daily_active_rooms()
stats["daily_messages"] = await store.count_daily_messages()
daily_sent_messages = await store.count_daily_sent_messages()
stats["daily_sent_messages"] = daily_sent_messages
r30v2_results = await store.count_r30v2_users()
for name, count in r30v2_results.items():
stats["r30v2_users_" + name] = count
stats["cache_factor"] = hs.config.caches.global_factor
stats["event_cache_size"] = hs.config.caches.event_cache_size
#
# Database version
#
# This only reports info about the *main* database.
stats["database_engine"] = store.db_pool.engine.module.__name__
stats["database_server_version"] = store.db_pool.engine.server_version
#
# Logging configuration
#
synapse_logger = logging.getLogger("synapse")
log_level = synapse_logger.getEffectiveLevel()
stats["log_level"] = logging.getLevelName(log_level)
logger.info(
"Reporting stats to %s: %s", hs.config.metrics.report_stats_endpoint, stats
)
try:
await hs.get_proxied_http_client().put_json(
hs.config.metrics.report_stats_endpoint, stats
)
except Exception as e:
logger.warning("Error reporting stats: %s", e)
return run_as_background_process(
"phone_stats_home", server_name, _phone_stats_home, hs, stats, stats_process
# Get CPU time in % of a single core, not % of all cores
used_cpu_time = (new[1].ru_utime + new[1].ru_stime) - (
old[1].ru_utime + old[1].ru_stime
)
if used_cpu_time == 0 or new[0] == old[0]:
stats["cpu_average"] = 0
else:
stats["cpu_average"] = math.floor(used_cpu_time / (new[0] - old[0]) * 100)
#
# General statistics
#
store = hs.get_datastores().main
common_metrics = await hs.get_common_usage_metrics_manager().get_metrics()
stats["homeserver"] = hs.config.server.server_name
stats["server_context"] = hs.config.server.server_context
stats["timestamp"] = now
stats["uptime_seconds"] = uptime
version = sys.version_info
stats["python_version"] = "{}.{}.{}".format(
version.major, version.minor, version.micro
)
stats["total_users"] = await store.count_all_users()
total_nonbridged_users = await store.count_nonbridged_users()
stats["total_nonbridged_users"] = total_nonbridged_users
daily_user_type_results = await store.count_daily_user_type()
for name, count in daily_user_type_results.items():
stats["daily_user_type_" + name] = count
room_count = await store.get_room_count()
stats["total_room_count"] = room_count
stats["daily_active_users"] = common_metrics.daily_active_users
stats["monthly_active_users"] = await store.count_monthly_users()
daily_active_e2ee_rooms = await store.count_daily_active_e2ee_rooms()
stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms
stats["daily_e2ee_messages"] = await store.count_daily_e2ee_messages()
daily_sent_e2ee_messages = await store.count_daily_sent_e2ee_messages()
stats["daily_sent_e2ee_messages"] = daily_sent_e2ee_messages
stats["daily_active_rooms"] = await store.count_daily_active_rooms()
stats["daily_messages"] = await store.count_daily_messages()
daily_sent_messages = await store.count_daily_sent_messages()
stats["daily_sent_messages"] = daily_sent_messages
r30v2_results = await store.count_r30v2_users()
for name, count in r30v2_results.items():
stats["r30v2_users_" + name] = count
stats["cache_factor"] = hs.config.caches.global_factor
stats["event_cache_size"] = hs.config.caches.event_cache_size
#
# Database version
#
# This only reports info about the *main* database.
stats["database_engine"] = store.db_pool.engine.module.__name__
stats["database_server_version"] = store.db_pool.engine.server_version
#
# Logging configuration
#
synapse_logger = logging.getLogger("synapse")
log_level = synapse_logger.getEffectiveLevel()
stats["log_level"] = logging.getLevelName(log_level)
logger.info(
"Reporting stats to %s: %s", hs.config.metrics.report_stats_endpoint, stats
)
try:
await hs.get_proxied_http_client().put_json(
hs.config.metrics.report_stats_endpoint, stats
)
except Exception as e:
logger.warning("Error reporting stats: %s", e)
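# Worked example (not part of this diff) of the CPU figure computed above: the
# stat is percent of a single core, so values above 100 are possible on
# multi-core machines.
import math
used_cpu_time = 1.5  # seconds of CPU time consumed between the two samples
elapsed = 60.0  # wall-clock seconds between the two samples
cpu_average = math.floor(used_cpu_time / elapsed * 100)
assert cpu_average == 2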
def start_phone_stats_home(hs: "HomeServer") -> None:
"""
Start the background tasks which report phone home stats.
"""
server_name = hs.hostname
clock = hs.get_clock()
stats: JsonDict = {}
@@ -235,39 +210,25 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
)
hs.get_datastores().main.reap_monthly_active_users()
def generate_monthly_active_users() -> "defer.Deferred[None]":
async def _generate_monthly_active_users() -> None:
current_mau_count = 0
current_mau_count_by_service: Mapping[str, int] = {}
reserved_users: Sized = ()
store = hs.get_datastores().main
if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
current_mau_count = await store.get_monthly_active_count()
current_mau_count_by_service = (
await store.get_monthly_active_count_by_service()
)
reserved_users = await store.get_registered_reserved_users()
current_mau_gauge.labels(**{SERVER_NAME_LABEL: server_name}).set(
float(current_mau_count)
@wrap_as_background_process("generate_monthly_active_users")
async def generate_monthly_active_users() -> None:
current_mau_count = 0
current_mau_count_by_service: Mapping[str, int] = {}
reserved_users: Sized = ()
store = hs.get_datastores().main
if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
current_mau_count = await store.get_monthly_active_count()
current_mau_count_by_service = (
await store.get_monthly_active_count_by_service()
)
reserved_users = await store.get_registered_reserved_users()
current_mau_gauge.set(float(current_mau_count))
for app_service, count in current_mau_count_by_service.items():
current_mau_by_service_gauge.labels(
app_service=app_service, **{SERVER_NAME_LABEL: server_name}
).set(float(count))
for app_service, count in current_mau_count_by_service.items():
current_mau_by_service_gauge.labels(app_service).set(float(count))
registered_reserved_users_mau_gauge.labels(
**{SERVER_NAME_LABEL: server_name}
).set(float(len(reserved_users)))
max_mau_gauge.labels(**{SERVER_NAME_LABEL: server_name}).set(
float(hs.config.server.max_mau_value)
)
return run_as_background_process(
"generate_monthly_active_users",
server_name,
_generate_monthly_active_users,
)
registered_reserved_users_mau_gauge.set(float(len(reserved_users)))
max_mau_gauge.set(float(hs.config.server.max_mau_value))
if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
generate_monthly_active_users()
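# Illustrative sketch (not part of this diff) of the two gauge styles this hunk
# moves between: an unlabelled gauge is set directly, while a labelled one must
# be resolved with `.labels(...)` first. Gauge names here are made up.
from prometheus_client import Gauge
plain_gauge = Gauge("example_plain", "An unlabelled example gauge")
plain_gauge.set(1.0)
labelled_gauge = Gauge("example_labelled", "A labelled example gauge", ["server_name"])
labelled_gauge.labels(server_name="example.com").set(1.0)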

View File

@@ -78,7 +78,7 @@ class ApplicationService:
self,
token: str,
id: str,
sender: UserID,
sender: str,
url: Optional[str] = None,
namespaces: Optional[JsonDict] = None,
hs_token: Optional[str] = None,
@@ -96,8 +96,6 @@ class ApplicationService:
self.hs_token = hs_token
# The full Matrix ID for this application service's sender.
self.sender = sender
# The application service user should be part of the server's domain.
self.server_name = sender.domain # nb must be called this for @cached
self.namespaces = self._check_namespaces(namespaces)
self.id = id
self.ip_range_whitelist = ip_range_whitelist
@@ -225,7 +223,7 @@ class ApplicationService:
"""
return (
# User is the appservice's configured sender_localpart user
user_id == self.sender.to_string()
user_id == self.sender
# User is in the appservice's user namespace
or self.is_user_in_namespace(user_id)
)
@@ -349,7 +347,7 @@ class ApplicationService:
def is_exclusive_user(self, user_id: str) -> bool:
return (
self._is_exclusive(ApplicationService.NS_USERS, user_id)
or user_id == self.sender.to_string()
or user_id == self.sender
)
def is_interested_in_protocol(self, protocol: str) -> bool:

View File

@@ -48,7 +48,6 @@ from synapse.events import EventBase
from synapse.events.utils import SerializeEventConfig, serialize_event
from synapse.http.client import SimpleHttpClient, is_unknown_endpoint
from synapse.logging import opentracing
from synapse.metrics import SERVER_NAME_LABEL
from synapse.types import DeviceListUpdates, JsonDict, JsonMapping, ThirdPartyInstanceID
from synapse.util.caches.response_cache import ResponseCache
@@ -60,31 +59,29 @@ logger = logging.getLogger(__name__)
sent_transactions_counter = Counter(
"synapse_appservice_api_sent_transactions",
"Number of /transactions/ requests sent",
labelnames=["service", SERVER_NAME_LABEL],
["service"],
)
failed_transactions_counter = Counter(
"synapse_appservice_api_failed_transactions",
"Number of /transactions/ requests that failed to send",
labelnames=["service", SERVER_NAME_LABEL],
["service"],
)
sent_events_counter = Counter(
"synapse_appservice_api_sent_events",
"Number of events sent to the AS",
labelnames=["service", SERVER_NAME_LABEL],
"synapse_appservice_api_sent_events", "Number of events sent to the AS", ["service"]
)
sent_ephemeral_counter = Counter(
"synapse_appservice_api_sent_ephemeral",
"Number of ephemeral events sent to the AS",
labelnames=["service", SERVER_NAME_LABEL],
["service"],
)
sent_todevice_counter = Counter(
"synapse_appservice_api_sent_todevice",
"Number of todevice messages sent to the AS",
labelnames=["service", SERVER_NAME_LABEL],
["service"],
)
HOUR_IN_MS = 60 * 60 * 1000
@@ -129,15 +126,11 @@ class ApplicationServiceApi(SimpleHttpClient):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.server_name = hs.hostname
self.clock = hs.get_clock()
self.config = hs.config.appservice
self.protocol_meta_cache: ResponseCache[Tuple[str, str]] = ResponseCache(
clock=hs.get_clock(),
name="as_protocol_meta",
server_name=self.server_name,
timeout_ms=HOUR_IN_MS,
hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
)
def _get_headers(self, service: "ApplicationService") -> Dict[bytes, List[bytes]]:
@@ -385,7 +378,6 @@ class ApplicationServiceApi(SimpleHttpClient):
"left": list(device_list_summary.left),
}
labels = {"service": service.id, SERVER_NAME_LABEL: self.server_name}
try:
args = None
if self.config.use_appservice_legacy_authorization:
@@ -403,10 +395,10 @@ class ApplicationServiceApi(SimpleHttpClient):
service.url,
[event.get("event_id") for event in events],
)
sent_transactions_counter.labels(**labels).inc()
sent_events_counter.labels(**labels).inc(len(serialized_events))
sent_ephemeral_counter.labels(**labels).inc(len(ephemeral))
sent_todevice_counter.labels(**labels).inc(len(to_device_messages))
sent_transactions_counter.labels(service.id).inc()
sent_events_counter.labels(service.id).inc(len(serialized_events))
sent_ephemeral_counter.labels(service.id).inc(len(ephemeral))
sent_todevice_counter.labels(service.id).inc(len(to_device_messages))
return True
except CodeMessageException as e:
logger.warning(
@@ -425,7 +417,7 @@ class ApplicationServiceApi(SimpleHttpClient):
ex.args,
exc_info=logger.isEnabledFor(logging.DEBUG),
)
failed_transactions_counter.labels(**labels).inc()
failed_transactions_counter.labels(service.id).inc()
return False
async def claim_client_keys(

View File

@@ -103,16 +103,18 @@ MAX_TO_DEVICE_MESSAGES_PER_TRANSACTION = 100
class ApplicationServiceScheduler:
"""
Public facing API for this module. Does the required dependency injection (DI) to
tie the components together. This also serves as the "event_pool", which in this
"""Public facing API for this module. Does the required DI to tie the
components together. This also serves as the "event_pool", which in this
case is a simple array.
"""
def __init__(self, hs: "HomeServer"):
self.txn_ctrl = _TransactionController(hs)
self.clock = hs.get_clock()
self.store = hs.get_datastores().main
self.queuer = _ServiceQueuer(self.txn_ctrl, hs)
self.as_api = hs.get_application_service_api()
self.txn_ctrl = _TransactionController(self.clock, self.store, self.as_api)
self.queuer = _ServiceQueuer(self.txn_ctrl, self.clock, hs)
async def start(self) -> None:
logger.info("Starting appservice scheduler")
@@ -182,7 +184,9 @@ class _ServiceQueuer:
appservice at a given time.
"""
def __init__(self, txn_ctrl: "_TransactionController", hs: "HomeServer"):
def __init__(
self, txn_ctrl: "_TransactionController", clock: Clock, hs: "HomeServer"
):
# dict of {service_id: [events]}
self.queued_events: Dict[str, List[EventBase]] = {}
# dict of {service_id: [events]}
@@ -195,11 +199,10 @@ class _ServiceQueuer:
# the appservices which currently have a transaction in flight
self.requests_in_flight: Set[str] = set()
self.txn_ctrl = txn_ctrl
self.clock = clock
self._msc3202_transaction_extensions_enabled: bool = (
hs.config.experimental.msc3202_transaction_extensions
)
self.server_name = hs.hostname
self.clock = hs.get_clock()
self._store = hs.get_datastores().main
def start_background_request(self, service: ApplicationService) -> None:
@@ -207,9 +210,7 @@ class _ServiceQueuer:
if service.id in self.requests_in_flight:
return
run_as_background_process(
"as-sender", self.server_name, self._send_request, service
)
run_as_background_process("as-sender", self._send_request, service)
async def _send_request(self, service: ApplicationService) -> None:
# sanity-check: we shouldn't get here if this service already has a sender
@@ -318,7 +319,7 @@ class _ServiceQueuer:
users: Set[str] = set()
# The sender is always included
users.add(service.sender.to_string())
users.add(service.sender)
# All AS users that would receive the PDUs or EDUs sent to these rooms
# are classed as 'interesting'.
@@ -358,11 +359,10 @@ class _TransactionController:
(Note we have only have one of these in the homeserver.)
"""
def __init__(self, hs: "HomeServer"):
self.server_name = hs.hostname
self.clock = hs.get_clock()
self.store = hs.get_datastores().main
self.as_api = hs.get_application_service_api()
def __init__(self, clock: Clock, store: DataStore, as_api: ApplicationServiceApi):
self.clock = clock
self.store = store
self.as_api = as_api
# map from service id to recoverer instance
self.recoverers: Dict[str, "_Recoverer"] = {}
@@ -446,12 +446,7 @@ class _TransactionController:
logger.info("Starting recoverer for AS ID %s", service.id)
assert service.id not in self.recoverers
recoverer = self.RECOVERER_CLASS(
self.server_name,
self.clock,
self.store,
self.as_api,
service,
self.on_recovered,
self.clock, self.store, self.as_api, service, self.on_recovered
)
self.recoverers[service.id] = recoverer
recoverer.recover()
@@ -482,24 +477,21 @@ class _Recoverer:
We have one of these for each appservice which is currently considered DOWN.
Args:
server_name: the homeserver name, used to label metrics (this should be `hs.hostname`).
clock:
store:
as_api:
service: the service we are managing
callback: called once the service recovers.
clock (synapse.util.Clock):
store (synapse.storage.DataStore):
as_api (synapse.appservice.api.ApplicationServiceApi):
service (synapse.appservice.ApplicationService): the service we are managing
callback (callable[_Recoverer]): called once the service recovers.
"""
def __init__(
self,
server_name: str,
clock: Clock,
store: DataStore,
as_api: ApplicationServiceApi,
service: ApplicationService,
callback: Callable[["_Recoverer"], Awaitable[None]],
):
self.server_name = server_name
self.clock = clock
self.store = store
self.as_api = as_api
@@ -512,11 +504,7 @@ class _Recoverer:
delay = 2**self.backoff_counter
logger.info("Scheduling retries on %s in %fs", self.service.id, delay)
self.scheduled_recovery = self.clock.call_later(
delay,
run_as_background_process,
"as-recoverer",
self.server_name,
self.retry,
delay, run_as_background_process, "as-recoverer", self.retry
)
def _backoff(self) -> None:
@@ -537,7 +525,6 @@ class _Recoverer:
# Run a retry, which will reschedule a recovery if it fails.
run_as_background_process(
"retry",
self.server_name,
self.retry,
)
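# Worked example (not part of this diff): `recover` schedules the retry after
# `2 ** backoff_counter` seconds and `_backoff` bumps the counter on each
# failure, so with a counter starting at 1 (an assumption, not shown in this
# hunk) the delays run 2s, 4s, 8s, 16s, ...
delays = [2**counter for counter in range(1, 5)]
assert delays == [2, 4, 8, 16]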

Some files were not shown because too many files have changed in this diff