Compare commits
86 commits: erikj/tree...anoa/msc33
| Author | SHA1 | Date |
|---|---|---|
| | 6dc4fda9c7 | |
| | c8721fd25b | |
| | fdf9e2f9d6 | |
| | 8e35bfc889 | |
| | 47fe40b7ca | |
| | 5c7d2e05e8 | |
| | f112fd6c12 | |
| | 8c255cbb1d | |
| | f66d743eae | |
| | 16cccade57 | |
| | c5765a480f | |
| | a17c4e0590 | |
| | e8884cc282 | |
| | 08a881dba4 | |
| | 4f1eba469c | |
| | 2888d7ec83 | |
| | adbf0cffc4 | |
| | 9c89707b56 | |
| | 51abfe5625 | |
| | 9205249be7 | |
| | 3aeca2588b | |
| | 864c3f85b0 | |
| | 652d1669c5 | |
| | 54c012c5a8 | |
| | 046320b9b6 | |
| | 4f4d690423 | |
| | fb60cb16fe | |
| | 24a97b3e71 | |
| | e512b25cd1 | |
| | 5025dbf7a2 | |
| | d567a8265f | |
| | 51e7255fbb | |
| | e70f398f4a | |
| | 2920e540bf | |
| | 822646b636 | |
| | b8cf480fa9 | |
| | 62ed877433 | |
| | e2a1adbf5d | |
| | 3d87847ecc | |
| | 7982891794 | |
| | b5b5f66084 | |
| | 74b89c2761 | |
| | 527366f962 | |
| | b087964875 | |
| | 2a3cd59dd0 | |
| | a5d8fee097 | |
| | ceb7be56a6 | |
| | eb32bc5056 | |
| | 4ea8745724 | |
| | 373c485d8c | |
| | 3ac412b4e2 | |
| | 94bc21e69f | |
| | c2de2ca630 | |
| | a58b550eac | |
| | c369e95691 | |
| | 9d8a3234ba | |
| | da77720752 | |
| | f3ad68c343 | |
| | dfe8febe47 | |
| | 60c3fea327 | |
| | 2506dd7641 | |
| | be3a8a85e3 | |
| | 22e91b8019 | |
| | 96251af50d | |
| | d69bf3b24c | |
| | 9a9568168a | |
| | cf1059d045 | |
| | 9e82caac45 | |
| | 66d47b44cd | |
| | bb9f156978 | |
| | 9b6224577e | |
| | a16931f30d | |
| | 5d7c35b4d9 | |
| | dc6b60f68d | |
| | cb59e08062 | |
| | cee9445884 | |
| | 6a8310f3df | |
| | 501f62d1a6 | |
| | e1779bc69f | |
| | 93ac3c197e | |
| | 05eb55f57d | |
| | 057cc7850a | |
| | de6bb61062 | |
| | 7558d294ae | |
| | 680a8d4e9e | |
| | 802539159e | |
@@ -21,7 +21,7 @@ endblock
 
 block Install Complement Dependencies
 sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
-go get -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
+go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
 endblock
 
 block Install custom gotestfmt template
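For background on the `go get` to `go install` switch above: since Go 1.17, `go install module@version` is the supported way to build and install executables, and newer toolchains reject `go get` for that purpose in module-aware mode. A minimal sketch of the resulting setup (paths assume a default Go toolchain configuration):

```sh
# gotestfmt lands in $(go env GOPATH)/bin (or $GOBIN, if set);
# that directory must be on PATH for later CI steps to find it.
go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
export PATH="$PATH:$(go env GOPATH)/bin"
gotestfmt -help
```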
.github/workflows/latest_deps.yml (2 changes, vendored)

@@ -208,7 +208,7 @@ jobs:
 
     steps:
       - uses: actions/checkout@v3
-      - uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
+      - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
.github/workflows/tests.yml (10 changes, vendored)

@@ -197,8 +197,12 @@ jobs:
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
         if: ${{ matrix.job.postgres-version }}
+        # 1. Mount postgres data files onto a tmpfs in-memory filesystem to reduce overhead of docker's overlayfs layer.
+        # 2. Expose the unix socket for postgres. This removes latency of using docker-proxy for connections.
         run: |
           docker run -d -p 5432:5432 \
+            --tmpfs /var/lib/postgres:rw,size=6144m \
+            --mount 'type=bind,src=/var/run/postgresql,dst=/var/run/postgresql' \
             -e POSTGRES_PASSWORD=postgres \
             -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
             postgres:${{ matrix.job.postgres-version }}

@@ -220,10 +224,10 @@
         if: ${{ matrix.job.postgres-version }}
         timeout-minutes: 2
         run: until pg_isready -h localhost; do sleep 1; done
-      - run: poetry run trial --jobs=2 tests
+      - run: poetry run trial --jobs=6 tests
         env:
           SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
-          SYNAPSE_POSTGRES_HOST: localhost
+          SYNAPSE_POSTGRES_HOST: /var/run/postgresql
           SYNAPSE_POSTGRES_USER: postgres
           SYNAPSE_POSTGRES_PASSWORD: postgres
       - name: Dump logs

@@ -292,7 +296,7 @@
           python-version: '3.7'
           extras: "all test"
 
-      - run: poetry run trial -j2 tests
+      - run: poetry run trial -j6 tests
       - name: Dump logs
         # Logs are most useful when the command fails, always include them.
         if: ${{ always() }}
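Because the new `SYNAPSE_POSTGRES_HOST` value starts with `/`, libpq-based clients treat it as a Unix socket directory rather than a TCP hostname, which is what lets the tests bypass docker-proxy. A quick way to confirm the bind-mounted socket works, assuming the container above is running and `psql` is installed on the host:

```sh
# A path-valued host selects the socket in that directory
# (/var/run/postgresql/.s.PGSQL.5432) instead of a TCP connection.
PGPASSWORD=postgres psql -h /var/run/postgresql -U postgres -c 'SELECT 1;'
```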
.github/workflows/twisted_trunk.yml (2 changes, vendored)

@@ -174,7 +174,7 @@ jobs:
 
     steps:
      - uses: actions/checkout@v3
-      - uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
+      - uses: JasonEtco/create-an-issue@3a8ba796516b57db8cb2ee6dfc65bc76cd39d56d # v2.8.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
.gitignore (1 change, vendored)

@@ -36,6 +36,7 @@ __pycache__/
 
 # For direnv users
 /.envrc
+.direnv/
 
 # IDEs
 /.idea/
CHANGES.md (81 changes)

@@ -1,8 +1,85 @@
-Synapse 1.73.0rc2 (2022-12-01)
+Synapse 1.74.0rc1 (2022-12-13)
 ==============================
+
+Features
+--------
+
+- Improve user search for international display names. ([\#14464](https://github.com/matrix-org/synapse/issues/14464))
+- Stop using deprecated `keyIds` parameter when calling `/_matrix/key/v2/server`. ([\#14490](https://github.com/matrix-org/synapse/issues/14490), [\#14525](https://github.com/matrix-org/synapse/issues/14525))
+- Add new `push.enabled` config option to allow opting out of push notification calculation. ([\#14551](https://github.com/matrix-org/synapse/issues/14551), [\#14619](https://github.com/matrix-org/synapse/issues/14619))
+- Advertise support for Matrix 1.5 on `/_matrix/client/versions`. ([\#14576](https://github.com/matrix-org/synapse/issues/14576))
+- Improve opentracing and logging for to-device message handling. ([\#14598](https://github.com/matrix-org/synapse/issues/14598))
+- Allow selecting "prejoin" events by state keys in addition to event types. ([\#14642](https://github.com/matrix-org/synapse/issues/14642))
+
+
+Bugfixes
+--------
+
+- Fix a long-standing bug where a device list update might not be sent to clients in certain circumstances. ([\#14435](https://github.com/matrix-org/synapse/issues/14435), [\#14592](https://github.com/matrix-org/synapse/issues/14592), [\#14604](https://github.com/matrix-org/synapse/issues/14604))
+- Suppress a spurious warning when `POST /rooms/<room_id>/<membership>/`, `POST /join/<room_id_or_alias>`, or the unspecced `PUT /join/<room_id_or_alias>/<txn_id>` receive an empty HTTP request body. ([\#14600](https://github.com/matrix-org/synapse/issues/14600))
+- Return spec-compliant JSON errors when unknown endpoints are requested. ([\#14620](https://github.com/matrix-org/synapse/issues/14620), [\#14621](https://github.com/matrix-org/synapse/issues/14621))
+- Update html templates to load images over HTTPS. Contributed by @ashfame. ([\#14625](https://github.com/matrix-org/synapse/issues/14625))
+- Fix a long-standing bug where the user directory would return 1 more row than requested. ([\#14631](https://github.com/matrix-org/synapse/issues/14631))
+- Reject invalid read receipt requests with empty room or event IDs. Contributed by Nick @ Beeper (@fizzadar). ([\#14632](https://github.com/matrix-org/synapse/issues/14632))
+- Fix a bug introduced in Synapse 1.67.0 where not specifying a config file or a server URL would lead to the `register_new_matrix_user` script failing. ([\#14637](https://github.com/matrix-org/synapse/issues/14637))
+- Fix a long-standing bug where the user directory and room/user stats might be out of sync. ([\#14639](https://github.com/matrix-org/synapse/issues/14639), [\#14643](https://github.com/matrix-org/synapse/issues/14643))
+- Fix a bug introduced in Synapse 1.72.0 where the background updates to add non-thread unique indexes on receipts would fail if they were previously interrupted. ([\#14650](https://github.com/matrix-org/synapse/issues/14650))
+- Improve validation of field size limits in events. ([\#14664](https://github.com/matrix-org/synapse/issues/14664))
+- Fix bugs introduced in Synapse 1.55.0 and 1.69.0 where application services would not be notified of events in the correct rooms, due to stale caches. ([\#14670](https://github.com/matrix-org/synapse/issues/14670))
+
+
+Improved Documentation
+----------------------
+
+- Update worker settings for `pusher` and `federation_sender` functionality. ([\#14493](https://github.com/matrix-org/synapse/issues/14493))
+- Add links to third party package repositories, and point to the bug which highlights Ubuntu's out-of-date packages. ([\#14517](https://github.com/matrix-org/synapse/issues/14517))
+- Remove old, incorrect minimum postgres version note and replace with a link to the [Dependency Deprecation Policy](https://matrix-org.github.io/synapse/v1.73/deprecation_policy.html). ([\#14590](https://github.com/matrix-org/synapse/issues/14590))
+- Add Single-Sign On setup instructions for Mastodon-based instances. ([\#14594](https://github.com/matrix-org/synapse/issues/14594))
+- Change `turn_allow_guests` example value to lowercase `true`. ([\#14634](https://github.com/matrix-org/synapse/issues/14634))
+
+
+Internal Changes
+----------------
+
+- Optimise push badge count calculations. Contributed by Nick @ Beeper (@fizzadar). ([\#14255](https://github.com/matrix-org/synapse/issues/14255))
+- Faster remote room joins: stream the un-partial-stating of rooms over replication. ([\#14473](https://github.com/matrix-org/synapse/issues/14473), [\#14474](https://github.com/matrix-org/synapse/issues/14474))
+- Share the `ClientRestResource` for both workers and the main process. ([\#14528](https://github.com/matrix-org/synapse/issues/14528))
+- Add `--editable` flag to `complement.sh` which uses an editable install of Synapse for faster turn-around times whilst developing iteratively. ([\#14548](https://github.com/matrix-org/synapse/issues/14548))
+- Faster joins: use servers list approximation to send read receipts when in partial state instead of waiting for the full state of the room. ([\#14549](https://github.com/matrix-org/synapse/issues/14549))
+- Modernize unit tests configuration related to workers. ([\#14568](https://github.com/matrix-org/synapse/issues/14568))
+- Bump jsonschema from 4.17.0 to 4.17.3. ([\#14591](https://github.com/matrix-org/synapse/issues/14591))
+- Fix Rust lint CI. ([\#14602](https://github.com/matrix-org/synapse/issues/14602))
+- Bump JasonEtco/create-an-issue from 2.5.0 to 2.8.1. ([\#14607](https://github.com/matrix-org/synapse/issues/14607))
+- Alter some unit test environment parameters to decrease time spent running tests. ([\#14610](https://github.com/matrix-org/synapse/issues/14610))
+- Switch to Go recommended installation method for `gotestfmt` template in CI. ([\#14611](https://github.com/matrix-org/synapse/issues/14611))
+- Bump phonenumbers from 8.13.0 to 8.13.1. ([\#14612](https://github.com/matrix-org/synapse/issues/14612))
+- Bump types-setuptools from 65.5.0.3 to 65.6.0.1. ([\#14613](https://github.com/matrix-org/synapse/issues/14613))
+- Bump twine from 4.0.1 to 4.0.2. ([\#14614](https://github.com/matrix-org/synapse/issues/14614))
+- Bump types-requests from 2.28.11.2 to 2.28.11.5. ([\#14615](https://github.com/matrix-org/synapse/issues/14615))
+- Bump cryptography from 38.0.3 to 38.0.4. ([\#14616](https://github.com/matrix-org/synapse/issues/14616))
+- Remove useless cargo install with apt from Dockerfile. ([\#14636](https://github.com/matrix-org/synapse/issues/14636))
+- Bump certifi from 2021.10.8 to 2022.12.7. ([\#14645](https://github.com/matrix-org/synapse/issues/14645))
+- Bump flake8-bugbear from 22.10.27 to 22.12.6. ([\#14656](https://github.com/matrix-org/synapse/issues/14656))
+- Bump packaging from 21.3 to 22.0. ([\#14657](https://github.com/matrix-org/synapse/issues/14657))
+- Bump types-pillow from 9.3.0.1 to 9.3.0.4. ([\#14658](https://github.com/matrix-org/synapse/issues/14658))
+- Bump serde from 1.0.148 to 1.0.150. ([\#14659](https://github.com/matrix-org/synapse/issues/14659))
+- Bump phonenumbers from 8.13.1 to 8.13.2. ([\#14660](https://github.com/matrix-org/synapse/issues/14660))
+- Bump authlib from 1.1.0 to 1.2.0. ([\#14661](https://github.com/matrix-org/synapse/issues/14661))
+- Move `StateFilter` to `synapse.types`. ([\#14668](https://github.com/matrix-org/synapse/issues/14668))
+- Improve type hints. ([\#14597](https://github.com/matrix-org/synapse/issues/14597), [\#14646](https://github.com/matrix-org/synapse/issues/14646), [\#14671](https://github.com/matrix-org/synapse/issues/14671))
+
+
+Synapse 1.73.0 (2022-12-06)
+===========================
+
+Please note that legacy Prometheus metric names have been removed in this release; see [the upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.73/docs/upgrade.md#legacy-prometheus-metric-names-have-now-been-removed) for more details.
+
+No significant changes since 1.73.0rc2.
+
+
+Synapse 1.73.0rc2 (2022-12-01)
+==============================
 
 Bugfixes
 --------
 

@@ -17,7 +94,7 @@ Features
 
 - Speed-up `/messages` with `filter_events_for_client` optimizations. ([\#14527](https://github.com/matrix-org/synapse/issues/14527))
 - Improve DB performance by reducing amount of data that gets read in `device_lists_changes_in_room`. ([\#14534](https://github.com/matrix-org/synapse/issues/14534))
-- Adds support for handling avatar in SSO login. Contributed by @ashfame. ([\#13917](https://github.com/matrix-org/synapse/issues/13917))
+- Add support for handling avatar in SSO OIDC login. Contributed by @ashfame. ([\#13917](https://github.com/matrix-org/synapse/issues/13917))
 - Move MSC3030 `/timestamp_to_event` endpoints to stable `v1` location (`/_matrix/client/v1/rooms/<roomID>/timestamp_to_event?ts=<timestamp>&dir=<direction>`, `/_matrix/federation/v1/timestamp_to_event/<roomID>?ts=<timestamp>&dir=<direction>`). ([\#14471](https://github.com/matrix-org/synapse/issues/14471))
 - Reduce database load of [Client-Server endpoints](https://spec.matrix.org/v1.5/client-server-api/#aggregations) which return bundled aggregations. ([\#14491](https://github.com/matrix-org/synapse/issues/14491), [\#14508](https://github.com/matrix-org/synapse/issues/14508), [\#14510](https://github.com/matrix-org/synapse/issues/14510))
 - Add unstable support for an Extensible Events room version (`org.matrix.msc1767.10`) via [MSC1767](https://github.com/matrix-org/matrix-spec-proposals/pull/1767), [MSC3931](https://github.com/matrix-org/matrix-spec-proposals/pull/3931), [MSC3932](https://github.com/matrix-org/matrix-spec-proposals/pull/3932), and [MSC3933](https://github.com/matrix-org/matrix-spec-proposals/pull/3933). ([\#14520](https://github.com/matrix-org/synapse/issues/14520), [\#14521](https://github.com/matrix-org/synapse/issues/14521), [\#14524](https://github.com/matrix-org/synapse/issues/14524))
Cargo.lock (12 changes, generated)

@@ -37,9 +37,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
 [[package]]
 name = "blake2"
-version = "0.10.5"
+version = "0.10.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b12e5fd123190ce1c2e559308a94c9bacad77907d4c6005d9e58fe1a0689e55e"
+checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
 dependencies = [
  "digest",
 ]

@@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
 name = "serde"
-version = "1.0.148"
+version = "1.0.151"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e53f64bb4ba0191d6d0676e1b141ca55047d83b74f5607e6d8eb88126c52c2dc"
+checksum = "97fed41fc1a24994d044e6db6935e69511a1153b52c15eb42493b26fa87feba0"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.148"
+version = "1.0.151"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a55492425aa53521babf6137309e7d34c20bbfbbfcfe2c7f3a047fd1f6b92c0c"
+checksum = "255abe9a125a985c05190d687b320c12f9b1f0b99445e608c21ba0782c719ad8"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1 +0,0 @@
-Optimise push badge count calculations. Contributed by Nick @ Beeper (@fizzadar).

changelog.d/14263.misc (new file, 1 line)
@@ -0,0 +1 @@
+Improve performance of the `/hierarchy` endpoint.

@@ -1 +0,0 @@
-Stop using deprecated `keyIds` parameter when calling `/_matrix/key/v2/server`.

@@ -1 +0,0 @@
-Update worker settings for `pusher` and `federation_sender` functionality.

@@ -1 +0,0 @@
-Add links to third party package repositories, and point to the bug which highlights Ubuntu's out-of-date packages.

@@ -1 +0,0 @@
-Stop using deprecated `keyIds` parameter when calling `/_matrix/key/v2/server`.

@@ -1 +0,0 @@
-Share the `ClientRestResource` for both workers and the main process.

changelog.d/14545.misc (new file, 1 line)
@@ -0,0 +1 @@
+Faster remote room joins: stream the un-partial-stating of events over replication.

changelog.d/14546.misc (new file, 1 line)
@@ -0,0 +1 @@
+Faster remote room joins: stream the un-partial-stating of events over replication.

@@ -1 +0,0 @@
-Faster joins: use servers list approximation to send read receipts when in partial state instead of waiting for the full state of the room.

@@ -1 +0,0 @@
-Add new `push.enabled` config option to allow opting out of push notification calculation.

@@ -1 +0,0 @@
-Modernize unit tests configuration related to workers.

@@ -1 +0,0 @@
-Advertise support for Matrix 1.5 on `/_matrix/client/versions`.

@@ -1 +0,0 @@
-Bump jsonschema from 4.17.0 to 4.17.3.

@@ -1 +0,0 @@
-Fix a long-standing bug where a device list update might not be sent to clients in certain circumstances.

@@ -1 +0,0 @@
-Fix Rust lint CI.

@@ -1 +0,0 @@
-Bump JasonEtco/create-an-issue from 2.5.0 to 2.8.1.

changelog.d/14644.bugfix (new file, 1 line)
@@ -0,0 +1 @@
+Fix the *MAU Limits* section of the Grafana dashboard relying on a specific `job` name for the workers of a Synapse deployment.

changelog.d/14665.misc (new file, 1 line)
@@ -0,0 +1 @@
+Change `handle_new_client_event` signature so that a 429 does not reach clients on `PartialStateConflictError`, and internally retry when needed instead.

changelog.d/14669.bugfix (new file, 1 line)
@@ -0,0 +1 @@
+Fix a bug introduced in Synapse 1.70.0 which could cause spurious `UNIQUE constraint failed` errors in the `rotate_notifs` background job.

changelog.d/14672.misc (new file, 1 line)
@@ -0,0 +1 @@
+Remove dependency on jQuery on reCAPTCHA page.

changelog.d/14673.doc (new file, 1 line)
@@ -0,0 +1 @@
+Declare support for Python 3.11.

changelog.d/14674.doc (new file, 1 line)
@@ -0,0 +1 @@
+Fix `target_memory_usage` being used in the description for the actual `cache_autotune` sub-option `target_cache_memory_usage`.

changelog.d/14676.misc (new file, 1 line)
@@ -0,0 +1 @@
+Faster joins: make `compute_state_after_events` consistent with other state-fetching functions that take a `StateFilter`.

changelog.d/14681.misc (new file, 1 line)
@@ -0,0 +1 @@
+Add missing type hints.

changelog.d/14685.misc (new file, 1 line)
@@ -0,0 +1 @@
+Improve type annotations for the helper methods on a `CachedFunction`.

changelog.d/14693.misc (new file, 1 line)
@@ -0,0 +1 @@
+Bump JasonEtco/create-an-issue from 2.8.1 to 2.8.2.

changelog.d/14695.misc (new file, 1 line)
@@ -0,0 +1 @@
+Bump blake2 from 0.10.5 to 0.10.6.

changelog.d/14697.misc (new file, 1 line)
@@ -0,0 +1 @@
+Bump serde from 1.0.150 to 1.0.151.

changelog.d/14699.misc (new file, 1 line)
@@ -0,0 +1 @@
+Bump hiredis from 2.0.0 to 2.1.0.

changelog.d/14700.misc (new file, 1 line)
@@ -0,0 +1 @@
+Bump types-jsonschema from 4.17.0.1 to 4.17.0.2.

changelog.d/14701.misc (new file, 1 line)
@@ -0,0 +1 @@
+Bump sentry-sdk from 1.11.1 to 1.12.0.

changelog.d/14702.misc (new file, 1 line)
@@ -0,0 +1 @@
+Bump types-setuptools from 65.6.0.1 to 65.6.0.2.

changelog.d/14707.misc (new file, 1 line)
@@ -0,0 +1 @@
+Add `.direnv/` directory to .gitignore to prevent local state generated by the [direnv](https://direnv.net/) development tool from being committed.
@@ -1008,8 +1008,7 @@
         "mode": "absolute",
         "steps": [
           {
-            "color": "green",
-            "value": null
+            "color": "green"
           },
           {
             "color": "red",

@@ -1681,8 +1680,7 @@
         "mode": "absolute",
         "steps": [
           {
-            "color": "green",
-            "value": null
+            "color": "green"
           },
           {
             "color": "red",

@@ -2533,8 +2531,7 @@
         "mode": "absolute",
         "steps": [
           {
-            "color": "green",
-            "value": null
+            "color": "green"
           },
           {
             "color": "red",

@@ -11296,7 +11293,7 @@
           "uid": "$datasource"
         },
         "editorMode": "code",
-        "expr": "synapse_admin_mau_max{instance=\"$instance\", job=~\"(hhs_)?synapse\"}",
+        "expr": "max(synapse_admin_mau_max{instance=\"$instance\"})",
         "format": "time_series",
         "interval": "",
         "intervalFactor": 1,

@@ -11310,7 +11307,7 @@
           "uid": "$datasource"
         },
         "editorMode": "code",
-        "expr": "synapse_admin_mau_current{instance=\"$instance\", job=~\"(hhs_)?synapse\"}",
+        "expr": "max(synapse_admin_mau_current{instance=\"$instance\"})",
         "hide": false,
         "legendFormat": "Current",
         "range": true,

@@ -12760,6 +12757,6 @@
   "timezone": "",
   "title": "Synapse",
   "uid": "000000012",
-  "version": 149,
+  "version": 150,
   "weekStart": ""
 }
debian/changelog (14 changes, vendored)

@@ -1,3 +1,17 @@
+matrix-synapse-py3 (1.74.0~rc1) stable; urgency=medium
+
+  * New dependency on libicu-dev to provide improved results for user
+    search.
+  * New Synapse release 1.74.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Dec 2022 13:30:01 +0000
+
 matrix-synapse-py3 (1.73.0) stable; urgency=medium
 
   * New Synapse release 1.73.0.
 
  -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Dec 2022 11:48:56 +0000
 
 matrix-synapse-py3 (1.73.0~rc2) stable; urgency=medium
 
   * New Synapse release 1.73.0rc2.
debian/control (2 changes, vendored)

@@ -8,6 +8,8 @@ Build-Depends:
  dh-virtualenv (>= 1.1),
  libsystemd-dev,
  libpq-dev,
+ libicu-dev,
+ pkg-config,
  lsb-release,
  python3-dev,
  python3,
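The new `libicu-dev` and `pkg-config` build dependencies support the ICU-backed user search from this release (the `pyicu` extra added to `poetry.lock` later in this diff). A hedged sketch of opting in on a pip-based install (PyPI package name assumed to be `matrix-synapse`, Debian package names shown):

```sh
# ICU headers are needed to compile pyicu from source.
sudo apt-get install libicu-dev pkg-config
pip install "matrix-synapse[user-search]"
```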
@@ -43,7 +43,7 @@ RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
   apt-get update -qq && apt-get install -yqq \
-    build-essential cargo git libffi-dev libssl-dev \
+    build-essential git libffi-dev libssl-dev \
   && rm -rf /var/lib/apt/lists/*
 
 # We install poetry in its own build stage to avoid its dependencies conflicting with

@@ -97,6 +97,8 @@ RUN \
     zlib1g-dev \
     git \
     curl \
+    libicu-dev \
+    pkg-config \
   && rm -rf /var/lib/apt/lists/*
@@ -84,6 +84,8 @@ RUN apt-get update -qq -o Acquire::Languages=none \
     python3-venv \
     sqlite3 \
     libpq-dev \
+    libicu-dev \
+    pkg-config \
     xmlsec1
 
 # Install rust and ensure it's in the PATH
@@ -1,6 +1,7 @@
 # syntax=docker/dockerfile:1
 
 ARG SYNAPSE_VERSION=latest
+ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
 
 # first of all, we create a base image with an nginx which we can copy into the
 # target image. For repeated rebuilds, this is much faster than apt installing

@@ -23,7 +24,7 @@ FROM debian:bullseye-slim AS deps_base
 FROM redis:6-bullseye AS redis_base
 
 # now build the final image, based on the regular Synapse docker image
-FROM matrixdotorg/synapse:$SYNAPSE_VERSION
+FROM $FROM
 
 # Install supervisord with pip instead of apt, to avoid installing a second
 # copy of python.
@@ -7,8 +7,9 @@
 # https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
 
 ARG SYNAPSE_VERSION=latest
+ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
 
-FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION
+FROM $FROM
 # First of all, we copy postgres server from the official postgres image,
 # since for repeated rebuilds, this is much faster than apt installing
 # postgres each time.
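Parameterising the base image as `ARG FROM` means derived images no longer have to build on the published `matrixdotorg/synapse` image. A hypothetical build sequence (the Dockerfile paths and image tags here are illustrative, not taken from this diff):

```sh
# Build a local Synapse image, then layer the workers image on top of it
# by overriding the new FROM build argument.
docker build -f docker/Dockerfile -t localhost/synapse:dev .
docker build -f docker/Dockerfile-workers \
  --build-arg FROM=localhost/synapse:dev \
  -t localhost/synapse-workers:dev .
```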
@@ -102,6 +102,8 @@ experimental_features:
 {% endif %}
   # Filtering /messages by relation type.
   msc3874_enabled: true
+  # Enable removing account data support
+  msc3391_enabled: true
 
 server_notices:
   system_mxid_localpart: _server
docker/editable.Dockerfile (new file, 75 lines)

@@ -0,0 +1,75 @@
+# syntax=docker/dockerfile:1
+# This dockerfile builds an editable install of Synapse.
+#
+# Used by `complement.sh`. Not suitable for production use.
+
+ARG PYTHON_VERSION=3.9
+
+###
+### Stage 0: generate requirements.txt
+###
+# We hardcode the use of Debian bullseye here because this could change upstream
+# and other Dockerfiles used for testing are expecting bullseye.
+FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
+
+# Install Rust and other dependencies (stolen from normal Dockerfile)
+# install the OS build deps
+RUN \
+  --mount=type=cache,target=/var/cache/apt,sharing=locked \
+  --mount=type=cache,target=/var/lib/apt,sharing=locked \
+  apt-get update -qq && apt-get install -yqq \
+    build-essential \
+    libffi-dev \
+    libjpeg-dev \
+    libpq-dev \
+    libssl-dev \
+    libwebp-dev \
+    libxml++2.6-dev \
+    libxslt1-dev \
+    openssl \
+    zlib1g-dev \
+    git \
+    curl \
+    gosu \
+    libjpeg62-turbo \
+    libpq5 \
+    libwebp6 \
+    xmlsec1 \
+    libjemalloc2 \
+  && rm -rf /var/lib/apt/lists/*
+ENV RUSTUP_HOME=/rust
+ENV CARGO_HOME=/cargo
+ENV PATH=/cargo/bin:/rust/bin:$PATH
+RUN mkdir /rust /cargo
+RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
+
+
+# Make a base copy of the editable source tree, so that we have something to
+# install and build now — even though it's going to be covered up by a mount
+# at runtime.
+COPY synapse /editable-src/synapse/
+COPY rust /editable-src/rust/
+# ... and what we need to `pip install`.
+COPY pyproject.toml poetry.lock README.rst build_rust.py Cargo.toml Cargo.lock /editable-src/
+
+RUN pip install poetry
+RUN poetry config virtualenvs.create false
+RUN cd /editable-src && poetry install --extras all
+
+# Make copies of useful things for inspection:
+# - the Rust module (must be copied to the editable source tree before startup)
+# - poetry.lock is useful for checking if dependencies have changed.
+RUN cp /editable-src/synapse/synapse_rust.abi3.so /synapse_rust.abi3.so.bak
+RUN cp /editable-src/poetry.lock /poetry.lock.bak
+
+
+### Extra setup from original Dockerfile
+COPY ./docker/start.py /start.py
+COPY ./docker/conf /conf
+
+EXPOSE 8008/tcp 8009/tcp 8448/tcp
+
+ENTRYPOINT ["/start.py"]
+
+HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
+  CMD curl -fSs http://localhost:8008/health || exit 1
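This image backs the new `--editable` flag mentioned in the changelog entry for #14548: the host checkout is mounted over `/editable-src`, so source edits take effect without an image rebuild. A hypothetical invocation (the script path is assumed from the usual Synapse repository layout):

```sh
# Run the Complement test suite against an editable install of the
# current checkout; subsequent runs skip the full rebuild.
./scripts-dev/complement.sh --editable
```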
@@ -590,3 +590,44 @@ oidc_providers:
     display_name_template: "{{ user.first_name }} {{ user.last_name }}"
     email_template: "{{ user.email }}"
 ```
+
+### Mastodon
+
+[Mastodon](https://docs.joinmastodon.org/) instances provide an [OAuth API](https://docs.joinmastodon.org/spec/oauth/), allowing those instances to be used as a single sign-on provider for Synapse.
+
+The first step is to register Synapse as an application with your Mastodon instance, using the [Create an application API](https://docs.joinmastodon.org/methods/apps/#create) (see also [here](https://docs.joinmastodon.org/client/token/)). There are several ways to do this, but in the example below we are using curl.
+
+This example assumes that:
+* the Mastodon instance website URL is `https://your.mastodon.instance.url`, and
+* Synapse will be registered as an app named `my_synapse_app`.
+
+Send the following request, substituting the value of `synapse_public_baseurl` from your Synapse installation.
+```sh
+curl -d "client_name=my_synapse_app&redirect_uris=https://[synapse_public_baseurl]/_synapse/client/oidc/callback" -X POST https://your.mastodon.instance.url/api/v1/apps
+```
+
+You should receive a response similar to the following. Make sure to save it.
+```json
+{"client_id":"someclientid_123","client_secret":"someclientsecret_123","id":"12345","name":"my_synapse_app","redirect_uri":"https://[synapse_public_baseurl]/_synapse/client/oidc/callback","website":null,"vapid_key":"somerandomvapidkey_123"}
+```
+
+As the Synapse login mechanism needs an attribute to uniquely identify users, and Mastodon's endpoint does not return a `sub` property, an alternative `subject_claim` has to be set. Your Synapse configuration should include the following:
+
+```yaml
+oidc_providers:
+  - idp_id: my_mastodon
+    idp_name: "Mastodon Instance Example"
+    discover: false
+    issuer: "https://your.mastodon.instance.url/@admin"
+    client_id: "someclientid_123"
+    client_secret: "someclientsecret_123"
+    authorization_endpoint: "https://your.mastodon.instance.url/oauth/authorize"
+    token_endpoint: "https://your.mastodon.instance.url/oauth/token"
+    userinfo_endpoint: "https://your.mastodon.instance.url/api/v1/accounts/verify_credentials"
+    scopes: ["read"]
+    user_mapping_provider:
+      config:
+        subject_claim: "id"
+```
+
+Note that the fields `client_id` and `client_secret` are taken from the curl response above.
@@ -1,6 +1,7 @@
 # Using Postgres
 
-Synapse supports PostgreSQL versions 10 or later.
+The minimum supported version of PostgreSQL is determined by the [Dependency
+Deprecation Policy](deprecation_policy.md).
 
 ## Install postgres client libraries
@@ -200,7 +200,7 @@ When following this route please make sure that the [Platform-specific prerequisites
 System requirements:
 
 - POSIX-compliant system (tested on Linux & OS X)
-- Python 3.7 or later, up to Python 3.10.
+- Python 3.7 or later, up to Python 3.11.
 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
 
 If building on an uncommon architecture for which pre-built wheels are
@@ -38,7 +38,7 @@ As an example, here is the relevant section of the config file for `matrix.org`.
     turn_uris: [ "turn:turn.matrix.org?transport=udp", "turn:turn.matrix.org?transport=tcp" ]
     turn_shared_secret: "n0t4ctuAllymatr1Xd0TorgSshar3d5ecret4obvIousreAsons"
     turn_user_lifetime: 86400000
-    turn_allow_guests: True
+    turn_allow_guests: true
 
 After updating the homeserver configuration, you must restart synapse:
@@ -79,7 +79,7 @@ Here we can see that the request has been tagged with `GET-37`. (The tag depends
 grep 'GET-37' homeserver.log
 ```
 
-If you want to paste that output into a github issue or matrix room, please remember to surround it with triple-backticks (```) to make it legible (see https://help.github.com/en/articles/basic-writing-and-formatting-syntax#quoting-code).
+If you want to paste that output into a github issue or matrix room, please remember to surround it with triple-backticks (```) to make it legible (see [quoting code](https://help.github.com/en/articles/basic-writing-and-formatting-syntax#quoting-code)).
 
 What do all those fields in the 'Processed' line mean?
@@ -1148,7 +1148,7 @@ number of entries that can be stored.
 * `max_cache_memory_usage` sets a ceiling on how much memory the cache can use before caches begin to be continuously evicted.
   They will continue to be evicted until the memory usage drops below the `target_memory_usage`, set in
   the setting below, or until the `min_cache_ttl` is hit. There is no default value for this option.
-* `target_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
+* `target_cache_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
   for this option.
 * `min_cache_ttl` sets a limit under which newer cache entries are not evicted and is only applied when
   caches are actively being evicted/`max_cache_memory_usage` has been exceeded. This is to protect hot caches
@@ -2501,32 +2501,53 @@ Config settings related to the client/server API
 ---
 ### `room_prejoin_state`
 
-Controls for the state that is shared with users who receive an invite
-to a room. By default, the following state event types are shared with users who
-receive invites to the room:
-- m.room.join_rules
-- m.room.canonical_alias
-- m.room.avatar
-- m.room.encryption
-- m.room.name
-- m.room.create
-- m.room.topic
+This setting controls the state that is shared with users upon receiving an
+invite to a room, or in reply to a knock on a room. By default, the following
+state events are shared with users:
+
+- `m.room.join_rules`
+- `m.room.canonical_alias`
+- `m.room.avatar`
+- `m.room.encryption`
+- `m.room.name`
+- `m.room.create`
+- `m.room.topic`
 
 To change the default behavior, use the following sub-options:
-* `disable_default_event_types`: set to true to disable the above defaults. If this
-  is enabled, only the event types listed in `additional_event_types` are shared.
-  Defaults to false.
-* `additional_event_types`: Additional state event types to share with users when they are invited
-  to a room. By default, this list is empty (so only the default event types are shared).
+* `disable_default_event_types`: boolean. Set to `true` to disable the above
+  defaults. If this is enabled, only the event types listed in
+  `additional_event_types` are shared. Defaults to `false`.
+* `additional_event_types`: A list of additional state events to include in the
+  events to be shared. By default, this list is empty (so only the default event
+  types are shared).
+
+Each entry in this list should be either a single string or a list of two
+strings.
+* A standalone string `t` represents all events with type `t` (i.e.
+  with no restrictions on state keys).
+* A pair of strings `[t, s]` represents a single event with type `t` and
+  state key `s`. The same type can appear in two entries with different state
+  keys: in this situation, both state keys are included in prejoin state.
 
 Example configuration:
 ```yaml
 room_prejoin_state:
-   disable_default_event_types: true
+   disable_default_event_types: false
    additional_event_types:
-     - org.example.custom.event.type
-     - m.room.join_rules
+     # Share all events of type `org.example.custom.event.typeA`
+     - org.example.custom.event.typeA
+     # Share only events of type `org.example.custom.event.typeB` whose
+     # state_key is "foo"
+     - ["org.example.custom.event.typeB", "foo"]
+     # Share only events of type `org.example.custom.event.typeC` whose
+     # state_key is "bar" or "baz"
+     - ["org.example.custom.event.typeC", "bar"]
+     - ["org.example.custom.event.typeC", "baz"]
 ```
+
+*Changed in Synapse 1.74:* admins can filter the events in prejoin state based
+on their state key.
 
 ---
 ### `track_puppeted_user_ips`
@@ -3355,7 +3376,7 @@ Configuration settings related to push notifications
 This setting defines options for push notifications.
 
 This option has a number of sub-options. They are as follows:
-* `enable_push`: Enables or disables push notification calculation. Note, disabling this will also
+* `enabled`: Enables or disables push notification calculation. Note, disabling this will also
   stop unread counts being calculated for rooms. This mode of operation is intended
   for homeservers which may only have bots or appservice users connected, or are otherwise
   not interested in push/unread counters. This is enabled by default.

@@ -3379,7 +3400,7 @@ This option has a number of sub-options. They are as follows:
 Example configuration:
 ```yaml
 push:
-  enable_push: true
+  enabled: true
   include_content: false
   group_unread_count_by_room: false
 ```
mypy.ini (28 changes)

@@ -12,6 +12,7 @@ local_partial_types = True
 no_implicit_optional = True
 disallow_untyped_defs = True
 strict_equality = True
+warn_redundant_casts = True
 
 files =
   docker/,

@@ -35,8 +36,6 @@ exclude = (?x)
   |tests/api/test_ratelimiting.py
   |tests/app/test_openid_listener.py
   |tests/appservice/test_scheduler.py
-  |tests/config/test_cache.py
-  |tests/config/test_tls.py
   |tests/crypto/test_keyring.py
   |tests/events/test_presence_router.py
   |tests/events/test_utils.py

@@ -88,7 +87,13 @@ disallow_untyped_defs = False
 [mypy-tests.*]
 disallow_untyped_defs = False
 
-[mypy-tests.handlers.test_user_directory]
+[mypy-tests.config.*]
+disallow_untyped_defs = True
+
+[mypy-tests.federation.transport.test_client]
+disallow_untyped_defs = True
+
+[mypy-tests.handlers.*]
 disallow_untyped_defs = True
 
 [mypy-tests.metrics.test_background_process_metrics]

@@ -97,28 +102,19 @@ disallow_untyped_defs = True
 [mypy-tests.push.test_bulk_push_rule_evaluator]
 disallow_untyped_defs = True
 
-[mypy-tests.test_server]
+[mypy-tests.rest.*]
 disallow_untyped_defs = True
 
 [mypy-tests.state.test_profile]
 disallow_untyped_defs = True
 
-[mypy-tests.storage.test_id_generators]
+[mypy-tests.storage.*]
 disallow_untyped_defs = True
 
-[mypy-tests.storage.test_profile]
+[mypy-tests.test_server]
 disallow_untyped_defs = True
 
-[mypy-tests.handlers.test_sso]
-disallow_untyped_defs = True
-
-[mypy-tests.storage.test_user_directory]
-disallow_untyped_defs = True
-
-[mypy-tests.rest.*]
-disallow_untyped_defs = True
-
-[mypy-tests.federation.transport.test_client]
+[mypy-tests.types.*]
 disallow_untyped_defs = True
 
 [mypy-tests.util.caches.*]
poetry.lock (296 changes, generated)

@@ -13,8 +13,8 @@ tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900
 tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
 
 [[package]]
-name = "Authlib"
-version = "1.1.0"
+name = "authlib"
+version = "1.2.0"
 description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
 category = "main"
 optional = true

@@ -106,11 +106,11 @@ frozendict = ["frozendict (>=1.0)"]
 
 [[package]]
 name = "certifi"
-version = "2021.10.8"
+version = "2022.12.7"
 description = "Python package for providing Mozilla's CA Bundle."
 category = "main"
 optional = false
-python-versions = "*"
+python-versions = ">=3.6"
 
 [[package]]
 name = "cffi"

@@ -186,7 +186,7 @@ python-versions = "*"
 
 [[package]]
 name = "cryptography"
-version = "38.0.3"
+version = "38.0.4"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 category = "main"
 optional = false

@@ -260,7 +260,7 @@ pyflakes = ">=2.5.0,<2.6.0"
 
 [[package]]
 name = "flake8-bugbear"
-version = "22.10.27"
+version = "22.12.6"
 description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
 category = "dev"
 optional = false

@@ -318,11 +318,11 @@ typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""
 
 [[package]]
 name = "hiredis"
-version = "2.0.0"
+version = "2.1.0"
 description = "Python wrapper for hiredis"
 category = "main"
 optional = true
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [[package]]
 name = "hyperlink"

@@ -633,14 +633,11 @@ tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pyte
 
 [[package]]
 name = "packaging"
-version = "21.3"
+version = "22.0"
 description = "Core utilities for Python packages"
 category = "main"
 optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
+python-versions = ">=3.7"
 
 [[package]]
 name = "parameterized"
@@ -663,7 +660,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
 
 [[package]]
 name = "phonenumbers"
-version = "8.13.0"
+version = "8.13.2"
 description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
 category = "main"
 optional = false

@@ -837,6 +834,14 @@ category = "dev"
 optional = false
 python-versions = ">=3.5"
 
+[[package]]
+name = "pyicu"
+version = "2.10.2"
+description = "Python extension wrapping the ICU C++ API"
+category = "main"
+optional = true
+python-versions = "*"
+
 [[package]]
 name = "pyjwt"
 version = "2.4.0"

@@ -901,17 +906,6 @@ cryptography = ">=38.0.0,<39"
 docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"]
 test = ["flaky", "pretend", "pytest (>=3.0.1)"]
 
-[[package]]
-name = "pyparsing"
-version = "3.0.7"
-description = "Python parsing module"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
-
 [[package]]
 name = "pyrsistent"
 version = "0.18.1"

@@ -1076,7 +1070,7 @@ doc = ["Sphinx", "sphinx-rtd-theme"]
 
 [[package]]
 name = "sentry-sdk"
-version = "1.11.1"
+version = "1.12.0"
 description = "Python client for Sentry (https://sentry.io)"
 category = "main"
 optional = true

@@ -1097,6 +1091,7 @@ falcon = ["falcon (>=1.4)"]
 fastapi = ["fastapi (>=0.79.0)"]
 flask = ["blinker (>=1.1)", "flask (>=0.11)"]
 httpx = ["httpx (>=0.16.0)"]
+opentelemetry = ["opentelemetry-distro (>=0.350b0)"]
 pure-eval = ["asttokens", "executing", "pure-eval"]
 pymongo = ["pymongo (>=3.1)"]
 pyspark = ["pyspark (>=2.4.4)"]

@@ -1295,7 +1290,7 @@ docs = ["sphinx (>=1.4.8)"]
 
 [[package]]
 name = "twine"
-version = "4.0.1"
+version = "4.0.2"
 description = "Collection of utilities for publishing packages on PyPI"
 category = "dev"
 optional = false

@@ -1424,7 +1419,7 @@ python-versions = "*"
 
 [[package]]
 name = "types-jsonschema"
-version = "4.17.0.1"
+version = "4.17.0.2"
 description = "Typing stubs for jsonschema"
 category = "dev"
 optional = false

@@ -1440,7 +1435,7 @@ python-versions = "*"
 
 [[package]]
 name = "types-pillow"
-version = "9.3.0.1"
+version = "9.3.0.4"
 description = "Typing stubs for Pillow"
 category = "dev"
 optional = false

@@ -1475,7 +1470,7 @@ python-versions = "*"
 
 [[package]]
 name = "types-requests"
-version = "2.28.11.2"
+version = "2.28.11.5"
 description = "Typing stubs for requests"
 category = "dev"
 optional = false

@@ -1486,7 +1481,7 @@ types-urllib3 = "<1.27"
 
 [[package]]
 name = "types-setuptools"
-version = "65.5.0.3"
+version = "65.6.0.2"
 description = "Typing stubs for setuptools"
 category = "dev"
 optional = false

@@ -1622,7 +1617,7 @@ docs = ["Sphinx", "repoze.sphinx.autointerface"]
 test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"]
 
 [extras]
-all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler"]
+all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler", "pyicu"]
 cache-memory = ["Pympler"]
 jwt = ["authlib"]
 matrix-synapse-ldap3 = ["matrix-synapse-ldap3"]

@@ -1635,20 +1630,21 @@ sentry = ["sentry-sdk"]
 systemd = ["systemd-python"]
 test = ["parameterized", "idna"]
 url-preview = ["lxml"]
+user-search = ["pyicu"]
 
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.7.1"
-content-hash = "27811bd21d56ceeb0f68ded5a00375efcd1a004928f0736f5b02927ce8594cb0"
+content-hash = "f20007013f33bc35a01e412c48adc62a936030f3074e06286674c5ad7f44d300"
 
 [metadata.files]
 attrs = [
     {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
     {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
 ]
-Authlib = [
-    {file = "Authlib-1.1.0-py2.py3-none-any.whl", hash = "sha256:be4b6a1dea51122336c210a6945b27a105b9ac572baffd15b07bcff4376c1523"},
-    {file = "Authlib-1.1.0.tar.gz", hash = "sha256:0a270c91409fc2b7b0fbee6996e09f2ee3187358762111a9a4225c874b94e891"},
+authlib = [
+    {file = "Authlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a"},
+    {file = "Authlib-1.2.0.tar.gz", hash = "sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d"},
 ]
 automat = [
     {file = "Automat-22.10.0-py2.py3-none-any.whl", hash = "sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180"},

@@ -1709,8 +1705,8 @@ canonicaljson = [
     {file = "canonicaljson-1.6.4.tar.gz", hash = "sha256:6c09b2119511f30eb1126cfcd973a10824e20f1cfd25039cde3d1218dd9c8d8f"},
 ]
 certifi = [
-    {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
-    {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
+    {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
+    {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
 ]
 cffi = [
     {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},

@@ -1788,32 +1784,32 @@ constantly = [
     {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"},
 ]
 cryptography = [
-    {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320"},
-    {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722"},
-    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f"},
-    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828"},
-    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959"},
-    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2"},
-    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c"},
-    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0"},
-    {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748"},
-    {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146"},
-    {file = "cryptography-38.0.3-cp36-abi3-win32.whl", hash = "sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0"},
-    {file = "cryptography-38.0.3-cp36-abi3-win_amd64.whl", hash = "sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220"},
-    {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd"},
-    {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55"},
-    {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b"},
-    {file = "cryptography-38.0.3-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36"},
-    {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d"},
-    {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7"},
-    {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249"},
-    {file = "cryptography-38.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50"},
-    {file = "cryptography-38.0.3-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0"},
-    {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8"},
-    {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436"},
-    {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548"},
-    {file = "cryptography-38.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a"},
-    {file = "cryptography-38.0.3.tar.gz", hash = "sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd"},
+    {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70"},
+    {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb"},
+    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d"},
+    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1"},
+    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8"},
+    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db"},
+    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b"},
+    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c"},
+    {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00"},
+    {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0"},
+    {file = "cryptography-38.0.4-cp36-abi3-win32.whl", hash = "sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744"},
+    {file = "cryptography-38.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d"},
+    {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353"},
+    {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7"},
+    {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd"},
+    {file = "cryptography-38.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2"},
+    {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b"},
+    {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6"},
+    {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876"},
+    {file = "cryptography-38.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b"},
+    {file = "cryptography-38.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285"},
+    {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b"},
+    {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083"},
+    {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee"},
+    {file = "cryptography-38.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9"},
+    {file = "cryptography-38.0.4.tar.gz", hash = "sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290"},
 ]
 defusedxml = [
     {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},

@@ -1836,8 +1832,8 @@ flake8 = [
     {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"},
 ]
 flake8-bugbear = [
-    {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"},
-    {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"},
+    {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"},
+    {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"},
 ]
 flake8-comprehensions = [
     {file = "flake8-comprehensions-3.10.1.tar.gz", hash = "sha256:412052ac4a947f36b891143430fef4859705af11b2572fbb689f90d372cf26ab"},

@@ -1871,47 +1867,94 @@ gitpython = [
|
||||
{file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"},
|
||||
]
|
||||
hiredis = [
|
||||
{file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"},
|
||||
{file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"},
|
||||
{file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"},
|
||||
{file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"},
|
||||
{file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"},
|
||||
{file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"},
|
||||
{file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"},
|
||||
{file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"},
|
||||
{file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"},
|
||||
{file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"},
|
||||
{file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"},
|
||||
{file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"},
|
||||
{file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"},
|
||||
{file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"},
|
||||
{file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"},
|
||||
{file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"},
|
||||
{file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"},
|
||||
{file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"},
|
||||
{file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"},
|
||||
{file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"},
|
||||
{file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"},
|
||||
{file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"},
|
||||
{file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"},
|
||||
{file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"},
|
||||
{file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"},
|
||||
{file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"},
|
||||
{file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"},
|
||||
{file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"},
|
||||
{file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"},
|
||||
{file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"},
|
||||
{file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"},
|
||||
{file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"},
|
||||
{file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"},
|
||||
{file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"},
|
||||
{file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"},
|
||||
{file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"},
|
||||
{file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"},
|
||||
{file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"},
|
||||
{file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"},
|
||||
{file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"},
|
||||
{file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:7b339a7542a3f6a10b3bbc157e4abc9bae9628e2df7faf5f8a32f730014719ae"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dd82370c2f9f804ec617b95d25edb0fd04882251afb2ecdf08b9ced0c3aa4bcc"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:92077511d3a62109d5d11bf584e41264a993ae3c77c72de63c1f741b7809bacb"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6544c7807cbb75bc6ae9ab85773b4413edbcd55342e9e3d7d3f159f677f7428"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8181d73f25943fbdca904154e51b845317103cee08116cfae258f96927ce1e74"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040f861e4e43daa9287f3a85979542f9c7ee8cfab695fa662f3b6186c6f7d5e8"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef5ae8c1af82a8000742003cb16a6fa6c57919abb861ab214dcb27db8573ee64"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b9aa1b0ec46dec5b05dcec22e50bbd4af33da121fca83bd2601dc60c79183f9"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c53c36a630a6c6fd9dfe439f4266e564ca58995015a780c1d964567ebf328466"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05aab35210bd7fbd7bd066efb2a42eb5c2878c2c137a9cff597204be2c07475b"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e6097e1cef647c665f71cd0e58346389580db98365e804f7a9ad5d96e66b7150"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:32f98370efed38088d000df2eb2c8ed43d93d99bbf4a0a740e15eb4a887cc23f"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b85276ed57e0aee8910b48383a38a299851935ba134460bad394988c750985fe"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-win32.whl", hash = "sha256:bd9d99606008a8cfa6b9e950abaa35f5b87496f03e63b73197d02b0fe7ecb6d3"},
|
||||
{file = "hiredis-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:6a8e796c94b7b8c63c99757d6ec2075069e4c362dfb0f130aaf874422bea3e7d"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:e7bb5cab604fc45b45cee40e84e84d9e30eeb34c571a3784392ae658273bbd23"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e0d4b074ff5ebba00933da27a06f3752b8af2448a6aa9dc895d5279f43011530"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f0c2dbaffd4a9e8df04731a012c8a67b7517abec7e53bb12c3cd749865c63428"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c19151e79b36e0d849899a21fc10539aa1903af94b31754bddab1bea876cd508"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08ec41519a533f5cd1f1f8bd1797929358117c8e4570b679b469f768b45b7dbf"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f0db3667fa8abbd37ac66385b460841029033bfc1ba8d7e5b3ff1e01d3346a"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f592d1522b5981890b34b0b814f4bfa4a68b23ee90f538aac321d17e8bf859c8"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dddd2be67de25a62b3bf871f091181c13da3b32186d4be6af49dadbf6fdc266d"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4ee8f6d0774cd6179c625688201e961a2d03da212230adaa2193cfb7a04f9169"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5000942ffb6b6410ccbc87089c15fde5f48bd205664ee8b3067e6b2fb5689485"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:21e0017b8f50abd13b4c4c4218c7dfd5a42623e3255b460dfa5f70b45c4e7c3e"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:40b55fb46fcc78b04190176c0ae28bfa3cc7f418fca9df06c037028af5942b6a"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:24a55169a7f0bd9458935ac644bf8191f127c8aa50cdd70c0b87928cc515cae5"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-win32.whl", hash = "sha256:bb60f79e8c1eb5971b10fd256764ea0c89c4ad2d55ac4379981f678f349411f2"},
|
||||
{file = "hiredis-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:b223668844f26034759a6c24a72f0bb8e4fb64a43b27e2f3e8378639eaac1661"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:7f7e7d91d6533fcb1939d467cf8bfb98640edf715897959f31ae83f5ad29aed3"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531d1d3955244831b69272b993e16f93489ce2dadfdf800ac856dc2d9a43d353"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66ffcbfc4db52dd87cdfd53bda45881ab3ab07c80ec43244fd8d70ee69d42c01"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:023b3b3ac410d6cfdb45ee943b8c528c90379f31419a1fd229888aa2b965732d"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c972385a0647120d4b0fe0e9567257cad7b2577b9f1315815713c571af0e778d"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32893825426e73d57b3290b68110dd76229945e6c79b08a37795f536501935c4"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:262148f9b616c0cdd0f2c6bda45cd0f1ce6ce2d1974efd296b85b44e5c7567c2"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d601c27b9599fe52cade3096351f92f665e527d29af8d3e29353a76bfcf5615"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d248acc7d7713c1b3d48ed8ea67d6ba43b104aa67d63078846a3590adbab6b73"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:969ffe37a8980a6e5404993ccfe605a40fa6732fa6d7b26a1a718c9121197002"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:288d5d0566d3cbcd800e46c7a547428d321842898b8c7de037a7e78b5644e88a"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:06cb776d3cd3cbec86010f1bab6895ee16af8036aae8c3594a5e96c24f0f83a5"},
|
||||
{file = "hiredis-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6766376dc43ef186113422ecacec0ece0d4b12c0e5f4b556669e639b20ccabb1"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:41afba30304adcbe1c93fc8272a7169b7fc4e4d3d470ad8babd391678a519d76"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6df0115f8b0766cd3d12416e2e2e914efed5b1a1a27605c9f37bc92de086877a"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d7d7078f3b841ad86e35459e9f1a49db6d793b796a25fe866333166196d9fec"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:835c4cbf8b38c83240b3eb9bd575cd1bfefe5ea5c46cc5ac2bf2d1f47d1fd696"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:718589c48e97820bdc2a99e2621b5039884cc23199213756054d10cd309ad56c"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2d96be6917ea8f753691a4674f682dd5e145b70edab28c05aa5552ae873e843"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5fe1bb4b1525751f3050337097b3b2bfe445836e59a5a0984928dd0797f9abf"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91dc73310b92b4aeccffdcd4a762955fe71380f5eaa4e242ee95019e41519101"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bb858218de60a930a164a991fff001c70b0c3d923d3ae40fef2acf3321126b00"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:53040c3b3488b52f4609775453fc759262f2885b733150ee2e1d88257fdafed8"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1c9b7d6d7bf35e1e2217b2847710154b11d25bf86b77bb7e190161f8b89917e"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:dfbe939fdddbc7b90cab4124f3ddd6391099fb964f6dab3386aa8cf56f37b5ba"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a51cb4ea466276a845a940931357b4a876f903eabde514ba95e45050e1c2150"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-win32.whl", hash = "sha256:8bce4c687136bf13df76072072b9baadbd52f7d1b143fbbda96387f50e8ebaeb"},
|
||||
{file = "hiredis-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:1f94684b13fbbee1239303018d5ea900d786e486cdb130cde3144d53f4e262e4"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:879668ffab582bdffd9f10f6c8797aac055db183f266e3aa3a6438ff0768bc29"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f1d5a99de0fd02438f251e50ec64936d22d542c8e5d80bdec236f9713eeef334"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab622bcddcf334b4b1fc4b22e163e93160e3afdd7feaedd77ac6f258e0c77b68"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c4f23ff450fb8d73edf06fc7475a4e81a3f9b03a9a04a907ec81c84052fcf"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9f8b8daef346ffc0268d7086c213ab24c2a3fcbd4249eacfbb3635602c79d20"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e2039cdaa2e6656eae4a2e2537ed77e27f29b7487b97ce7ae6a3cb88d01b968"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43d3168da0a81fa0a9e4bc6e14316beac8e5f1b439ca5cc5af7f9a558cfba741"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0021ba034b74c5006f62e4cfdd79d04c7c720731eda256ce29d769ac6483adc3"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39a1bb45bcd698baf70ad4e9a94af164525bf053caea7df3777172d20d69538a"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c1b636b05777536a83b4cced157cbdc2d0012d494a9ec2f7b7e07c54296cd773"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:58a7ceb71f967fcc1878fb64666a12fbc5f243ab00d0653d3752a811941d8261"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c5263c676dc4d55202e7ca0429b949fc6ba7c0dd3a3a2b80538593ab27d82836"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b5879d13025b04903ddf71921812db27fe1156a0952ad253014354d72463aaa9"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-win32.whl", hash = "sha256:9259f637d77544ffeb97acb0a87fdd192a8aced7a2fbd7439160dbee8341d446"},
|
||||
{file = "hiredis-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fb818b6e0981e16dfdfc9e507c9842f8d210e6ecaf3edb8ac3039dbd24768839"},
|
||||
{file = "hiredis-2.1.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:648d4648bf6b3dcc418a974df143b2f96627ab8b50bda23a57759c273880ecfb"},
|
||||
{file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:654949cfc0bc76a5292b6ac111113b2eafb0739e0496495368981ea2e80bf4ec"},
|
||||
{file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2a98b835c2088998a47da51b1b3661b587b2d4b3305d03fc9893888cc2aa54"},
|
||||
{file = "hiredis-2.1.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7222bd9243387d778245619d0ac62d35cf72ee746ec0efb7b9b230ae3e0c3a39"},
|
||||
{file = "hiredis-2.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:778f6de73c3abd67d447a3442f89e7d43a8de1eb5093f416af14dddc1d5c9cb5"},
|
||||
{file = "hiredis-2.1.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c4cfb61fe642f30a22789055847004393bc65b5686988c64191e379ea4ccd069"},
|
||||
{file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03b6bef7eb50415aca87200a511d66a2fd69f1fcc75cfe1408e1201cbe28ddfb"},
|
||||
{file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3195e13a700f6ff35894c4920fcce8f6c2b01cdbc01f76fe567753c495849e9b"},
|
||||
{file = "hiredis-2.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19f724405c808a89db422ed1010caab80a16d3e5b49632356ae7912513b6d58e"},
|
||||
{file = "hiredis-2.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8ecebeff966b412138b0cd105d7572f8d5e65e96355af699863890f8370707e6"},
|
||||
{file = "hiredis-2.1.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4f34eefaf164bf43b29ccc809c168248eb95001837ed0e9e3279891f57ae2fab"},
|
||||
{file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11fad16beb9d623ea423c9129bab0e392ea4c84363d61c125f679be3d029442f"},
|
||||
{file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c763eb9a1414c4d665945c70ae2ef74a843600667b0069fe90e2aabc78e5411"},
|
||||
{file = "hiredis-2.1.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edb7f156a8f8a1999574f27bda67dd2bff2d5b180bb6aed996a1792cafbcc668"},
|
||||
{file = "hiredis-2.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e057d5545189d4c9e22ae0f7dc283ea0a225f56999511022c062cce7f9589d69"},
|
||||
]
|
||||
hyperlink = [
|
||||
{file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"},
|
||||
@@ -2246,8 +2289,8 @@ opentracing = [
|
||||
{file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
|
||||
]
|
||||
packaging = [
|
||||
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
|
||||
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
|
||||
{file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"},
|
||||
{file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"},
|
||||
]
|
||||
parameterized = [
|
||||
{file = "parameterized-0.8.1-py2.py3-none-any.whl", hash = "sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9"},
|
||||
@@ -2258,8 +2301,8 @@ pathspec = [
|
||||
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
|
||||
]
|
||||
phonenumbers = [
|
||||
{file = "phonenumbers-8.13.0-py2.py3-none-any.whl", hash = "sha256:dbaea9e4005a976bcf18fbe2bb87cb9cd0a3f119136f04188ac412d7741cebf0"},
|
||||
{file = "phonenumbers-8.13.0.tar.gz", hash = "sha256:93745d7afd38e246660bb601b07deac54eeb76c8e5e43f5e83333b0383a0a1e4"},
|
||||
{file = "phonenumbers-8.13.2-py2.py3-none-any.whl", hash = "sha256:884b26f775205261f4dc861371dce217c1661a4942fb3ec3624e290fb51869bf"},
|
||||
{file = "phonenumbers-8.13.2.tar.gz", hash = "sha256:0179f688d48c0e7e161eb7b9d86d587940af1f5174f97c1fdfd893c599c0d94a"},
|
||||
]
|
||||
pillow = [
|
||||
{file = "Pillow-9.3.0-1-cp37-cp37m-win32.whl", hash = "sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74"},
|
||||
@@ -2427,6 +2470,9 @@ pygments = [
|
||||
{file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"},
|
||||
{file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"},
|
||||
]
|
||||
pyicu = [
|
||||
{file = "PyICU-2.10.2.tar.gz", hash = "sha256:0c3309eea7fab6857507ace62403515b60fe096cbfb4f90d14f55ff75c5441c1"},
|
||||
]
|
||||
pyjwt = [
|
||||
{file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"},
|
||||
{file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"},
|
||||
@@ -2455,10 +2501,6 @@ pyopenssl = [
|
||||
{file = "pyOpenSSL-22.1.0-py3-none-any.whl", hash = "sha256:b28437c9773bb6c6958628cf9c3bebe585de661dba6f63df17111966363dd15e"},
|
||||
{file = "pyOpenSSL-22.1.0.tar.gz", hash = "sha256:7a83b7b272dd595222d672f5ce29aa030f1fb837630ef229f62e72e395ce8968"},
|
||||
]
|
||||
pyparsing = [
|
||||
{file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
|
||||
{file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
|
||||
]
|
||||
pyrsistent = [
|
||||
{file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"},
|
||||
{file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"},
|
||||
@@ -2569,8 +2611,8 @@ semantic-version = [
|
||||
{file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"},
|
||||
]
|
||||
sentry-sdk = [
|
||||
{file = "sentry-sdk-1.11.1.tar.gz", hash = "sha256:675f6279b6bb1fea09fd61751061f9a90dca3b5929ef631dd50dc8b3aeb245e9"},
|
||||
{file = "sentry_sdk-1.11.1-py2.py3-none-any.whl", hash = "sha256:8b4ff696c0bdcceb3f70bbb87a57ba84fd3168b1332d493fcd16c137f709578c"},
|
||||
{file = "sentry-sdk-1.12.0.tar.gz", hash = "sha256:dc0fe6ef2f77a3853b399c75c97d87be7666098817c1c314f8fcdf68a6865914"},
|
||||
{file = "sentry_sdk-1.12.0-py2.py3-none-any.whl", hash = "sha256:3c9bc64025976842c1103cd75d45cff94a7c0cc48fa07770d07a09d2ab8dac30"},
|
||||
]
|
||||
service-identity = [
|
||||
{file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"},
|
||||
@@ -2729,8 +2771,8 @@ treq = [
|
||||
{file = "treq-22.2.0.tar.gz", hash = "sha256:df757e3f141fc782ede076a604521194ffcb40fa2645cf48e5a37060307f52ec"},
|
||||
]
|
||||
twine = [
|
||||
{file = "twine-4.0.1-py3-none-any.whl", hash = "sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e"},
|
||||
{file = "twine-4.0.1.tar.gz", hash = "sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0"},
|
||||
{file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"},
|
||||
{file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"},
|
||||
]
|
||||
twisted = [
|
||||
{file = "Twisted-22.10.0-py3-none-any.whl", hash = "sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0"},
|
||||
@@ -2801,16 +2843,16 @@ types-ipaddress = [
|
||||
{file = "types_ipaddress-1.0.8-py3-none-any.whl", hash = "sha256:4933b74da157ba877b1a705d64f6fa7742745e9ffd65e51011f370c11ebedb55"},
|
||||
]
|
||||
types-jsonschema = [
|
||||
{file = "types-jsonschema-4.17.0.1.tar.gz", hash = "sha256:62625d492e4930411a431909ac32301aeab6180500e70ee222f81d43204cfb3c"},
|
||||
{file = "types_jsonschema-4.17.0.1-py3-none-any.whl", hash = "sha256:77badbe3881cbf79ac9561be2be2b1f37ab104b13afd2231840e6dd6e94e63c2"},
|
||||
{file = "types-jsonschema-4.17.0.2.tar.gz", hash = "sha256:8b9e1140d4d780f0f19b5cab1b8a3732e8dd5e49dbc1f174cc0b499125ca6f6c"},
|
||||
{file = "types_jsonschema-4.17.0.2-py3-none-any.whl", hash = "sha256:8fd2f9aea4da54f9a811baa6963aac10fd680c18baa6237392c079b97d152738"},
|
||||
]
|
||||
types-opentracing = [
|
||||
{file = "types-opentracing-2.4.10.tar.gz", hash = "sha256:6101414f3b6d3b9c10f1c510a261e8439b6c8d67c723d5c2872084697b4580a7"},
|
||||
{file = "types_opentracing-2.4.10-py3-none-any.whl", hash = "sha256:66d9cfbbdc4a6f8ca8189a15ad26f0fe41cee84c07057759c5d194e2505b84c2"},
|
||||
]
|
||||
types-pillow = [
|
||||
{file = "types-Pillow-9.3.0.1.tar.gz", hash = "sha256:f3b7cada3fa496c78d75253c6b1f07a843d625f42e5639b320a72acaff6f7cfb"},
|
||||
{file = "types_Pillow-9.3.0.1-py3-none-any.whl", hash = "sha256:79837755fe9659f29efd1016e9903ac4a500e0c73260483f07296bd6ca47668b"},
|
||||
{file = "types-Pillow-9.3.0.4.tar.gz", hash = "sha256:c18d466dc18550d96b8b4a279ff94f0cbad696825b5ad55466604f1daf5709de"},
|
||||
{file = "types_Pillow-9.3.0.4-py3-none-any.whl", hash = "sha256:98b8484ff343676f6f7051682a6cfd26896e993e86b3ce9badfa0ec8750f5405"},
|
||||
]
|
||||
types-psycopg2 = [
|
||||
{file = "types-psycopg2-2.9.21.2.tar.gz", hash = "sha256:bff045579642ce00b4a3c8f2e401b7f96dfaa34939f10be64b0dd3b53feca57d"},
|
||||
@@ -2825,12 +2867,12 @@ types-pyyaml = [
|
||||
{file = "types_PyYAML-6.0.12.2-py3-none-any.whl", hash = "sha256:1e94e80aafee07a7e798addb2a320e32956a373f376655128ae20637adb2655b"},
|
||||
]
|
||||
types-requests = [
|
||||
{file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"},
|
||||
{file = "types_requests-2.28.11.2-py3-none-any.whl", hash = "sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"},
|
||||
{file = "types-requests-2.28.11.5.tar.gz", hash = "sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a"},
|
||||
{file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"},
|
||||
]
|
||||
types-setuptools = [
|
||||
{file = "types-setuptools-65.5.0.3.tar.gz", hash = "sha256:17769171f5f2a2dc69b25c0d3106552a5cda767bbf6b36cb6212b26dae5aa9fc"},
|
||||
{file = "types_setuptools-65.5.0.3-py3-none-any.whl", hash = "sha256:9254c32b0cc91c486548e7d7561243b5bd185402a383e93c6691e1b9bc8d86e2"},
|
||||
{file = "types-setuptools-65.6.0.2.tar.gz", hash = "sha256:ad60ccf01d626de9762224448f36c13e0660e863afd6dc11d979b3739a6c7d24"},
|
||||
{file = "types_setuptools-65.6.0.2-py3-none-any.whl", hash = "sha256:2c2b4f756f79778074ce2d21f745aa737b12160d9f8dfa274f47a7287c7a2fee"},
|
||||
]
|
||||
types-urllib3 = [
|
||||
{file = "types-urllib3-1.26.10.tar.gz", hash = "sha256:a26898f530e6c3f43f25b907f2b884486868ffd56a9faa94cbf9b3eb6e165d6a"},
|
||||
|
||||
@@ -57,7 +57,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
version = "1.73.0rc2"
version = "1.74.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -141,7 +141,8 @@ pyasn1 = ">=0.1.9"
pyasn1-modules = ">=0.0.7"
bcrypt = ">=3.1.7"
Pillow = ">=5.4.0"
sortedcontainers = ">=1.4.4"
# We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
sortedcontainers = ">=1.5.2"
pymacaroons = ">=0.13.0"
msgpack = ">=0.5.2"
phonenumbers = ">=8.2.0"
@@ -207,6 +208,7 @@ hiredis = { version = "*", optional = true }
Pympler = { version = "*", optional = true }
parameterized = { version = ">=0.7.4", optional = true }
idna = { version = ">=2.5", optional = true }
pyicu = { version = ">=2.10.2", optional = true }

[tool.poetry.extras]
# NB: Packages that should be part of `pip install matrix-synapse[all]` need to be specified
@@ -229,6 +231,10 @@ redis = ["txredisapi", "hiredis"]
# Required to use experimental `caches.track_memory_usage` config option.
cache-memory = ["pympler"]
test = ["parameterized", "idna"]
# Allows for better search for international characters in the user directory. This
# requires libicu's development headers installed on the system (e.g. libicu-dev on
# Debian-based distributions).
user-search = ["pyicu"]

# The duplication here is awful. I hate hate hate hate hate it. However, for now I want
# to ensure you can still `pip install matrix-synapse[all]` like today. Two motivations:
@@ -260,6 +266,8 @@ all = [
"txredisapi", "hiredis",
# cache-memory
"pympler",
# improved user search
"pyicu",
# omitted:
# - test: it's useful to have this separate from dev deps in the olddeps job
# - systemd: this is a system-based requirement

@@ -53,6 +53,12 @@ Run the complement test suite on Synapse.
  Only build the Docker images. Don't actually run Complement.
  Conflicts with -f/--fast.

  -e, --editable
      Use an editable build of Synapse, rebuilding the image if necessary.
      This is suitable for use in development where a fast turn-around time
      is important.
      Not suitable for use in CI in case the editable environment is impure.

For help on arguments to 'go test', run 'go help testflag'.
EOF
}
@@ -73,6 +79,9 @@ while [ $# -ge 1 ]; do
        "--build-only")
            skip_complement_run=1
            ;;
        "-e"|"--editable")
            use_editable_synapse=1
            ;;
        *)
            # unknown arg: presumably an argument to gotest. break the loop.
            break
@@ -96,25 +105,76 @@ if [[ -z "$COMPLEMENT_DIR" ]]; then
    echo "Checkout available at 'complement-${COMPLEMENT_REF}'"
fi

if [ -n "$use_editable_synapse" ]; then
    if [[ -e synapse/synapse_rust.abi3.so ]]; then
        # In an editable install, back up the host's compiled Rust module to prevent
        # inconvenience; the container will overwrite the module with its own copy.
        mv -n synapse/synapse_rust.abi3.so synapse/synapse_rust.abi3.so~host
        # And restore it on exit:
        synapse_pkg=`realpath synapse`
        trap "mv -f '$synapse_pkg/synapse_rust.abi3.so~host' '$synapse_pkg/synapse_rust.abi3.so'" EXIT
    fi

    editable_mount="$(realpath .):/editable-src:z"
    if docker inspect complement-synapse-editable &>/dev/null; then
        # complement-synapse-editable already exists: see if we can still use it:
        # - The Rust module must still be importable; it will fail to import if the Rust source has changed.
        # - The Poetry lock file must be the same (otherwise we assume dependencies have changed)

        # First set up the module in the right place for an editable installation.
        docker run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so

        if (docker run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
            && docker run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
            skip_docker_build=1
        else
            echo "Editable Synapse image is stale. Will rebuild."
            unset skip_docker_build
        fi
    fi
fi

if [ -z "$skip_docker_build" ]; then
|
||||
# Build the base Synapse image from the local checkout
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
|
||||
docker build -t matrixdotorg/synapse \
|
||||
--build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
|
||||
--build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
|
||||
-f "docker/Dockerfile" .
|
||||
echo_if_github "::endgroup::"
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
|
||||
# Build the workers docker image (from the base Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
|
||||
docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
|
||||
echo_if_github "::endgroup::"
|
||||
# Build a special image designed for use in development with editable
|
||||
# installs.
|
||||
docker build -t synapse-editable \
|
||||
-f "docker/editable.Dockerfile" .
|
||||
|
||||
# Build the unified Complement image (from the worker Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: complement/Dockerfile"
|
||||
docker build -t complement-synapse \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
echo_if_github "::endgroup::"
|
||||
docker build -t synapse-workers-editable \
|
||||
--build-arg FROM=synapse-editable \
|
||||
-f "docker/Dockerfile-workers" .
|
||||
|
||||
docker build -t complement-synapse-editable \
|
||||
--build-arg FROM=synapse-workers-editable \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
|
||||
# Prepare the Rust module
|
||||
docker run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
|
||||
|
||||
else
|
||||
|
||||
# Build the base Synapse image from the local checkout
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
|
||||
docker build -t matrixdotorg/synapse \
|
||||
--build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
|
||||
--build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
|
||||
-f "docker/Dockerfile" .
|
||||
echo_if_github "::endgroup::"
|
||||
|
||||
# Build the workers docker image (from the base Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
|
||||
docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
|
||||
echo_if_github "::endgroup::"
|
||||
|
||||
# Build the unified Complement image (from the worker Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: complement/Dockerfile"
|
||||
docker build -t complement-synapse \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
echo_if_github "::endgroup::"
|
||||
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -n "$skip_complement_run" ]; then
|
||||
@@ -123,10 +183,14 @@ if [ -n "$skip_complement_run" ]; then
|
||||
fi
|
||||
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse-editable
|
||||
export COMPLEMENT_HOST_MOUNTS="$editable_mount"
|
||||
fi
|
||||
|
||||
extra_test_args=()
|
||||
|
||||
test_tags="synapse_blacklist,msc3787,msc3874"
|
||||
test_tags="synapse_blacklist,msc3787,msc3874,msc3391"
|
||||
|
||||
# All environment variables starting with PASS_ will be shared.
|
||||
# (The prefix is stripped off before reaching the container.)
|
||||
|
||||
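The PASS_ convention described above amounts to a one-line filter over the environment; a minimal sketch of the idea in Python (the helper name is illustrative, not part of the script):

import os
from typing import Dict

def collect_pass_env() -> Dict[str, str]:
    # Only variables explicitly opted in via a PASS_ prefix are shared with
    # the container, and the prefix is stripped: PASS_FOO=bar becomes FOO=bar.
    return {
        name[len("PASS_"):]: value
        for name, value in os.environ.items()
        if name.startswith("PASS_")
    }
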
@@ -27,7 +27,7 @@ import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
from typing import Any, List, Optional, cast
from typing import Any, List, Optional

import attr
import click
@@ -174,9 +174,7 @@ def _prepare() -> None:
        click.get_current_context().abort()

    # Switch to the release branch.
    # Cast safety: parse() won't return a version.LegacyVersion from our
    # version string format.
    parsed_new_version = cast(version.Version, version.parse(new_version))
    parsed_new_version = version.parse(new_version)

    # We assume for debian changelogs that we only do RCs or full releases.
    assert not parsed_new_version.is_devrelease

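The removed cast reflects a behaviour guarantee in recent packaging releases: since packaging 22.0 dropped LegacyVersion, parse() either returns a Version or raises InvalidVersion, so the result can be used directly. A quick illustration:

from packaging import version

v = version.parse("1.74.0rc1")
assert isinstance(v, version.Version)  # no cast needed any more
assert v.is_prerelease and not v.is_devrelease
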
25
stubs/icu.pyi
Normal file
@@ -0,0 +1,25 @@
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Stub for PyICU.

class Locale:
    @staticmethod
    def getDefault() -> Locale: ...

class BreakIterator:
    @staticmethod
    def createWordInstance(locale: Locale) -> BreakIterator: ...
    def setText(self, text: str) -> None: ...
    def nextBoundary(self) -> int: ...
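For orientation, a minimal sketch of how this stubbed API is used, assuming PyICU is installed (split_words is an illustrative helper, not part of the stub): createWordInstance yields an iterator whose nextBoundary() walks successive word boundaries until the text runs out.

import icu  # PyICU
from typing import List

def split_words(text: str) -> List[str]:
    """Split text on ICU word boundaries for the default locale."""
    breaker = icu.BreakIterator.createWordInstance(icu.Locale.getDefault())
    breaker.setText(text)
    words = []
    start = 0
    while True:
        end = breaker.nextBoundary()
        if end < 0:  # BreakIterator.DONE (-1) signals the end of the text
            break
        piece = text[start:end].strip()
        if piece:
            words.append(piece)
        start = end
    return words
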
@@ -45,7 +45,7 @@ class PushRuleEvaluator:
        notification_power_levels: Mapping[str, int],
        related_events_flattened: Mapping[str, Mapping[str, str]],
        related_event_match_enabled: bool,
        room_version_feature_flags: list[str],
        room_version_feature_flags: Tuple[str, ...],
        msc3931_enabled: bool,
    ): ...
    def run(

@@ -222,6 +222,7 @@ def main() -> None:

    args = parser.parse_args()

    config: Optional[Dict[str, Any]] = None
    if "config" in args and args.config:
        config = yaml.safe_load(args.config)

@@ -229,7 +230,7 @@ def main() -> None:
        secret = args.shared_secret
    else:
        # argparse should check that we have either config or shared secret
        assert config
        assert config is not None

        secret = config.get("registration_shared_secret")
        secret_file = config.get("registration_shared_secret_path")
@@ -244,7 +245,7 @@ def main() -> None:

    if args.server_url:
        server_url = args.server_url
    elif config:
    elif config is not None:
        server_url = _find_client_listener(config)
        if not server_url:
            server_url = _DEFAULT_SERVER_URL

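The switch from a truthiness test to an identity test matters for empty config files: yaml.safe_load returns None when the file is empty, but {} when it contains an empty mapping, and only the former means no config was supplied. For example:

import yaml

assert yaml.safe_load("") is None   # empty file: no config at all
assert yaml.safe_load("{}") == {}   # a config was supplied, it is just empty

config = yaml.safe_load("{}")
# `assert config` would wrongly reject this empty-but-present config,
# because an empty dict is falsy; the identity check does not:
assert config is not None
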
@@ -152,6 +152,7 @@ class EduTypes:

class RejectedReason:
    AUTH_ERROR: Final = "auth_error"
    OVERSIZED_EVENT: Final = "oversized_event"


class RoomCreationPreset:
@@ -230,6 +231,9 @@ class EventContentFields:
    # The authorising user for joining a restricted room.
    AUTHORISING_USER: Final = "join_authorised_via_users_server"

    # an unspecced field added to to-device messages to identify them uniquely-ish
    TO_DEVICE_MSGID: Final = "org.matrix.msgid"


class RoomTypes:
    """Understood values of the room_type field of m.room.create events."""

@@ -300,10 +300,8 @@ class InteractiveAuthIncompleteError(Exception):
class UnrecognizedRequestError(SynapseError):
    """An error indicating we don't understand the request you're trying to make"""

    def __init__(
        self, msg: str = "Unrecognized request", errcode: str = Codes.UNRECOGNIZED
    ):
        super().__init__(400, msg, errcode)
    def __init__(self, msg: str = "Unrecognized request", code: int = 400):
        super().__init__(code, msg, Codes.UNRECOGNIZED)


class NotFoundError(SynapseError):
@@ -426,8 +424,17 @@ class ResourceLimitError(SynapseError):
class EventSizeError(SynapseError):
    """An error raised when an event is too big."""

    def __init__(self, msg: str):
    def __init__(self, msg: str, unpersistable: bool):
        """
        unpersistable:
            if True, the PDU must not be persisted, not even as a rejected PDU
            when received over federation.
            This is notably true when the entire PDU exceeds the size limit
            for a PDU (as opposed to an individual key's size limit being exceeded).
        """

        super().__init__(413, msg, Codes.TOO_LARGE)
        self.unpersistable = unpersistable


class LoginError(SynapseError):

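In effect the two UnrecognizedRequestError constructors swap which part is configurable: previously the HTTP status was pinned to 400 and the Matrix errcode varied; now the errcode is always M_UNRECOGNIZED and callers pick the status. A sketch of the new call shape (the 404 case is illustrative):

def reject_unknown_endpoint() -> None:
    # An unknown endpoint can now answer 404 while still reporting the
    # fixed M_UNRECOGNIZED errcode; callers choose the HTTP status.
    raise UnrecognizedRequestError("Unrecognized request", code=404)
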
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Callable, Dict, List, Optional
from typing import Callable, Dict, Optional, Tuple

import attr

@@ -103,7 +103,7 @@ class RoomVersion:
    # is not enough to mark it "supported": the push rule evaluator also needs to
    # support the flag. Unknown flags are ignored by the evaluator, making conditions
    # fail if used.
    msc3931_push_features: List[str]  # values from PushRuleRoomFlag
    msc3931_push_features: Tuple[str, ...]  # values from PushRuleRoomFlag


class RoomVersions:
@@ -124,7 +124,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    V2 = RoomVersion(
        "2",
@@ -143,7 +143,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    V3 = RoomVersion(
        "3",
@@ -162,7 +162,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    V4 = RoomVersion(
        "4",
@@ -181,7 +181,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    V5 = RoomVersion(
        "5",
@@ -200,7 +200,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    V6 = RoomVersion(
        "6",
@@ -219,7 +219,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    MSC2176 = RoomVersion(
        "org.matrix.msc2176",
@@ -238,7 +238,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    V7 = RoomVersion(
        "7",
@@ -257,7 +257,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    V8 = RoomVersion(
        "8",
@@ -276,7 +276,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    V9 = RoomVersion(
        "9",
@@ -295,7 +295,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    MSC3787 = RoomVersion(
        "org.matrix.msc3787",
@@ -314,7 +314,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    V10 = RoomVersion(
        "10",
@@ -333,7 +333,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=True,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    MSC2716v4 = RoomVersion(
        "org.matrix.msc2716v4",
@@ -352,7 +352,7 @@ class RoomVersions:
        msc2716_redactions=True,
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=[],
        msc3931_push_features=(),
    )
    MSC1767v10 = RoomVersion(
        # MSC1767 (Extensible Events) based on room version "10"
@@ -372,7 +372,7 @@ class RoomVersions:
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=True,
        msc3931_push_features=[PushRuleRoomFlag.EXTENSIBLE_EVENTS],
        msc3931_push_features=(PushRuleRoomFlag.EXTENSIBLE_EVENTS,),
    )

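One property the move from [] to () guarantees, sketched below: a tuple cannot be extended in place, so a feature flag can no longer leak into every RoomVersion that happens to share the same list object (the sharing scenario and flag string are illustrative; the diff itself only shows the type change).

from typing import List, Tuple

shared: List[str] = []
v_a_flags = shared          # two "room versions" aliasing one list
v_b_flags = shared
v_a_flags.append("org.matrix.msc3932.extensible_events")
assert v_b_flags == v_a_flags  # the flag leaked into both

flags: Tuple[str, ...] = ()
flags = flags + ("org.matrix.msc3932.extensible_events",)  # explicit new object
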
@@ -245,7 +245,9 @@ class ApplicationService:
            return True

        # likewise with the room's aliases (if it has any)
        alias_list = await store.get_aliases_for_room(room_id)
        alias_list = await store.get_aliases_for_room(
            room_id, on_invalidate=cache_context.invalidate
        )
        for alias in alias_list:
            if self.is_room_alias_in_namespace(alias):
                return True
@@ -311,7 +313,9 @@ class ApplicationService:
        # Find all the rooms the sender is in
        if self.is_interested_in_user(user_id.to_string()):
            return True
        room_ids = await store.get_rooms_for_user(user_id.to_string())
        room_ids = await store.get_rooms_for_user(
            user_id.to_string(), on_invalidate=cache_context.invalidate
        )

        # Then find out if the appservice is interested in any of those rooms
        for room_id in room_ids:

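These hunks thread the caller's cache context into nested cached lookups. For context, a sketch of the pattern, assuming Synapse's @cached descriptor from synapse.util.caches.descriptors (the class here is illustrative): a method cached with cache_context=True receives a context object, and passing on_invalidate=cache_context.invalidate into an inner cached call evicts the outer entry whenever the inner one is invalidated, instead of leaving a stale result behind.

from synapse.util.caches.descriptors import cached

class InterestChecker:
    def __init__(self, store):
        self.store = store

    @cached(cache_context=True)
    async def is_interested_in_room(self, room_id: str, cache_context) -> bool:
        # Tie this cached result to the alias cache: if the room's aliases
        # are invalidated, this entry is evicted along with them.
        aliases = await self.store.get_aliases_for_room(
            room_id, on_invalidate=cache_context.invalidate
        )
        return bool(aliases)
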
@@ -33,6 +33,9 @@ def validate_config(
        config: the configuration value to be validated
        config_path: the path within the config file. This will be used as a basis
            for the error message.

    Raises:
        ConfigError, if validation fails.
    """
    try:
        jsonschema.validate(config, json_schema)

@@ -13,12 +13,13 @@
# limitations under the License.

import logging
from typing import Any, Iterable
from typing import Any, Iterable, Optional, Tuple

from synapse.api.constants import EventTypes
from synapse.config._base import Config, ConfigError
from synapse.config._util import validate_config
from synapse.types import JsonDict
from synapse.types.state import StateFilter

logger = logging.getLogger(__name__)

@@ -26,16 +27,20 @@ logger = logging.getLogger(__name__)
class ApiConfig(Config):
    section = "api"

    room_prejoin_state: StateFilter
    track_puppeted_user_ips: bool

    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        validate_config(_MAIN_SCHEMA, config, ())
        self.room_prejoin_state = list(self._get_prejoin_state_types(config))
        self.room_prejoin_state = StateFilter.from_types(
            self._get_prejoin_state_entries(config)
        )
        self.track_puppeted_user_ips = config.get("track_puppeted_user_ips", False)

    def _get_prejoin_state_types(self, config: JsonDict) -> Iterable[str]:
        """Get the event types to include in the prejoin state

        Parses the config and returns an iterable of the event types to be included.
        """
    def _get_prejoin_state_entries(
        self, config: JsonDict
    ) -> Iterable[Tuple[str, Optional[str]]]:
        """Get the event types and state keys to include in the prejoin state."""
        room_prejoin_state_config = config.get("room_prejoin_state") or {}

        # backwards-compatibility support for room_invite_state_types
@@ -50,33 +55,39 @@ class ApiConfig(Config):

            logger.warning(_ROOM_INVITE_STATE_TYPES_WARNING)

            yield from config["room_invite_state_types"]
            for event_type in config["room_invite_state_types"]:
                yield event_type, None
            return

        if not room_prejoin_state_config.get("disable_default_event_types"):
            yield from _DEFAULT_PREJOIN_STATE_TYPES
            yield from _DEFAULT_PREJOIN_STATE_TYPES_AND_STATE_KEYS

        yield from room_prejoin_state_config.get("additional_event_types", [])
        for entry in room_prejoin_state_config.get("additional_event_types", []):
            if isinstance(entry, str):
                yield entry, None
            else:
                yield entry


_ROOM_INVITE_STATE_TYPES_WARNING = """\
WARNING: The 'room_invite_state_types' configuration setting is now deprecated,
and replaced with 'room_prejoin_state'. New features may not work correctly
unless 'room_invite_state_types' is removed. See the sample configuration file for
details of 'room_prejoin_state'.
unless 'room_invite_state_types' is removed. See the config documentation at
https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#room_prejoin_state
for details of 'room_prejoin_state'.
--------------------------------------------------------------------------------
"""

_DEFAULT_PREJOIN_STATE_TYPES = [
    EventTypes.JoinRules,
    EventTypes.CanonicalAlias,
    EventTypes.RoomAvatar,
    EventTypes.RoomEncryption,
    EventTypes.Name,
_DEFAULT_PREJOIN_STATE_TYPES_AND_STATE_KEYS = [
    (EventTypes.JoinRules, ""),
    (EventTypes.CanonicalAlias, ""),
    (EventTypes.RoomAvatar, ""),
    (EventTypes.RoomEncryption, ""),
    (EventTypes.Name, ""),
    # Per MSC1772.
    EventTypes.Create,
    (EventTypes.Create, ""),
    # Per MSC3173.
    EventTypes.Topic,
    (EventTypes.Topic, ""),
]

@@ -90,7 +101,17 @@ _ROOM_PREJOIN_STATE_CONFIG_SCHEMA = {
        "disable_default_event_types": {"type": "boolean"},
        "additional_event_types": {
            "type": "array",
            "items": {"type": "string"},
            "items": {
                "oneOf": [
                    {"type": "string"},
                    {
                        "type": "array",
                        "items": {"type": "string"},
                        "minItems": 2,
                        "maxItems": 2,
                    },
                ],
            },
        },
    },
},

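The widened schema accepts either a bare event type (matching any state key) or an [event_type, state_key] pair, mirroring the tuples _get_prejoin_state_entries now yields. A self-contained check with jsonschema (the event type names are made up):

import jsonschema

additional_event_types_schema = {
    "type": "array",
    "items": {
        "oneOf": [
            {"type": "string"},
            {
                "type": "array",
                "items": {"type": "string"},
                "minItems": 2,
                "maxItems": 2,
            },
        ],
    },
}

# A bare type matches any state key; a two-element list pins the state key.
jsonschema.validate(
    ["org.example.custom", ["org.example.custom", ""]],
    additional_event_types_schema,
)
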
@@ -16,7 +16,7 @@ import logging
import os
import re
import threading
from typing import Any, Callable, Dict, Optional
from typing import Any, Callable, Dict, Mapping, Optional

import attr

@@ -94,7 +94,7 @@ def add_resizable_cache(

class CacheConfig(Config):
    section = "caches"
    _environ = os.environ
    _environ: Mapping[str, str] = os.environ

    event_cache_size: int
    cache_factors: Dict[str, float]

@@ -136,3 +136,6 @@ class ExperimentalConfig(Config):
|
||||
# Enable room version (and thus applicable push rules from MSC3931/3932)
|
||||
version_id = RoomVersions.MSC1767v10.identifier
|
||||
KNOWN_ROOM_VERSIONS[version_id] = RoomVersions.MSC1767v10
|
||||
|
||||
# MSC3391: Removing account data.
|
||||
self.msc3391_enabled = experimental.get("msc3391_enabled", False)
|
||||
|
||||
@@ -52,6 +52,7 @@ from synapse.api.room_versions import (
|
||||
KNOWN_ROOM_VERSIONS,
|
||||
EventFormatVersions,
|
||||
RoomVersion,
|
||||
RoomVersions,
|
||||
)
|
||||
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
|
||||
from synapse.types import MutableStateMap, StateMap, UserID, get_domain_from_id
|
||||
@@ -341,19 +342,80 @@ def check_state_dependent_auth_rules(
|
||||
logger.debug("Allowing! %s", event)
|
||||
|
||||
|
||||
# Set of room versions where Synapse did not apply event key size limits
|
||||
# in bytes, but rather in codepoints.
|
||||
# In these room versions, we are more lenient with event size validation.
|
||||
LENIENT_EVENT_BYTE_LIMITS_ROOM_VERSIONS = {
|
||||
RoomVersions.V1,
|
||||
RoomVersions.V2,
|
||||
RoomVersions.V3,
|
||||
RoomVersions.V4,
|
||||
RoomVersions.V5,
|
||||
RoomVersions.V6,
|
||||
RoomVersions.MSC2176,
|
||||
RoomVersions.V7,
|
||||
RoomVersions.V8,
|
||||
RoomVersions.V9,
|
||||
RoomVersions.MSC3787,
|
||||
RoomVersions.V10,
|
||||
RoomVersions.MSC2716v4,
|
||||
RoomVersions.MSC1767v10,
|
||||
}
|
||||
|
||||
|
||||
def _check_size_limits(event: "EventBase") -> None:
if len(event.user_id) > 255:
raise EventSizeError("'user_id' too large")
if len(event.room_id) > 255:
raise EventSizeError("'room_id' too large")
if event.is_state() and len(event.state_key) > 255:
raise EventSizeError("'state_key' too large")
if len(event.type) > 255:
raise EventSizeError("'type' too large")
if len(event.event_id) > 255:
raise EventSizeError("'event_id' too large")
"""
Checks the size limits in a PDU.

The entire size limit of the PDU is checked first.
Then the size of fields is checked, first in codepoints and then in bytes.

The codepoint size limits are only for Synapse compatibility.

Raises:
EventSizeError:
when a size limit has been violated.

unpersistable=True if Synapse never would have accepted the event and
the PDU must NOT be persisted.

unpersistable=False if a prior version of Synapse would have accepted the
event and so the PDU must be persisted as rejected to avoid
breaking the room.
"""

# Whole PDU check
if len(encode_canonical_json(event.get_pdu_json())) > MAX_PDU_SIZE:
raise EventSizeError("event too large")
raise EventSizeError("event too large", unpersistable=True)

# Codepoint size check: Synapse always enforced these limits, so apply
# them strictly.
if len(event.user_id) > 255:
raise EventSizeError("'user_id' too large", unpersistable=True)
if len(event.room_id) > 255:
raise EventSizeError("'room_id' too large", unpersistable=True)
if event.is_state() and len(event.state_key) > 255:
raise EventSizeError("'state_key' too large", unpersistable=True)
if len(event.type) > 255:
raise EventSizeError("'type' too large", unpersistable=True)
if len(event.event_id) > 255:
raise EventSizeError("'event_id' too large", unpersistable=True)

strict_byte_limits = (
event.room_version not in LENIENT_EVENT_BYTE_LIMITS_ROOM_VERSIONS
)

# Byte size check: if these fail, then be lenient to avoid breaking rooms.
if len(event.user_id.encode("utf-8")) > 255:
raise EventSizeError("'user_id' too large", unpersistable=strict_byte_limits)
if len(event.room_id.encode("utf-8")) > 255:
raise EventSizeError("'room_id' too large", unpersistable=strict_byte_limits)
if event.is_state() and len(event.state_key.encode("utf-8")) > 255:
raise EventSizeError("'state_key' too large", unpersistable=strict_byte_limits)
if len(event.type.encode("utf-8")) > 255:
raise EventSizeError("'type' too large", unpersistable=strict_byte_limits)
if len(event.event_id.encode("utf-8")) > 255:
raise EventSizeError("'event_id' too large", unpersistable=strict_byte_limits)
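The codepoint/byte distinction above matters because Python's `len()` on a `str` counts codepoints, while the federation limit is defined in bytes. A self-contained illustration (not part of the diff) of a value that passes the historical codepoint check but trips the new byte check:

    # A 200-codepoint identifier built from 3-byte UTF-8 characters passes the
    # historical codepoint check but fails the byte check.
    s = "\u20ac" * 200  # EURO SIGN, 3 bytes each in UTF-8

    assert len(s) == 200                  # codepoints: under the 255 limit
    assert len(s.encode("utf-8")) == 600  # bytes: over the 255 limit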
def _check_create(event: "EventBase") -> None:

@@ -28,8 +28,8 @@ from synapse.event_auth import auth_types_for_event
from synapse.events import EventBase, _EventInternalMetadata, make_event_from_dict
from synapse.state import StateHandler
from synapse.storage.databases.main import DataStore
from synapse.storage.state import StateFilter
from synapse.types import EventID, JsonDict
from synapse.types.state import StateFilter
from synapse.util import Clock
from synapse.util.stringutils import random_string


@@ -23,7 +23,7 @@ from synapse.types import JsonDict, StateMap
if TYPE_CHECKING:
from synapse.storage.controllers import StorageControllers
from synapse.storage.databases.main import DataStore
from synapse.storage.state import StateFilter
from synapse.types.state import StateFilter


@attr.s(slots=True, auto_attribs=True)

@@ -28,8 +28,14 @@ from typing import (
)

import attr
from canonicaljson import encode_canonical_json

from synapse.api.constants import EventContentFields, EventTypes, RelationTypes
from synapse.api.constants import (
MAX_PDU_SIZE,
EventContentFields,
EventTypes,
RelationTypes,
)
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import RoomVersion
from synapse.types import JsonDict

@@ -674,3 +680,27 @@ def validate_canonicaljson(value: Any) -> None:
elif not isinstance(value, (bool, str)) and value is not None:
# Other potential JSON values (bool, None, str) are safe.
raise SynapseError(400, "Unknown JSON value", Codes.BAD_JSON)
def maybe_upsert_event_field(
event: EventBase, container: JsonDict, key: str, value: object
) -> bool:
"""Upsert an event field, but only if this doesn't make the event too large.

Returns true iff the upsert took place.
"""
if key in container:
old_value: object = container[key]
container[key] = value
# NB: here and below, we assume that passing a non-None `time_now` argument to
# get_pdu_json doesn't increase the size of the encoded result.
upsert_okay = len(encode_canonical_json(event.get_pdu_json())) <= MAX_PDU_SIZE
if not upsert_okay:
container[key] = old_value
else:
container[key] = value
upsert_okay = len(encode_canonical_json(event.get_pdu_json())) <= MAX_PDU_SIZE
if not upsert_okay:
del container[key]

return upsert_okay
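A hedged usage sketch of the helper above. `FakeEvent` and the `MAX_PDU_SIZE` value here are stand-ins invented for illustration; only `maybe_upsert_event_field` itself comes from the hunk:

    from canonicaljson import encode_canonical_json

    MAX_PDU_SIZE = 65536  # assumed stand-in for synapse.api.constants.MAX_PDU_SIZE

    class FakeEvent:
        """Stand-in for EventBase, just enough for the helper's size check."""
        def __init__(self) -> None:
            self.unsigned: dict = {}
        def get_pdu_json(self) -> dict:
            return {"unsigned": self.unsigned}

    event = FakeEvent()
    ok = maybe_upsert_event_field(event, event.unsigned, "invite_room_state", ["..."])
    # ok is True and the field is kept only if the canonical-JSON encoding of
    # the whole PDU stays within MAX_PDU_SIZE; otherwise the old value (or the
    # field's absence) is restored.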
@@ -771,17 +771,28 @@ class FederationClient(FederationBase):
"""
if synapse_error is None:
synapse_error = e.to_synapse_error()
# There is no good way to detect an "unknown" endpoint.
# MSC3743 specifies that servers should return a 404 or 405 with an errcode
# of M_UNRECOGNIZED when they receive a request to an unknown endpoint or
# to an unknown method, respectively.
#
# Dendrite returns a 404 (with a body of "404 page not found");
# Conduit returns a 404 (with no body); and Synapse returns a 400
# with M_UNRECOGNIZED.
#
# This needs to be rather specific as some endpoints truly do return 404
# errors.
# Older versions of servers don't properly handle this. This needs to be
# rather specific as some endpoints truly do return 404 errors.
return (
e.code == 404 and (not e.response or e.response == b"404 page not found")
) or (e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED)
# 404 is an unknown endpoint, 405 is a known endpoint, but unknown method.
(e.code == 404 or e.code == 405)
and (
# Older Dendrites returned a text or empty body.
# Older Conduit returned an empty body.
not e.response
or e.response == b"404 page not found"
# The proper response JSON with M_UNRECOGNIZED errcode.
or synapse_error.errcode == Codes.UNRECOGNIZED
)
) or (
# Older Synapses returned a 400 error.
e.code == 400
and synapse_error.errcode == Codes.UNRECOGNIZED
)

async def _try_destination_list(
self,
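The detection rule above boils down to a small predicate. A self-contained restatement (the function name and signature are stand-ins, not Synapse API):

    from typing import Optional

    def looks_like_unknown_endpoint(
        code: int, body: bytes, errcode: Optional[str]
    ) -> bool:
        # 404 = unknown endpoint, 405 = known endpoint but unknown method (MSC3743).
        if code in (404, 405):
            # Older Dendrite/Conduit: text or empty body; compliant servers:
            # a proper M_UNRECOGNIZED errcode.
            return (
                not body
                or body == b"404 page not found"
                or errcode == "M_UNRECOGNIZED"
            )
        # Older Synapse: 400 with M_UNRECOGNIZED.
        return code == 400 and errcode == "M_UNRECOGNIZED"

    assert looks_like_unknown_endpoint(405, b'{"errcode":"M_UNRECOGNIZED"}', "M_UNRECOGNIZED")
    assert not looks_like_unknown_endpoint(404, b'{"errcode":"M_NOT_FOUND"}', "M_NOT_FOUND")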
@@ -641,7 +641,7 @@ class PerDestinationQueue:
if not message_id:
continue

set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)
set_tag(SynapseTags.TO_DEVICE_EDU_ID, message_id)

edus = [
Edu(

@@ -17,10 +17,12 @@ import random
from typing import TYPE_CHECKING, Awaitable, Callable, Collection, List, Optional, Tuple

from synapse.replication.http.account_data import (
ReplicationAddRoomAccountDataRestServlet,
ReplicationAddTagRestServlet,
ReplicationAddUserAccountDataRestServlet,
ReplicationRemoveRoomAccountDataRestServlet,
ReplicationRemoveTagRestServlet,
ReplicationRoomAccountDataRestServlet,
ReplicationUserAccountDataRestServlet,
ReplicationRemoveUserAccountDataRestServlet,
)
from synapse.streams import EventSource
from synapse.types import JsonDict, StreamKeyType, UserID

@@ -41,8 +43,18 @@ class AccountDataHandler:
self._instance_name = hs.get_instance_name()
self._notifier = hs.get_notifier()

self._user_data_client = ReplicationUserAccountDataRestServlet.make_client(hs)
self._room_data_client = ReplicationRoomAccountDataRestServlet.make_client(hs)
self._add_user_data_client = (
ReplicationAddUserAccountDataRestServlet.make_client(hs)
)
self._remove_user_data_client = (
ReplicationRemoveUserAccountDataRestServlet.make_client(hs)
)
self._add_room_data_client = (
ReplicationAddRoomAccountDataRestServlet.make_client(hs)
)
self._remove_room_data_client = (
ReplicationRemoveRoomAccountDataRestServlet.make_client(hs)
)
self._add_tag_client = ReplicationAddTagRestServlet.make_client(hs)
self._remove_tag_client = ReplicationRemoveTagRestServlet.make_client(hs)
self._account_data_writers = hs.config.worker.writers.account_data

@@ -112,7 +124,7 @@ class AccountDataHandler:

return max_stream_id
else:
response = await self._room_data_client(
response = await self._add_room_data_client(
instance_name=random.choice(self._account_data_writers),
user_id=user_id,
room_id=room_id,

@@ -121,15 +133,59 @@ class AccountDataHandler:
)
return response["max_stream_id"]
async def remove_account_data_for_room(
self, user_id: str, room_id: str, account_data_type: str
) -> Optional[int]:
"""
Deletes the room account data for the given user and account data type.

"Deleting" account data merely means setting the content of the account data
to an empty JSON object: {}.

Args:
user_id: The user ID to remove room account data for.
room_id: The room ID to target.
account_data_type: The account data type to remove.

Returns:
The maximum stream ID, or None if the room account data item did not exist.
"""
if self._instance_name in self._account_data_writers:
max_stream_id = await self._store.remove_account_data_for_room(
user_id, room_id, account_data_type
)
if max_stream_id is None:
# The referenced account data did not exist, so no delete occurred.
return None

self._notifier.on_new_event(
StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]
)

# Notify Synapse modules that the content of the type has changed to an
# empty dictionary.
await self._notify_modules(user_id, room_id, account_data_type, {})

return max_stream_id
else:
response = await self._remove_room_data_client(
instance_name=random.choice(self._account_data_writers),
user_id=user_id,
room_id=room_id,
account_data_type=account_data_type,
content={},
)
return response["max_stream_id"]
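Since "removal" is modelled as writing an empty content object at a new stream position, a toy model of the semantics (simplified, not the Synapse storage layer):

    # Readers treat an empty dict as "not set"; deleting twice is a no-op,
    # which mirrors the None return above.
    account_data = {("!room:example.org", "m.fully_read"): {"event_id": "$e"}}

    def remove(store: dict, room_id: str, data_type: str) -> bool:
        key = (room_id, data_type)
        if key not in store or store[key] == {}:
            return False  # nothing to delete
        store[key] = {}
        return True

    assert remove(account_data, "!room:example.org", "m.fully_read")
    assert not remove(account_data, "!room:example.org", "m.fully_read")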
async def add_account_data_for_user(
self, user_id: str, account_data_type: str, content: JsonDict
) -> int:
"""Add some global account_data for a user.

Args:
user_id: The user to add a tag for.
user_id: The user to add some account data for.
account_data_type: The type of account_data to add.
content: A json object to associate with the tag.
content: The content json dictionary.

Returns:
The maximum stream ID.

@@ -148,7 +204,7 @@ class AccountDataHandler:

return max_stream_id
else:
response = await self._user_data_client(
response = await self._add_user_data_client(
instance_name=random.choice(self._account_data_writers),
user_id=user_id,
account_data_type=account_data_type,

@@ -156,6 +212,45 @@ class AccountDataHandler:
)
return response["max_stream_id"]
async def remove_account_data_for_user(
self, user_id: str, account_data_type: str
) -> Optional[int]:
"""Removes a piece of global account_data for a user.

Args:
user_id: The user to remove account data for.
account_data_type: The type of account_data to remove.

Returns:
The maximum stream ID, or None if the account data item did not exist.
"""

if self._instance_name in self._account_data_writers:
max_stream_id = await self._store.remove_account_data_for_user(
user_id, account_data_type
)
if max_stream_id is None:
# The referenced account data did not exist, so no delete occurred.
return None

self._notifier.on_new_event(
StreamKeyType.ACCOUNT_DATA, max_stream_id, users=[user_id]
)

# Notify Synapse modules that the content of the type has changed to an
# empty dictionary.
await self._notify_modules(user_id, None, account_data_type, {})

return max_stream_id
else:
response = await self._remove_user_data_client(
instance_name=random.choice(self._account_data_writers),
user_id=user_id,
account_data_type=account_data_type,
content={},
)
return response["max_stream_id"]

async def add_tag_to_room(
self, user_id: str, room_id: str, tag: str, content: JsonDict
) -> int:
@@ -578,9 +578,6 @@ class ApplicationServicesHandler:
device_id,
), messages in recipient_device_to_messages.items():
for message_json in messages:
# Remove 'message_id' from the to-device message, as it's an internal ID
message_json.pop("message_id", None)

message_payload.append(
{
"to_user_id": user_id,

@@ -615,8 +612,8 @@ class ApplicationServicesHandler:
)

# Fetch the users who have modified their device list since then.
users_with_changed_device_lists = (
await self.store.get_users_whose_devices_changed(from_key, to_key=new_key)
users_with_changed_device_lists = await self.store.get_all_devices_changed(
from_key, to_key=new_key
)

# Filter out any users the application service is not interested in

@@ -2031,7 +2031,7 @@ class PasswordAuthProvider:
self.is_3pid_allowed_callbacks: List[IS_3PID_ALLOWED_CALLBACK] = []

# Mapping from login type to login parameters
self._supported_login_types: Dict[str, Iterable[str]] = {}
self._supported_login_types: Dict[str, Tuple[str, ...]] = {}

# Mapping from login type to auth checker callbacks
self.auth_checker_callbacks: Dict[str, List[CHECK_AUTH_CALLBACK]] = {}

@@ -996,7 +996,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
# Check if we are partially joining any rooms. If so we need to store
# all device list updates so that we can handle them correctly once we
# know who is in the room.
# TODO(faster joins): this fetches and processes a bunch of data that we don't
# TODO(faster_joins): this fetches and processes a bunch of data that we don't
# use. Could be replaced by a tighter query e.g.
# SELECT EXISTS(SELECT 1 FROM partial_state_rooms)
partial_rooms = await self.store.get_partial_state_room_resync_info()

@@ -15,7 +15,7 @@
import logging
from typing import TYPE_CHECKING, Any, Dict

from synapse.api.constants import EduTypes, ToDeviceEventTypes
from synapse.api.constants import EduTypes, EventContentFields, ToDeviceEventTypes
from synapse.api.errors import SynapseError
from synapse.api.ratelimiting import Ratelimiter
from synapse.logging.context import run_in_background

@@ -216,14 +216,24 @@ class DeviceMessageHandler:
"""
sender_user_id = requester.user.to_string()

message_id = random_string(16)
set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)

log_kv({"number_of_to_device_messages": len(messages)})
set_tag("sender", sender_user_id)
set_tag(SynapseTags.TO_DEVICE_TYPE, message_type)
set_tag(SynapseTags.TO_DEVICE_SENDER, sender_user_id)
local_messages = {}
remote_messages: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
for user_id, by_device in messages.items():
# add an opentracing log entry for each message
for device_id, message_content in by_device.items():
log_kv(
{
"event": "send_to_device_message",
"user_id": user_id,
"device_id": device_id,
EventContentFields.TO_DEVICE_MSGID: message_content.get(
EventContentFields.TO_DEVICE_MSGID
),
}
)

# Ratelimit local cross-user key requests by the sending device.
if (
message_type == ToDeviceEventTypes.RoomKeyRequest

@@ -233,6 +243,7 @@ class DeviceMessageHandler:
requester, (sender_user_id, requester.device_id)
)
if not allowed:
log_kv({"message": f"dropping key requests to {user_id}"})
logger.info(
"Dropping room_key_request from %s to %s due to rate limit",
sender_user_id,

@@ -247,18 +258,11 @@ class DeviceMessageHandler:
"content": message_content,
"type": message_type,
"sender": sender_user_id,
"message_id": message_id,
}
for device_id, message_content in by_device.items()
}
if messages_by_device:
local_messages[user_id] = messages_by_device
log_kv(
{
"user_id": user_id,
"device_id": list(messages_by_device),
}
)
else:
destination = get_domain_from_id(user_id)
remote_messages.setdefault(destination, {})[user_id] = by_device

@@ -267,7 +271,11 @@ class DeviceMessageHandler:

remote_edu_contents = {}
for destination, messages in remote_messages.items():
log_kv({"destination": destination})
# The EDU contains a "message_id" property which is used for
# idempotence. Make up a random one.
message_id = random_string(16)
log_kv({"destination": destination, "message_id": message_id})

remote_edu_contents[destination] = {
"messages": messages,
"sender": sender_user_id,
@@ -70,8 +70,8 @@ from synapse.replication.http.federation import (
)
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.state import StateFilter
from synapse.types import JsonDict, get_domain_from_id
from synapse.types.state import StateFilter
from synapse.util.async_helpers import Linearizer
from synapse.util.retryutils import NotRetryingDestination
from synapse.visibility import filter_events_for_server

@@ -152,6 +152,7 @@ class FederationHandler:
self._federation_event_handler = hs.get_federation_event_handler()
self._device_handler = hs.get_device_handler()
self._bulk_push_rule_evaluator = hs.get_bulk_push_rule_evaluator()
self._notifier = hs.get_notifier()

self._clean_room_for_join_client = ReplicationCleanRoomRestServlet.make_client(
hs

@@ -1342,32 +1343,53 @@ class FederationHandler:
)

EventValidator().validate_builder(builder)
event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)

event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)
# Try several times, it could fail with PartialStateConflictError
# in send_membership_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
(
event,
context,
) = await self.event_creation_handler.create_new_client_event(
builder=builder
)

EventValidator().validate_new(event, self.config)
event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)

# We need to tell the transaction queue to send this out, even
# though the sender isn't a local user.
event.internal_metadata.send_on_behalf_of = self.hs.hostname
EventValidator().validate_new(event, self.config)

try:
validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(event)
except AuthError as e:
logger.warning("Denying new third party invite %r because %s", event, e)
raise e
# We need to tell the transaction queue to send this out, even
# though the sender isn't a local user.
event.internal_metadata.send_on_behalf_of = self.hs.hostname

await self._check_signature(event, context)
try:
validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(
event
)
except AuthError as e:
logger.warning(
"Denying new third party invite %r because %s", event, e
)
raise e

# We retrieve the room member handler here as to not cause a cyclic dependency
member_handler = self.hs.get_room_member_handler()
await member_handler.send_membership_event(None, event, context)
await self._check_signature(event, context)

# We retrieve the room member handler here as to not cause a cyclic dependency
member_handler = self.hs.get_room_member_handler()
await member_handler.send_membership_event(None, event, context)

break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass
else:
destinations = {x.split(":", 1)[-1] for x in (sender_user_id, room_id)}
@@ -1399,28 +1421,46 @@ class FederationHandler:
room_version_obj, event_dict
)

event, context = await self.event_creation_handler.create_new_client_event(
builder=builder
)
event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)
# Try several times, it could fail with PartialStateConflictError
# in send_membership_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
(
event,
context,
) = await self.event_creation_handler.create_new_client_event(
builder=builder
)
event, context = await self.add_display_name_to_third_party_invite(
room_version_obj, event_dict, event, context
)

try:
validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(event)
except AuthError as e:
logger.warning("Denying third party invite %r because %s", event, e)
raise e
await self._check_signature(event, context)
try:
validate_event_for_room_version(event)
await self._event_auth_handler.check_auth_rules_from_context(event)
except AuthError as e:
logger.warning("Denying third party invite %r because %s", event, e)
raise e
await self._check_signature(event, context)

# We need to tell the transaction queue to send this out, even
# though the sender isn't a local user.
event.internal_metadata.send_on_behalf_of = get_domain_from_id(event.sender)
# We need to tell the transaction queue to send this out, even
# though the sender isn't a local user.
event.internal_metadata.send_on_behalf_of = get_domain_from_id(
event.sender
)

# We retrieve the room member handler here as to not cause a cyclic dependency
member_handler = self.hs.get_room_member_handler()
await member_handler.send_membership_event(None, event, context)
# We retrieve the room member handler here as to not cause a cyclic dependency
member_handler = self.hs.get_room_member_handler()
await member_handler.send_membership_event(None, event, context)

break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass
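The same retry shape recurs throughout this changeset (third-party invites, client events, dummy events, room upgrades, membership). Reduced to its skeleton — illustrative only, with a generic operation in place of the create/validate/send sequence and a placeholder exception class standing in for the real import:

    class PartialStateConflictError(Exception):
        """Placeholder for synapse.storage.databases.main.events.PartialStateConflictError."""

    async def retry_on_conflict(op, max_retries: int = 5):
        # Skeleton of the loop used above: re-run the whole operation when the
        # room is un-partial-stated mid-flight, since the event context must
        # then be recomputed from scratch.
        for i in range(max_retries):
            try:
                return await op()
            except PartialStateConflictError:
                if i == max_retries - 1:
                    raise  # out of retries: propagate to the caller
                # otherwise loop and rebuild the context on the next attempt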
async def add_display_name_to_third_party_invite(
self,

@@ -1692,6 +1732,9 @@ class FederationHandler:
self._storage_controllers.state.notify_room_un_partial_stated(
room_id
)
# Poke the notifier so that other workers see the write to
# the un-partial-stated rooms stream.
self._notifier.notify_replication()

# TODO(faster_joins) update room stats and user directory?
# https://github.com/matrix-org/synapse/issues/12814

@@ -43,6 +43,7 @@ from synapse.api.constants import (
from synapse.api.errors import (
AuthError,
Codes,
EventSizeError,
FederationError,
FederationPullAttemptBackoffError,
HttpResponseException,

@@ -75,7 +76,6 @@ from synapse.replication.http.federation import (
from synapse.state import StateResolutionStore
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.state import StateFilter
from synapse.types import (
PersistedEventPosition,
RoomStreamToken,

@@ -83,6 +83,7 @@ from synapse.types import (
UserID,
get_domain_from_id,
)
from synapse.types.state import StateFilter
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.iterutils import batch_iter
from synapse.util.retryutils import NotRetryingDestination

@@ -609,6 +610,8 @@ class FederationEventHandler:
self._state_storage_controller.notify_event_un_partial_stated(
event.event_id
)
# Notify that there's a new row in the un_partial_stated_events stream.
self._notifier.notify_replication()

@trace
async def backfill(

@@ -1736,6 +1739,15 @@ class FederationEventHandler:
except AuthError as e:
logger.warning("Rejecting %r because %s", event, e)
context.rejected = RejectedReason.AUTH_ERROR
except EventSizeError as e:
if e.unpersistable:
# This event is completely unpersistable.
raise e
# Otherwise, we are somewhat lenient and just persist the event
# as rejected, for moderate compatibility with older Synapse
# versions.
logger.warning("While validating received event %r: %s", event, e)
context.rejected = RejectedReason.OVERSIZED_EVENT

events_and_contexts_to_persist.append((event, context))

@@ -1781,6 +1793,16 @@ class FederationEventHandler:
# TODO: use a different rejected reason here?
context.rejected = RejectedReason.AUTH_ERROR
return
except EventSizeError as e:
if e.unpersistable:
# This event is completely unpersistable.
raise e
# Otherwise, we are somewhat lenient and just persist the event
# as rejected, for moderate compatibility with older Synapse
# versions.
logger.warning("While validating received event %r: %s", event, e)
context.rejected = RejectedReason.OVERSIZED_EVENT
return

# next, check that we have all of the event's auth events.
#

@@ -37,7 +37,6 @@ from synapse.api.errors import (
AuthError,
Codes,
ConsentNotGivenError,
LimitExceededError,
NotFoundError,
ShadowBanError,
SynapseError,

@@ -50,6 +49,7 @@ from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase, relation_from_event
from synapse.events.builder import EventBuilder
from synapse.events.snapshot import EventContext
from synapse.events.utils import maybe_upsert_event_field
from synapse.events.validator import EventValidator
from synapse.handlers.directory import DirectoryHandler
from synapse.logging import opentracing

@@ -59,7 +59,6 @@ from synapse.replication.http.send_event import ReplicationSendEventRestServlet
from synapse.replication.http.send_events import ReplicationSendEventsRestServlet
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.state import StateFilter
from synapse.types import (
MutableStateMap,
PersistedEventPosition,

@@ -70,6 +69,7 @@ from synapse.types import (
UserID,
create_requester,
)
from synapse.types.state import StateFilter
from synapse.util import json_decoder, json_encoder, log_failure, unwrapFirstError
from synapse.util.async_helpers import Linearizer, gather_results
from synapse.util.caches.expiringcache import ExpiringCache
@@ -998,60 +998,73 @@ class EventCreationHandler:
event.internal_metadata.stream_ordering,
)

event, context = await self.create_event(
requester,
event_dict,
txn_id=txn_id,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
outlier=outlier,
historical=historical,
depth=depth,
)

assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % (
event.sender,
)

spam_check_result = await self.spam_checker.check_event_for_spam(event)
if spam_check_result != self.spam_checker.NOT_SPAM:
if isinstance(spam_check_result, tuple):
try:
[code, dict] = spam_check_result
raise SynapseError(
403,
"This message had been rejected as probable spam",
code,
dict,
)
except ValueError:
logger.error(
"Spam-check module returned invalid error value. Expecting [code, dict], got %s",
spam_check_result,
)

raise SynapseError(
403,
"This message has been rejected as probable spam",
Codes.FORBIDDEN,
)

# Backwards compatibility: if the return value is not an error code, it
# means the module returned an error message to be included in the
# SynapseError (which is now deprecated).
raise SynapseError(
403,
spam_check_result,
Codes.FORBIDDEN,
# Try several times, it could fail with PartialStateConflictError
# in handle_new_client_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
event, context = await self.create_event(
requester,
event_dict,
txn_id=txn_id,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
outlier=outlier,
historical=historical,
depth=depth,
)

ev = await self.handle_new_client_event(
requester=requester,
events_and_context=[(event, context)],
ratelimit=ratelimit,
ignore_shadow_ban=ignore_shadow_ban,
)
assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % (
event.sender,
)

spam_check_result = await self.spam_checker.check_event_for_spam(event)
if spam_check_result != self.spam_checker.NOT_SPAM:
if isinstance(spam_check_result, tuple):
try:
[code, dict] = spam_check_result
raise SynapseError(
403,
"This message had been rejected as probable spam",
code,
dict,
)
except ValueError:
logger.error(
"Spam-check module returned invalid error value. Expecting [code, dict], got %s",
spam_check_result,
)

raise SynapseError(
403,
"This message has been rejected as probable spam",
Codes.FORBIDDEN,
)

# Backwards compatibility: if the return value is not an error code, it
# means the module returned an error message to be included in the
# SynapseError (which is now deprecated).
raise SynapseError(
403,
spam_check_result,
Codes.FORBIDDEN,
)

ev = await self.handle_new_client_event(
requester=requester,
events_and_context=[(event, context)],
ratelimit=ratelimit,
ignore_shadow_ban=ignore_shadow_ban,
)

break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass

# we know it was persisted, so must have a stream ordering
assert ev.internal_metadata.stream_ordering
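The spam-check branch above distinguishes three module return shapes: the NOT_SPAM sentinel, a `(code, dict)` tuple, and a deprecated bare error-message string. A compact restatement with toy types — `PermissionError` stands in for `SynapseError`, and none of these names are the module API:

    from typing import Tuple, Union

    NOT_SPAM = "NOT_SPAM"  # toy sentinel, not the real module constant

    def interpret(result: Union[str, Tuple[str, dict]]) -> None:
        if result == NOT_SPAM:
            return  # event is allowed
        if isinstance(result, tuple):
            try:
                [code, extra] = result  # modern modules: (errcode, extra dict)
                raise PermissionError(f"probable spam ({code}): {extra}")
            except ValueError:
                # malformed return value: fall back to a generic rejection
                raise PermissionError("probable spam")
        # legacy: a bare error-message string, now deprecated
        raise PermissionError(result)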
@@ -1355,7 +1368,7 @@ class EventCreationHandler:

Raises:
ShadowBanError if the requester has been shadow-banned.
SynapseError(503) if attempting to persist a partial state event in
PartialStateConflictError if attempting to persist a partial state event in
a room that has been un-partial stated.
"""
extra_users = extra_users or []

@@ -1417,34 +1430,23 @@ class EventCreationHandler:
# We now persist the event (and update the cache in parallel, since we
# don't want to block on it).
event, context = events_and_context[0]
try:
result, _ = await make_deferred_yieldable(
gather_results(
(
run_in_background(
self._persist_events,
requester=requester,
events_and_context=events_and_context,
ratelimit=ratelimit,
extra_users=extra_users,
),
run_in_background(
self.cache_joined_hosts_for_events, events_and_context
).addErrback(
log_failure, "cache_joined_hosts_for_event failed"
),
result, _ = await make_deferred_yieldable(
gather_results(
(
run_in_background(
self._persist_events,
requester=requester,
events_and_context=events_and_context,
ratelimit=ratelimit,
extra_users=extra_users,
),
consumeErrors=True,
)
).addErrback(unwrapFirstError)
except PartialStateConflictError as e:
# The event context needs to be recomputed.
# Turn the error into a 429, as a hint to the client to try again.
logger.info(
"Room %s was un-partial stated while persisting client event.",
event.room_id,
run_in_background(
self.cache_joined_hosts_for_events, events_and_context
).addErrback(log_failure, "cache_joined_hosts_for_event failed"),
),
consumeErrors=True,
)
raise LimitExceededError(msg=e.msg, errcode=e.errcode, retry_after_ms=0)
).addErrback(unwrapFirstError)

return result

@@ -1739,12 +1741,15 @@ class EventCreationHandler:

if event.type == EventTypes.Member:
if event.content["membership"] == Membership.INVITE:
event.unsigned[
"invite_room_state"
] = await self.store.get_stripped_room_state_from_event_context(
context,
self.room_prejoin_state_types,
membership_user_id=event.sender,
maybe_upsert_event_field(
event,
event.unsigned,
"invite_room_state",
await self.store.get_stripped_room_state_from_event_context(
context,
self.room_prejoin_state_types,
membership_user_id=event.sender,
),
)

invitee = UserID.from_string(event.state_key)

@@ -1762,11 +1767,14 @@ class EventCreationHandler:
event.signatures.update(returned_invite.signatures)

if event.content["membership"] == Membership.KNOCK:
event.unsigned[
"knock_room_state"
] = await self.store.get_stripped_room_state_from_event_context(
context,
self.room_prejoin_state_types,
maybe_upsert_event_field(
event,
event.unsigned,
"knock_room_state",
await self.store.get_stripped_room_state_from_event_context(
context,
self.room_prejoin_state_types,
),
)

if event.type == EventTypes.Redaction:

@@ -2005,26 +2013,39 @@ class EventCreationHandler:
for user_id in members:
requester = create_requester(user_id, authenticated_entity=self.server_name)
try:
event, context = await self.create_event(
requester,
{
"type": EventTypes.Dummy,
"content": {},
"room_id": room_id,
"sender": user_id,
},
)
# Try several times, it could fail with PartialStateConflictError
# in handle_new_client_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
event, context = await self.create_event(
requester,
{
"type": EventTypes.Dummy,
"content": {},
"room_id": room_id,
"sender": user_id,
},
)

event.internal_metadata.proactively_send = False
event.internal_metadata.proactively_send = False

# Since this is a dummy-event it is OK if it is sent by a
# shadow-banned user.
await self.handle_new_client_event(
requester,
events_and_context=[(event, context)],
ratelimit=False,
ignore_shadow_ban=True,
)
# Since this is a dummy-event it is OK if it is sent by a
# shadow-banned user.
await self.handle_new_client_event(
requester,
events_and_context=[(event, context)],
ratelimit=False,
ignore_shadow_ban=True,
)

break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass
return True
except AuthError:
logger.info(

@@ -27,9 +27,9 @@ from synapse.handlers.room import ShutdownRoomResponse
from synapse.logging.opentracing import trace
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.rest.admin._base import assert_user_is_admin
from synapse.storage.state import StateFilter
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, Requester, StreamKeyType
from synapse.types.state import StateFilter
from synapse.util.async_helpers import ReadWriteLock
from synapse.util.stringutils import random_string
from synapse.visibility import filter_events_for_client
@@ -1692,10 +1692,12 @@ class PresenceEventSource(EventSource[int, UserPresenceState]):

if from_key is not None:
# First get all users that have had a presence update
updated_users = stream_change_cache.get_all_entities_changed(from_key)
result = stream_change_cache.get_all_entities_changed(from_key)

# Cross-reference users we're interested in with those that have had updates.
if updated_users is not None:
if result.hit:
updated_users = result.entities

# If we have the full list of changes for presence we can
# simply check which ones share a room with the user.
get_updates_counter.labels("stream").inc()

@@ -1767,9 +1769,9 @@ class PresenceEventSource(EventSource[int, UserPresenceState]):
updated_users = None
if from_key:
# Only return updates since the last sync
updated_users = self.store.presence_stream_cache.get_all_entities_changed(
from_key
)
result = self.store.presence_stream_cache.get_all_entities_changed(from_key)
if result.hit:
updated_users = result.entities

if updated_users is not None:
# Get the actual presence update for each change
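The stream change cache now returns an explicit result object rather than `Optional[list]`. A sketch of the shape this implies — the class name and fields are inferred from the `.hit` / `.entities` usage above, not copied from the Synapse source:

    from typing import Collection

    import attr

    @attr.s(auto_attribs=True, frozen=True, slots=True)
    class AllEntitiesChangedResult:
        # Assumed shape, inferred from usage above.
        hit: bool                  # False if from_key predates the cache window
        entities: Collection[str]  # meaningful only when hit is True

    def fetch_all_users() -> Collection[str]:
        return {"@a:example.org"}  # stand-in for the expensive full query

    def changed_users(result: AllEntitiesChangedResult) -> Collection[str]:
        if result.hit:
            return result.entities  # incremental path
        return fetch_all_users()    # cache miss: fall back to the full set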
@@ -46,8 +46,8 @@ from synapse.replication.http.register import (
ReplicationRegisterServlet,
)
from synapse.spam_checker_api import RegistrationBehaviour
from synapse.storage.state import StateFilter
from synapse.types import RoomAlias, UserID, create_requester
from synapse.types.state import StateFilter

if TYPE_CHECKING:
from synapse.server import HomeServer

@@ -62,7 +62,7 @@ from synapse.events.utils import copy_and_fixup_power_levels_contents
from synapse.handlers.relations import BundledAggregations
from synapse.module_api import NOT_SPAM
from synapse.rest.admin._base import assert_user_is_admin
from synapse.storage.state import StateFilter
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.streams import EventSource
from synapse.types import (
JsonDict,

@@ -77,6 +77,7 @@ from synapse.types import (
UserID,
create_requester,
)
from synapse.types.state import StateFilter
from synapse.util import stringutils
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.stringutils import parse_and_validate_server_name
@@ -207,46 +208,64 @@ class RoomCreationHandler:

new_room_id = self._generate_room_id()

# Check whether the user has the power level to carry out the upgrade.
# `check_auth_rules_from_context` will check that they are in the room and have
# the required power level to send the tombstone event.
(
tombstone_event,
tombstone_context,
) = await self.event_creation_handler.create_event(
requester,
{
"type": EventTypes.Tombstone,
"state_key": "",
"room_id": old_room_id,
"sender": user_id,
"content": {
"body": "This room has been replaced",
"replacement_room": new_room_id,
},
},
)
validate_event_for_room_version(tombstone_event)
await self._event_auth_handler.check_auth_rules_from_context(tombstone_event)
# Try several times, it could fail with PartialStateConflictError
# in _upgrade_room, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
# Check whether the user has the power level to carry out the upgrade.
# `check_auth_rules_from_context` will check that they are in the room and have
# the required power level to send the tombstone event.
(
tombstone_event,
tombstone_context,
) = await self.event_creation_handler.create_event(
requester,
{
"type": EventTypes.Tombstone,
"state_key": "",
"room_id": old_room_id,
"sender": user_id,
"content": {
"body": "This room has been replaced",
"replacement_room": new_room_id,
},
},
)
validate_event_for_room_version(tombstone_event)
await self._event_auth_handler.check_auth_rules_from_context(
tombstone_event
)

# Upgrade the room
#
# If this user has sent multiple upgrade requests for the same room
# and one of them is not complete yet, cache the response and
# return it to all subsequent requests
ret = await self._upgrade_response_cache.wrap(
(old_room_id, user_id),
self._upgrade_room,
requester,
old_room_id,
old_room,  # args for _upgrade_room
new_room_id,
new_version,
tombstone_event,
tombstone_context,
)
# Upgrade the room
#
# If this user has sent multiple upgrade requests for the same room
# and one of them is not complete yet, cache the response and
# return it to all subsequent requests
ret = await self._upgrade_response_cache.wrap(
(old_room_id, user_id),
self._upgrade_room,
requester,
old_room_id,
old_room,  # args for _upgrade_room
new_room_id,
new_version,
tombstone_event,
tombstone_context,
)

return ret
return ret
except PartialStateConflictError as e:
# Clean up the cache so we can retry properly
self._upgrade_response_cache.unset((old_room_id, user_id))
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass

# This is to satisfy mypy and should never happen
raise PartialStateConflictError()
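Note the `unset()` call in the except block: without it, the failed attempt would stay cached and every retry (and every concurrent upgrade request for the same room) would be handed the same failure. A toy model of the wrap/unset interaction — simplified, not the real ResponseCache, which caches in-flight deferreds rather than plain values:

    class TinyResponseCache:
        """Toy stand-in for synapse's ResponseCache wrap()/unset()."""

        def __init__(self) -> None:
            self._results: dict = {}

        async def wrap(self, key, func, *args):
            if key not in self._results:
                self._results[key] = await func(*args)  # first caller computes
            return self._results[key]                   # later callers share it

        def unset(self, key) -> None:
            # Drop a cached entry so the next attempt recomputes it, e.g. after
            # a PartialStateConflictError invalidates the cached result.
            self._results.pop(key, None)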
async def _upgrade_room(
self,

@@ -375,6 +375,8 @@ class RoomBatchHandler:
# Events are sorted by (topological_ordering, stream_ordering)
# where topological_ordering is just depth.
for (event, context) in reversed(events_to_persist):
# This call can't raise `PartialStateConflictError` since we forbid
# use of the historical batch API during partial state
await self.event_creation_handler.handle_new_client_event(
await self.create_requester_for_user_id_from_app_service(
event.sender, app_service_requester.app_service

@@ -34,7 +34,7 @@ from synapse.events.snapshot import EventContext
from synapse.handlers.profile import MAX_AVATAR_URL_LEN, MAX_DISPLAYNAME_LEN
from synapse.logging import opentracing
from synapse.module_api import NOT_SPAM
from synapse.storage.state import StateFilter
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.types import (
JsonDict,
Requester,

@@ -45,6 +45,7 @@ from synapse.types import (
create_requester,
get_domain_from_id,
)
from synapse.types.state import StateFilter
from synapse.util.async_helpers import Linearizer
from synapse.util.distributor import user_left_room
@@ -392,60 +393,81 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
event_pos = await self.store.get_position_for_event(existing_event_id)
return existing_event_id, event_pos.stream

event, context = await self.event_creation_handler.create_event(
requester,
{
"type": EventTypes.Member,
"content": content,
"room_id": room_id,
"sender": requester.user.to_string(),
"state_key": user_id,
# For backwards compatibility:
"membership": membership,
"origin_server_ts": origin_server_ts,
},
txn_id=txn_id,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
depth=depth,
require_consent=require_consent,
outlier=outlier,
historical=historical,
)

prev_state_ids = await context.get_prev_state_ids(
StateFilter.from_types([(EventTypes.Member, None)])
)

prev_member_event_id = prev_state_ids.get((EventTypes.Member, user_id), None)

if event.membership == Membership.JOIN:
newly_joined = True
if prev_member_event_id:
prev_member_event = await self.store.get_event(prev_member_event_id)
newly_joined = prev_member_event.membership != Membership.JOIN

# Only rate-limit if the user actually joined the room, otherwise we'll end
# up blocking profile updates.
if newly_joined and ratelimit:
await self._join_rate_limiter_local.ratelimit(requester)
await self._join_rate_per_room_limiter.ratelimit(
requester, key=room_id, update=False
# Try several times, it could fail with PartialStateConflictError,
# in handle_new_client_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
event, context = await self.event_creation_handler.create_event(
requester,
{
"type": EventTypes.Member,
"content": content,
"room_id": room_id,
"sender": requester.user.to_string(),
"state_key": user_id,
# For backwards compatibility:
"membership": membership,
"origin_server_ts": origin_server_ts,
},
txn_id=txn_id,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
depth=depth,
require_consent=require_consent,
outlier=outlier,
historical=historical,
)
with opentracing.start_active_span("handle_new_client_event"):
result_event = await self.event_creation_handler.handle_new_client_event(
requester,
events_and_context=[(event, context)],
extra_users=[target],
ratelimit=ratelimit,
)

if event.membership == Membership.LEAVE:
if prev_member_event_id:
prev_member_event = await self.store.get_event(prev_member_event_id)
if prev_member_event.membership == Membership.JOIN:
await self._user_left_room(target, room_id)
prev_state_ids = await context.get_prev_state_ids(
StateFilter.from_types([(EventTypes.Member, None)])
)

prev_member_event_id = prev_state_ids.get(
(EventTypes.Member, user_id), None
)

if event.membership == Membership.JOIN:
newly_joined = True
if prev_member_event_id:
prev_member_event = await self.store.get_event(
prev_member_event_id
)
newly_joined = prev_member_event.membership != Membership.JOIN

# Only rate-limit if the user actually joined the room, otherwise we'll end
# up blocking profile updates.
if newly_joined and ratelimit:
await self._join_rate_limiter_local.ratelimit(requester)
await self._join_rate_per_room_limiter.ratelimit(
requester, key=room_id, update=False
)
with opentracing.start_active_span("handle_new_client_event"):
result_event = (
await self.event_creation_handler.handle_new_client_event(
requester,
events_and_context=[(event, context)],
extra_users=[target],
ratelimit=ratelimit,
)
)

if event.membership == Membership.LEAVE:
if prev_member_event_id:
prev_member_event = await self.store.get_event(
prev_member_event_id
)
if prev_member_event.membership == Membership.JOIN:
await self._user_left_room(target, room_id)

break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass

# we know it was persisted, so should have a stream ordering
assert result_event.internal_metadata.stream_ordering
@@ -1234,6 +1256,8 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
ratelimit: Whether to rate limit this request.
Raises:
SynapseError if there was a problem changing the membership.
PartialStateConflictError: if attempting to persist a partial state event in
a room that has been un-partial stated.
"""
target_user = UserID.from_string(event.state_key)
room_id = event.room_id

@@ -1863,21 +1887,37 @@ class RoomMemberMasterHandler(RoomMemberHandler):
list(previous_membership_event.auth_event_ids()) + prev_event_ids
)

event, context = await self.event_creation_handler.create_event(
requester,
event_dict,
txn_id=txn_id,
prev_event_ids=prev_event_ids,
auth_event_ids=auth_event_ids,
outlier=True,
)
event.internal_metadata.out_of_band_membership = True
# Try several times, it could fail with PartialStateConflictError
# in handle_new_client_event, cf comment in except block.
max_retries = 5
for i in range(max_retries):
try:
event, context = await self.event_creation_handler.create_event(
requester,
event_dict,
txn_id=txn_id,
prev_event_ids=prev_event_ids,
auth_event_ids=auth_event_ids,
outlier=True,
)
event.internal_metadata.out_of_band_membership = True

result_event = (
await self.event_creation_handler.handle_new_client_event(
requester,
events_and_context=[(event, context)],
extra_users=[UserID.from_string(target_user)],
)
)

break
except PartialStateConflictError as e:
# Persisting couldn't happen because the room got un-partial stated
# in the meantime and context needs to be recomputed, so let's do so.
if i == max_retries - 1:
raise e
pass

result_event = await self.event_creation_handler.handle_new_client_event(
requester,
events_and_context=[(event, context)],
extra_users=[UserID.from_string(target_user)],
)
# we know it was persisted, so must have a stream ordering
assert result_event.internal_metadata.stream_ordering
@@ -20,7 +20,6 @@ from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Set,
|
||||
import attr
|
||||
|
||||
from synapse.api.constants import (
|
||||
EventContentFields,
|
||||
EventTypes,
|
||||
HistoryVisibility,
|
||||
JoinRules,
|
||||
@@ -701,13 +700,6 @@ class RoomSummaryHandler:
        # there should always be an entry
        assert stats is not None, "unable to retrieve stats for %s" % (room_id,)

        current_state_ids = await self._storage_controllers.state.get_current_state_ids(
            room_id
        )
        create_event = await self._store.get_event(
            current_state_ids[(EventTypes.Create, "")]
        )

        entry = {
            "room_id": stats["room_id"],
            "name": stats["name"],
@@ -720,7 +712,7 @@ class RoomSummaryHandler:
                stats["history_visibility"] == HistoryVisibility.WORLD_READABLE
            ),
            "guest_can_join": stats["guest_access"] == "can_join",
            "room_type": create_event.content.get(EventContentFields.ROOM_TYPE),
            "room_type": stats["room_type"],
        }

        if self._msc3266_enabled:
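The change reads room_type from the pre-computed room stats row instead of resolving current state and fetching the create event, saving a state lookup per summarised room. A rough sketch of the before/after, with a hypothetical stats dict standing in for the store's row:

# Hypothetical stats row as returned by the room stats store.
stats = {
    "room_id": "!abc:example.org",
    "name": "Example",
    "guest_access": "can_join",
    "room_type": "m.space",
}

entry = {
    "room_id": stats["room_id"],
    "name": stats["name"],
    "guest_can_join": stats["guest_access"] == "can_join",
    # Previously: create_event.content.get(EventContentFields.ROOM_TYPE),
    # which required a current-state lookup plus an event fetch.
    "room_type": stats["room_type"],
}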
@@ -730,7 +722,11 @@ class RoomSummaryHandler:
        # Federation requests need to provide additional information so the
        # requested server is able to filter the response appropriately.
        if for_federation:
            current_state_ids = (
                await self._storage_controllers.state.get_current_state_ids(room_id)
            )
            room_version = await self._store.get_room_version(room_id)

            if await self._event_auth_handler.has_restricted_join_rules(
                current_state_ids, room_version
            ):
@@ -23,8 +23,8 @@ from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import NotFoundError, SynapseError
from synapse.api.filtering import Filter
from synapse.events import EventBase
from synapse.storage.state import StateFilter
from synapse.types import JsonDict, StreamKeyType, UserID
from synapse.types.state import StateFilter
from synapse.visibility import filter_events_for_client

if TYPE_CHECKING:
@@ -31,19 +31,24 @@ from typing import (
import attr
from prometheus_client import Counter

from synapse.api.constants import EventTypes, Membership
from synapse.api.constants import EventContentFields, EventTypes, Membership
from synapse.api.filtering import FilterCollection
from synapse.api.presence import UserPresenceState
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import EventBase
from synapse.handlers.relations import BundledAggregations
from synapse.logging.context import current_context
from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, start_active_span
from synapse.logging.opentracing import (
    SynapseTags,
    log_kv,
    set_tag,
    start_active_span,
    trace,
)
from synapse.push.clientformat import format_push_rules_for_user
from synapse.storage.databases.main.event_push_actions import RoomNotifCounts
from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary
from synapse.storage.roommember import MemberSummary
from synapse.storage.state import StateFilter
from synapse.types import (
    DeviceListUpdates,
    JsonDict,
@@ -55,6 +60,7 @@ from synapse.types import (
    StreamToken,
    UserID,
)
from synapse.types.state import StateFilter
from synapse.util.async_helpers import concurrently_execute
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.caches.lrucache import LruCache
@@ -1528,10 +1534,12 @@ class SyncHandler:
            #
            # If we don't have that info cached then we get all the users that
            # share a room with our user and check if those users have changed.
            changed_users = self.store.get_cached_device_list_changes(
            cache_result = self.store.get_cached_device_list_changes(
                since_token.device_list_key
            )
            if changed_users is not None:
            if cache_result.hit:
                changed_users = cache_result.entities

                result = await self.store.get_rooms_for_users(changed_users)

                for changed_user_id, entries in result.items():
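get_cached_device_list_changes now returns a small result object rather than Optional[...], which lets callers distinguish "cache miss" from "hit with zero changed entities". A sketch of what such a result type can look like; this is an illustrative stand-in, not the real class from Synapse's stream change cache:

from typing import Collection

import attr


@attr.s(auto_attribs=True, frozen=True, slots=True)
class CacheChangesResult:
    """Illustrative stand-in for the cache's result type."""

    # True if the cache covered the requested stream position.
    hit: bool
    # The changed entities; only meaningful when hit is True.
    entities: Collection[str] = ()


def lookup(cache: dict, stream_pos: int) -> CacheChangesResult:
    # A miss (position older than the cache) is not the same thing as
    # a hit that happens to contain no entities.
    if stream_pos not in cache:
        return CacheChangesResult(hit=False)
    return CacheChangesResult(hit=True, entities=cache[stream_pos])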
@@ -1584,6 +1592,7 @@ class SyncHandler:
        else:
            return DeviceListUpdates()

    @trace
    async def _generate_sync_entry_for_to_device(
        self, sync_result_builder: "SyncResultBuilder"
    ) -> None:
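The new @trace decorator runs the handler inside an active tracing span named after the function. A minimal sketch of such a decorator for async functions, assuming a context-manager span helper (the start_active_span below is a printing stand-in for the real opentracing helper):

import functools
from contextlib import contextmanager


@contextmanager
def start_active_span(name: str):
    # Stand-in: the real helper opens an opentracing scope.
    print(f"span start: {name}")
    try:
        yield
    finally:
        print(f"span end: {name}")


def trace(func):
    """Wrap an async function in a span named after it."""

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        with start_active_span(func.__name__):
            return await func(*args, **kwargs)

    return wrapper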
@@ -1603,11 +1612,16 @@ class SyncHandler:
        )

        for message in messages:
            # We pop here as we shouldn't be sending the message ID down
            # `/sync`
            message_id = message.pop("message_id", None)
            if message_id:
                set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)
            log_kv(
                {
                    "event": "to_device_message",
                    "sender": message["sender"],
                    "type": message["type"],
                    EventContentFields.TO_DEVICE_MSGID: message["content"].get(
                        EventContentFields.TO_DEVICE_MSGID
                    ),
                }
            )

        logger.debug(
            "Returning %d to-device messages between %d and %d (current token: %d)",
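The pattern here is strip-then-log: internal identifiers are popped off each message before it goes down /sync, and recorded on the active span instead. A condensed sketch of that step, with the tracing helpers passed in as parameters (set_tag and log_kv stand in for the opentracing functions):

def prepare_for_sync(messages, set_tag, log_kv):
    """Strip internal IDs from to-device messages, recording them for tracing."""
    for message in messages:
        # The internal message ID must not leak into the /sync response.
        message_id = message.pop("message_id", None)
        if message_id:
            set_tag("to_device.message_id", message_id)
        # Log only client-visible metadata about the message.
        log_kv(
            {
                "event": "to_device_message",
                "sender": message["sender"],
                "type": message["type"],
            }
        )
    return messages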
@@ -420,11 +420,11 @@ class TypingWriterHandler(FollowerTypingHandler):
        if last_id == current_id:
            return [], current_id, False

        changed_rooms: Optional[
            Iterable[str]
        ] = self._typing_stream_change_cache.get_all_entities_changed(last_id)
        result = self._typing_stream_change_cache.get_all_entities_changed(last_id)

        if changed_rooms is None:
        if result.hit:
            changed_rooms: Iterable[str] = result.entities
        else:
            changed_rooms = self._room_serials

        rows = []
@@ -577,7 +577,24 @@ def _unrecognised_request_handler(request: Request) -> NoReturn:
    Args:
        request: Unused, but passed in to match the signature of ServletCallback.
    """
    raise UnrecognizedRequestError()
    raise UnrecognizedRequestError(code=404)


class UnrecognizedRequestResource(resource.Resource):
    """
    Similar to twisted.web.resource.NoResource, but returns a JSON 404 with an
    errcode of M_UNRECOGNIZED.
    """

    def render(self, request: SynapseRequest) -> int:
        f = failure.Failure(UnrecognizedRequestError(code=404))
        return_json_error(f, request, None)
        # A response has already been sent but Twisted requires either NOT_DONE_YET
        # or the response bytes as a return value.
        return NOT_DONE_YET

    def getChild(self, name: str, request: Request) -> resource.Resource:
        return self


class RootRedirect(resource.Resource):
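UnrecognizedRequestResource is a catch-all leaf: getChild returns self, so every unknown path beneath it renders the same Matrix-style JSON 404. A hedged sketch of the same idea in plain Twisted, with the Synapse-specific helpers (return_json_error, SynapseRequest) replaced by a direct JSON write:

import json

from twisted.web import resource, server


class JsonNotFound(resource.Resource):
    """Catch-all resource returning a Matrix-style JSON 404."""

    def render(self, request):
        request.setResponseCode(404)
        request.setHeader(b"Content-Type", b"application/json")
        body = {"errcode": "M_UNRECOGNIZED", "error": "Unrecognized request"}
        return json.dumps(body).encode("utf-8")

    def getChild(self, name, request):
        # Swallow deeper path segments too, so /unknown/a/b also 404s as JSON.
        return self


root = resource.Resource()
root.putChild(b"unknown", JsonNotFound())
site = server.Site(root)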
@@ -292,8 +292,15 @@ logger = logging.getLogger(__name__)


class SynapseTags:
    # The message ID of any to_device message processed
    TO_DEVICE_MESSAGE_ID = "to_device.message_id"
    # The message ID of any to_device EDU processed
    TO_DEVICE_EDU_ID = "to_device.edu_id"

    # Details about to-device messages
    TO_DEVICE_TYPE = "to_device.type"
    TO_DEVICE_SENDER = "to_device.sender"
    TO_DEVICE_RECIPIENT = "to_device.recipient"
    TO_DEVICE_RECIPIENT_DEVICE = "to_device.recipient_device"
    TO_DEVICE_MSGID = "to_device.msgid"  # client-generated ID

    # Whether the sync response has new data to be returned to the client.
    SYNC_RESULT = "sync.new_data"
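Keeping the tag names as class constants means every call site attaches span attributes under the same keys. A small usage sketch, with set_tag standing in for the opentracing helper and the content key for the client-generated message ID treated as a hypothetical:

class SynapseTagsExample:
    # Mirrors the constants above, for illustration only.
    TO_DEVICE_TYPE = "to_device.type"
    TO_DEVICE_SENDER = "to_device.sender"
    TO_DEVICE_MSGID = "to_device.msgid"


def tag_to_device_span(set_tag, message: dict) -> None:
    """Attach consistent to-device attributes to the current span."""
    set_tag(SynapseTagsExample.TO_DEVICE_TYPE, message["type"])
    set_tag(SynapseTagsExample.TO_DEVICE_SENDER, message["sender"])
    # "msgid" here is a hypothetical content key for the client-generated ID.
    msgid = message.get("content", {}).get("msgid")
    if msgid:
        set_tag(SynapseTagsExample.TO_DEVICE_MSGID, msgid)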
@@ -111,7 +111,6 @@ from synapse.storage.background_updates import (
)
from synapse.storage.database import DatabasePool, LoggingTransaction
from synapse.storage.databases.main.roommember import ProfileInfo
from synapse.storage.state import StateFilter
from synapse.types import (
    DomainSpecificString,
    JsonDict,
@@ -124,6 +123,7 @@ from synapse.types import (
    UserProfile,
    create_requester,
)
from synapse.types.state import StateFilter
from synapse.util import Clock
from synapse.util.async_helpers import maybe_awaitable
from synapse.util.caches.descriptors import CachedFunction, cached
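Several of these hunks are the same mechanical change: StateFilter moved from synapse.storage.state to synapse.types.state. Module code (for example a plugin) that has to work across both layouts can hedge with a fallback import, a pattern sketched below under the assumption that one of the two locations exists:

try:
    # Newer module layout.
    from synapse.types.state import StateFilter
except ImportError:
    # Older layout, kept as a fallback.
    from synapse.storage.state import StateFilter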
Some files were not shown because too many files have changed in this diff.