Compare commits

5 Commits
mv/unbind-...erikj/tree

| Author | SHA1 | Date |
|---|---|---|
|  | dcd574b89c |  |
|  | c03785e121 |  |
|  | b9cdf3d85e |  |
|  | 18ac015ecd |  |
|  | 4874d6320a |  |
@@ -21,7 +21,7 @@ endblock

block Install Complement Dependencies
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
-go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
+go get -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
endblock

block Install custom gotestfmt template
10 .github/workflows/tests.yml (vendored)
@@ -197,12 +197,8 @@ jobs:
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
        if: ${{ matrix.job.postgres-version }}
-        # 1. Mount postgres data files onto a tmpfs in-memory filesystem to reduce overhead of docker's overlayfs layer.
-        # 2. Expose the unix socket for postgres. This removes latency of using docker-proxy for connections.
        run: |
          docker run -d -p 5432:5432 \
-            --tmpfs /var/lib/postgres:rw,size=6144m \
-            --mount 'type=bind,src=/var/run/postgresql,dst=/var/run/postgresql' \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.job.postgres-version }}
@@ -224,10 +220,10 @@
        if: ${{ matrix.job.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
-      - run: poetry run trial --jobs=6 tests
+      - run: poetry run trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
-          SYNAPSE_POSTGRES_HOST: /var/run/postgresql
+          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
@@ -296,7 +292,7 @@
          python-version: '3.7'
          extras: "all test"

-      - run: poetry run trial -j6 tests
+      - run: poetry run trial -j2 tests
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
12 CHANGES.md
@@ -1,14 +1,8 @@
-Synapse 1.73.0 (2022-12-06)
-===========================
-
-Please note that legacy Prometheus metric names have been removed in this release; see [the upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.73/docs/upgrade.md#legacy-prometheus-metric-names-have-now-been-removed) for more details.
-
-No significant changes since 1.73.0rc2.


Synapse 1.73.0rc2 (2022-12-01)
==============================

Please note that legacy Prometheus metric names have been removed in this release; see [the upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.73/docs/upgrade.md#legacy-prometheus-metric-names-have-now-been-removed) for more details.

Bugfixes
--------

@@ -23,7 +17,7 @@ Features

- Speed-up `/messages` with `filter_events_for_client` optimizations. ([\#14527](https://github.com/matrix-org/synapse/issues/14527))
- Improve DB performance by reducing amount of data that gets read in `device_lists_changes_in_room`. ([\#14534](https://github.com/matrix-org/synapse/issues/14534))
-- Add support for handling avatar in SSO OIDC login. Contributed by @ashfame. ([\#13917](https://github.com/matrix-org/synapse/issues/13917))
+- Adds support for handling avatar in SSO login. Contributed by @ashfame. ([\#13917](https://github.com/matrix-org/synapse/issues/13917))
- Move MSC3030 `/timestamp_to_event` endpoints to stable `v1` location (`/_matrix/client/v1/rooms/<roomID>/timestamp_to_event?ts=<timestamp>&dir=<direction>`, `/_matrix/federation/v1/timestamp_to_event/<roomID>?ts=<timestamp>&dir=<direction>`). ([\#14471](https://github.com/matrix-org/synapse/issues/14471))
- Reduce database load of [Client-Server endpoints](https://spec.matrix.org/v1.5/client-server-api/#aggregations) which return bundled aggregations. ([\#14491](https://github.com/matrix-org/synapse/issues/14491), [\#14508](https://github.com/matrix-org/synapse/issues/14508), [\#14510](https://github.com/matrix-org/synapse/issues/14510))
- Add unstable support for an Extensible Events room version (`org.matrix.msc1767.10`) via [MSC1767](https://github.com/matrix-org/matrix-spec-proposals/pull/1767), [MSC3931](https://github.com/matrix-org/matrix-spec-proposals/pull/3931), [MSC3932](https://github.com/matrix-org/matrix-spec-proposals/pull/3932), and [MSC3933](https://github.com/matrix-org/matrix-spec-proposals/pull/3933). ([\#14520](https://github.com/matrix-org/synapse/issues/14520), [\#14521](https://github.com/matrix-org/synapse/issues/14521), [\#14524](https://github.com/matrix-org/synapse/issues/14524))
8 Cargo.lock (generated)
@@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"

[[package]]
name = "serde"
-version = "1.0.150"
+version = "1.0.148"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e326c9ec8042f1b5da33252c8a37e9ffbd2c9bef0155215b6e6c80c790e05f91"
+checksum = "e53f64bb4ba0191d6d0676e1b141ca55047d83b74f5607e6d8eb88126c52c2dc"
dependencies = [
 "serde_derive",
]

[[package]]
name = "serde_derive"
-version = "1.0.150"
+version = "1.0.148"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42a3df25b0713732468deadad63ab9da1f1fd75a48a15024b50363f128db627e"
+checksum = "a55492425aa53521babf6137309e7d34c20bbfbbfcfe2c7f3a047fd1f6b92c0c"
dependencies = [
 "proc-macro2",
 "quote",
@@ -3,3 +3,7 @@

[workspace]
members = ["rust"]
+
+[profile.dbgrelease]
+inherits = "release"
+debug = true
@@ -1 +0,0 @@
-Add a module callback for unbinding a 3PID.
@@ -1 +0,0 @@
-Fix a long-standing bug where a device list update might not be sent to clients in certain circumstances.
@@ -1 +0,0 @@
-Improve user search for international display names.
@@ -1 +0,0 @@
-Faster remote room joins: stream the un-partial-stating of rooms over replication.
@@ -1 +0,0 @@
-Faster remote room joins: stream the un-partial-stating of rooms over replication.
@@ -1 +0,0 @@
-Add `--editable` flag to `complement.sh` which uses an editable install of Synapse for faster turn-around times whilst developing iteratively.
@@ -1 +0,0 @@
-Remove old, incorrect minimum postgres version note and replace with a link to the [Dependency Deprecation Policy](https://matrix-org.github.io/synapse/v1.73/deprecation_policy.html).
@@ -1 +0,0 @@
-Add Single-Sign On setup instructions for Mastodon-based instances.
@@ -1 +0,0 @@
-Improve opentracing and logging for to-device message handling.
@@ -1 +0,0 @@
-Suppress a spurious warning when `POST /rooms/<room_id>/<membership>/`, `POST /join/<room_id_or_alias`, or the unspecced `PUT /join/<room_id_or_alias>/<txn_id>` receive an empty HTTP request body.
@@ -1 +0,0 @@
-Fix a long-standing bug where a device list update might not be sent to clients in certain circumstances.
@@ -1 +0,0 @@
-Alter some unit test environment parameters to decrease time spent running tests.
@@ -1 +0,0 @@
-Switch to Go recommended installation method for `gotestfmt` template in CI.
@@ -1 +0,0 @@
-Bump phonenumbers from 8.13.0 to 8.13.1.
@@ -1 +0,0 @@
-Bump types-setuptools from 65.5.0.3 to 65.6.0.1.
@@ -1 +0,0 @@
-Bump twine from 4.0.1 to 4.0.2.
@@ -1 +0,0 @@
-Bump types-requests from 2.28.11.2 to 2.28.11.5.
@@ -1 +0,0 @@
-Bump cryptography from 38.0.3 to 38.0.4.
@@ -1 +0,0 @@
-Add new `push.enabled` config option to allow opting out of push notification calculation.
@@ -1 +0,0 @@
-Return spec-compliant JSON errors when unknown endpoints are requested.
@@ -1 +0,0 @@
-Return spec-compliant JSON errors when unknown endpoints are requested.
@@ -1 +0,0 @@
-Fix html templates to load images only on HTTPS. Contributed by @ashfame.
@@ -1 +0,0 @@
-Fix a long-standing bug where the user directory would return 1 more row than requested.
@@ -1 +0,0 @@
-Reject invalid read receipt requests with empty room or event IDs. Contributed by Nick @ Beeper (@fizzadar).
@@ -1 +0,0 @@
-Change `turn_allow_guests` example value to lowercase `true`.
@@ -1 +0,0 @@
-Remove useless cargo install with apt from Dockerfile.
@@ -1 +0,0 @@
-Fix a bug introduced in v1.67.0 where not specifying a config file or a server URL would lead to the `register_new_matrix_user` script failing.
@@ -1 +0,0 @@
-Fix a long-standing bug where the user directory and room/user stats might be out of sync.
@@ -1 +0,0 @@
-Fix a long-standing bug where the user directory and room/user stats might be out of sync.
@@ -1 +0,0 @@
-Bump certifi from 2021.10.8 to 2022.12.7.
@@ -1 +0,0 @@
-Add missing type hints.
@@ -1,2 +0,0 @@
-Fix a bug introduced in Synapse 1.72.0 where the background updates to add non-thread unique indexes on receipts would fail if they were previously interrupted.
-
@@ -1 +0,0 @@
-Bump flake8-bugbear from 22.10.27 to 22.12.6.
@@ -1 +0,0 @@
-Bump packaging from 21.3 to 22.0.
@@ -1 +0,0 @@
-Bump types-pillow from 9.3.0.1 to 9.3.0.4.
@@ -1 +0,0 @@
-Bump serde from 1.0.148 to 1.0.150.
@@ -1 +0,0 @@
-Bump phonenumbers from 8.13.1 to 8.13.2.
@@ -1 +0,0 @@
-Bump authlib from 1.1.0 to 1.2.0.
@@ -1 +0,0 @@
-(remove from changelog: unreleased) Revert the deletion of stale devices due to performance issues.
@@ -1 +0,0 @@
-Move `StateFilter` to `synapse.types`.
13 debian/changelog (vendored)
@@ -1,16 +1,3 @@
-matrix-synapse-py3 (1.74.0~rc1) UNRELEASED; urgency=medium
-
-  * New dependency on libicu-dev to provide improved results for user
-    search.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Dec 2022 15:28:10 +0000
-
-matrix-synapse-py3 (1.73.0) stable; urgency=medium
-
-  * New Synapse release 1.73.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Dec 2022 11:48:56 +0000
-
matrix-synapse-py3 (1.73.0~rc2) stable; urgency=medium

  * New Synapse release 1.73.0rc2.
2 debian/control (vendored)
@@ -8,8 +8,6 @@ Build-Depends:
 dh-virtualenv (>= 1.1),
 libsystemd-dev,
 libpq-dev,
- libicu-dev,
- pkg-config,
 lsb-release,
 python3-dev,
 python3,
@@ -43,7 +43,7 @@ RUN \
  --mount=type=cache,target=/var/cache/apt,sharing=locked \
  --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update -qq && apt-get install -yqq \
-  build-essential git libffi-dev libssl-dev \
+  build-essential cargo git libffi-dev libssl-dev \
  && rm -rf /var/lib/apt/lists/*

# We install poetry in its own build stage to avoid its dependencies conflicting with
@@ -97,8 +97,6 @@ RUN \
  zlib1g-dev \
  git \
  curl \
-  libicu-dev \
-  pkg-config \
  && rm -rf /var/lib/apt/lists/*

@@ -84,8 +84,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
  python3-venv \
  sqlite3 \
  libpq-dev \
-  libicu-dev \
-  pkg-config \
  xmlsec1

# Install rust and ensure it's in the PATH
@@ -1,7 +1,6 @@
# syntax=docker/dockerfile:1

ARG SYNAPSE_VERSION=latest
-ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION

# first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
@@ -24,7 +23,7 @@ FROM debian:bullseye-slim AS deps_base
FROM redis:6-bullseye AS redis_base

# now build the final image, based on the regular Synapse docker image
-FROM $FROM
+FROM matrixdotorg/synapse:$SYNAPSE_VERSION

# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
@@ -7,9 +7,8 @@
# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse

ARG SYNAPSE_VERSION=latest
-ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION

-FROM $FROM
+FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION
# First of all, we copy postgres server from the official postgres image,
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.
@@ -1,75 +0,0 @@
# syntax=docker/dockerfile:1
# This dockerfile builds an editable install of Synapse.
#
# Used by `complement.sh`. Not suitable for production use.

ARG PYTHON_VERSION=3.9

###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bullseye here because this could change upstream
# and other Dockerfiles used for testing are expecting bullseye.
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye

# Install Rust and other dependencies (stolen from normal Dockerfile)
# install the OS build deps
RUN \
  --mount=type=cache,target=/var/cache/apt,sharing=locked \
  --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update -qq && apt-get install -yqq \
  build-essential \
  libffi-dev \
  libjpeg-dev \
  libpq-dev \
  libssl-dev \
  libwebp-dev \
  libxml++2.6-dev \
  libxslt1-dev \
  openssl \
  zlib1g-dev \
  git \
  curl \
  gosu \
  libjpeg62-turbo \
  libpq5 \
  libwebp6 \
  xmlsec1 \
  libjemalloc2 \
  && rm -rf /var/lib/apt/lists/*
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal


# Make a base copy of the editable source tree, so that we have something to
# install and build now — even though it's going to be covered up by a mount
# at runtime.
COPY synapse /editable-src/synapse/
COPY rust /editable-src/rust/
# ... and what we need to `pip install`.
COPY pyproject.toml poetry.lock README.rst build_rust.py Cargo.toml Cargo.lock /editable-src/

RUN pip install poetry
RUN poetry config virtualenvs.create false
RUN cd /editable-src && poetry install --extras all

# Make copies of useful things for inspection:
# - the Rust module (must be copied to the editable source tree before startup)
# - poetry.lock is useful for checking if dependencies have changed.
RUN cp /editable-src/synapse/synapse_rust.abi3.so /synapse_rust.abi3.so.bak
RUN cp /editable-src/poetry.lock /poetry.lock.bak


### Extra setup from original Dockerfile
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf

EXPOSE 8008/tcp 8009/tcp 8448/tcp

ENTRYPOINT ["/start.py"]

HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
  CMD curl -fSs http://localhost:8008/health || exit 1
@@ -265,33 +265,6 @@ server_.

If multiple modules implement this callback, Synapse runs them all in order.

-### `unbind_threepid`
-
-_First introduced in Synapse v1.74.0_
-
-```python
-async def unbind_threepid(
-    user_id: str, medium: str, address: str, identity_server: str
-) -> Tuple[bool, bool]:
-```
-
-Called before a threepid association is removed.
-
-The module is given the Matrix ID of the user from whom an association is to be removed,
-as well as the medium (`email` or `msisdn`), the address of the third-party identifier, and
-the identity server where the threepid was successfully registered.
-
-A module can hence do its own custom unbinding, if, for example, it also registered custom
-binding logic with `on_threepid_bind`.
-
-It should return a tuple of two booleans:
-- the first should be `True` if the call to the identity server succeeded, and `False` if
-the identity server doesn't support unbinding (or no identity server was found to contact).
-- the second should be `True` if the unbind should stop there. In that case no other module's
-unbind will be called, and the default unbind made to the identity server that was used on bind will also be
-skipped. In any case the mapping will be removed from Synapse's 3PID remote table,
-unless an Exception was raised at some point.
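
For illustration, here is a minimal sketch of a module implementing this callback. The module name is hypothetical, and the registration hook is an assumption: the diff gives the callback's signature and semantics, but not how it is registered.

```python
from typing import Tuple


class ExampleUnbindModule:
    """Hypothetical module exercising the `unbind_threepid` callback."""

    def __init__(self, config: dict, api):
        self._api = api
        # Assumption: registered like the other callbacks documented on this
        # page; the exact registration call is not shown in the diff above.
        api.register_third_party_rules_callbacks(
            unbind_threepid=self.unbind_threepid,
        )

    async def unbind_threepid(
        self, user_id: str, medium: str, address: str, identity_server: str
    ) -> Tuple[bool, bool]:
        if medium != "email":
            # We didn't contact the identity server, and we don't stop other
            # modules (or the default unbind) from running.
            return False, False

        # ... perform custom unbinding against `identity_server` here ...

        # We contacted the identity server successfully, and no further
        # unbind handling is needed.
        return True, True
```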

## Example

The example below is a module that implements the third-party rules callback
@@ -590,44 +590,3 @@ oidc_providers:
    display_name_template: "{{ user.first_name }} {{ user.last_name }}"
    email_template: "{{ user.email }}"
```

-### Mastodon
-
-[Mastodon](https://docs.joinmastodon.org/) instances provide an [OAuth API](https://docs.joinmastodon.org/spec/oauth/), allowing those instances to be used as a single sign-on provider for Synapse.
-
-The first step is to register Synapse as an application with your Mastodon instance, using the [Create an application API](https://docs.joinmastodon.org/methods/apps/#create) (see also [here](https://docs.joinmastodon.org/client/token/)). There are several ways to do this, but in the example below we are using CURL.
-
-This example assumes that:
-* the Mastodon instance website URL is `https://your.mastodon.instance.url`, and
-* Synapse will be registered as an app named `my_synapse_app`.
-
-Send the following request, substituting the value of `synapse_public_baseurl` from your Synapse installation.
-```sh
-curl -d "client_name=my_synapse_app&redirect_uris=https://[synapse_public_baseurl]/_synapse/client/oidc/callback" -X POST https://your.mastodon.instance.url/api/v1/apps
-```
-
-You should receive a response similar to the following. Make sure to save it.
-```json
-{"client_id":"someclientid_123","client_secret":"someclientsecret_123","id":"12345","name":"my_synapse_app","redirect_uri":"https://[synapse_public_baseurl]/_synapse/client/oidc/callback","website":null,"vapid_key":"somerandomvapidkey_123"}
-```
-
-As the Synapse login mechanism needs an attribute to uniquely identify users, and Mastodon's endpoint does not return a `sub` property, an alternative `subject_claim` has to be set. Your Synapse configuration should include the following:
-
-```yaml
-oidc_providers:
-  - idp_id: my_mastodon
-    idp_name: "Mastodon Instance Example"
-    discover: false
-    issuer: "https://your.mastodon.instance.url/@admin"
-    client_id: "someclientid_123"
-    client_secret: "someclientsecret_123"
-    authorization_endpoint: "https://your.mastodon.instance.url/oauth/authorize"
-    token_endpoint: "https://your.mastodon.instance.url/oauth/token"
-    userinfo_endpoint: "https://your.mastodon.instance.url/api/v1/accounts/verify_credentials"
-    scopes: ["read"]
-    user_mapping_provider:
-      config:
-        subject_claim: "id"
-```
-
-Note that the fields `client_id` and `client_secret` are taken from the CURL response above.
@@ -1,7 +1,6 @@
# Using Postgres

-The minimum supported version of PostgreSQL is determined by the [Dependency
-Deprecation Policy](deprecation_policy.md).
+Synapse supports PostgreSQL versions 10 or later.

## Install postgres client libraries
@@ -38,7 +38,7 @@ As an example, here is the relevant section of the config file for `matrix.org`.
turn_uris: [ "turn:turn.matrix.org?transport=udp", "turn:turn.matrix.org?transport=tcp" ]
turn_shared_secret: "n0t4ctuAllymatr1Xd0TorgSshar3d5ecret4obvIousreAsons"
turn_user_lifetime: 86400000
-turn_allow_guests: true
+turn_allow_guests: True

After updating the homeserver configuration, you must restart synapse:
@@ -79,7 +79,7 @@ Here we can see that the request has been tagged with `GET-37`. (The tag depends
grep 'GET-37' homeserver.log
```

-If you want to paste that output into a github issue or matrix room, please remember to surround it with triple-backticks (```) to make it legible (see [quoting code](https://help.github.com/en/articles/basic-writing-and-formatting-syntax#quoting-code)).
+If you want to paste that output into a github issue or matrix room, please remember to surround it with triple-backticks (```) to make it legible (see https://help.github.com/en/articles/basic-writing-and-formatting-syntax#quoting-code).


What do all those fields in the 'Processed' line mean?
@@ -3355,7 +3355,7 @@ Configuration settings related to push notifications
This setting defines options for push notifications.

This option has a number of sub-options. They are as follows:
-* `enabled`: Enables or disables push notification calculation. Note, disabling this will also
+* `enable_push`: Enables or disables push notification calculation. Note, disabling this will also
  stop unread counts being calculated for rooms. This mode of operation is intended
  for homeservers which may only have bots or appservice users connected, or are otherwise
  not interested in push/unread counters. This is enabled by default.
@@ -3379,7 +3379,7 @@ This option has a number of sub-options. They are as follows:
Example configuration:
```yaml
push:
-  enabled: true
+  enable_push: true
  include_content: false
  group_unread_count_by_room: false
```
14 mypy.ini
@@ -88,9 +88,6 @@ disallow_untyped_defs = False
[mypy-tests.*]
disallow_untyped_defs = False

-[mypy-tests.handlers.test_sso]
-disallow_untyped_defs = True
-
[mypy-tests.handlers.test_user_directory]
disallow_untyped_defs = True

@@ -106,7 +103,16 @@ disallow_untyped_defs = True
[mypy-tests.state.test_profile]
disallow_untyped_defs = True

-[mypy-tests.storage.*]
+[mypy-tests.storage.test_id_generators]
disallow_untyped_defs = True

+[mypy-tests.storage.test_profile]
+disallow_untyped_defs = True
+
+[mypy-tests.handlers.test_sso]
+disallow_untyped_defs = True
+
+[mypy-tests.storage.test_user_directory]
+disallow_untyped_defs = True
+
[mypy-tests.rest.*]
150 poetry.lock (generated)
@@ -13,8 +13,8 @@ tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900
tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]

[[package]]
-name = "authlib"
-version = "1.2.0"
+name = "Authlib"
+version = "1.1.0"
description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
category = "main"
optional = true
@@ -106,11 +106,11 @@ frozendict = ["frozendict (>=1.0)"]

[[package]]
name = "certifi"
-version = "2022.12.7"
+version = "2021.10.8"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = "*"

[[package]]
name = "cffi"
@@ -186,7 +186,7 @@ python-versions = "*"

[[package]]
name = "cryptography"
-version = "38.0.4"
+version = "38.0.3"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
category = "main"
optional = false
@@ -260,7 +260,7 @@ pyflakes = ">=2.5.0,<2.6.0"

[[package]]
name = "flake8-bugbear"
-version = "22.12.6"
+version = "22.10.27"
description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
category = "dev"
optional = false
@@ -633,11 +633,14 @@ tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pyte

[[package]]
name = "packaging"
-version = "22.0"
+version = "21.3"
description = "Core utilities for Python packages"
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.6"
+
+[package.dependencies]
+pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"

[[package]]
name = "parameterized"
@@ -660,7 +663,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"

[[package]]
name = "phonenumbers"
-version = "8.13.2"
+version = "8.13.0"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
category = "main"
optional = false
@@ -834,14 +837,6 @@ category = "dev"
optional = false
python-versions = ">=3.5"

-[[package]]
-name = "pyicu"
-version = "2.10.2"
-description = "Python extension wrapping the ICU C++ API"
-category = "main"
-optional = true
-python-versions = "*"
-
[[package]]
name = "pyjwt"
version = "2.4.0"
@@ -906,6 +901,17 @@ cryptography = ">=38.0.0,<39"
docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"]
test = ["flaky", "pretend", "pytest (>=3.0.1)"]

+[[package]]
+name = "pyparsing"
+version = "3.0.7"
+description = "Python parsing module"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+diagrams = ["jinja2", "railroad-diagrams"]
+
[[package]]
name = "pyrsistent"
version = "0.18.1"
@@ -1289,7 +1295,7 @@ docs = ["sphinx (>=1.4.8)"]

[[package]]
name = "twine"
-version = "4.0.2"
+version = "4.0.1"
description = "Collection of utilities for publishing packages on PyPI"
category = "dev"
optional = false
@@ -1434,7 +1440,7 @@ python-versions = "*"

[[package]]
name = "types-pillow"
-version = "9.3.0.4"
+version = "9.3.0.1"
description = "Typing stubs for Pillow"
category = "dev"
optional = false
@@ -1469,7 +1475,7 @@ python-versions = "*"

[[package]]
name = "types-requests"
-version = "2.28.11.5"
+version = "2.28.11.2"
description = "Typing stubs for requests"
category = "dev"
optional = false
@@ -1480,7 +1486,7 @@ types-urllib3 = "<1.27"

[[package]]
name = "types-setuptools"
-version = "65.6.0.1"
+version = "65.5.0.3"
description = "Typing stubs for setuptools"
category = "dev"
optional = false
@@ -1616,7 +1622,7 @@ docs = ["Sphinx", "repoze.sphinx.autointerface"]
test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"]

[extras]
-all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler", "pyicu"]
+all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "txredisapi", "hiredis", "Pympler"]
cache-memory = ["Pympler"]
jwt = ["authlib"]
matrix-synapse-ldap3 = ["matrix-synapse-ldap3"]
@@ -1629,21 +1635,20 @@ sentry = ["sentry-sdk"]
systemd = ["systemd-python"]
test = ["parameterized", "idna"]
url-preview = ["lxml"]
-user-search = ["pyicu"]

[metadata]
lock-version = "1.1"
python-versions = "^3.7.1"
-content-hash = "f20007013f33bc35a01e412c48adc62a936030f3074e06286674c5ad7f44d300"
+content-hash = "27811bd21d56ceeb0f68ded5a00375efcd1a004928f0736f5b02927ce8594cb0"
[metadata.files]
attrs = [
    {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
    {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
]
-authlib = [
-    {file = "Authlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a"},
-    {file = "Authlib-1.2.0.tar.gz", hash = "sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d"},
+Authlib = [
+    {file = "Authlib-1.1.0-py2.py3-none-any.whl", hash = "sha256:be4b6a1dea51122336c210a6945b27a105b9ac572baffd15b07bcff4376c1523"},
+    {file = "Authlib-1.1.0.tar.gz", hash = "sha256:0a270c91409fc2b7b0fbee6996e09f2ee3187358762111a9a4225c874b94e891"},
]
automat = [
    {file = "Automat-22.10.0-py2.py3-none-any.whl", hash = "sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180"},
@@ -1704,8 +1709,8 @@ canonicaljson = [
    {file = "canonicaljson-1.6.4.tar.gz", hash = "sha256:6c09b2119511f30eb1126cfcd973a10824e20f1cfd25039cde3d1218dd9c8d8f"},
]
certifi = [
-    {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
-    {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
+    {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
+    {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
]
cffi = [
    {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
@@ -1783,32 +1788,32 @@ constantly = [
    {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"},
]
cryptography = [
-    {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70"},
-    {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb"},
-    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d"},
-    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1"},
-    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8"},
-    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db"},
-    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b"},
-    {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c"},
-    {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00"},
-    {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0"},
-    {file = "cryptography-38.0.4-cp36-abi3-win32.whl", hash = "sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744"},
-    {file = "cryptography-38.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d"},
-    {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353"},
-    {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7"},
-    {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd"},
-    {file = "cryptography-38.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2"},
-    {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b"},
-    {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6"},
-    {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876"},
-    {file = "cryptography-38.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b"},
-    {file = "cryptography-38.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285"},
-    {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b"},
-    {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083"},
-    {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee"},
-    {file = "cryptography-38.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9"},
-    {file = "cryptography-38.0.4.tar.gz", hash = "sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290"},
+    {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320"},
+    {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722"},
+    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f"},
+    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828"},
+    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959"},
+    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2"},
+    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c"},
+    {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0"},
+    {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748"},
+    {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146"},
+    {file = "cryptography-38.0.3-cp36-abi3-win32.whl", hash = "sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0"},
+    {file = "cryptography-38.0.3-cp36-abi3-win_amd64.whl", hash = "sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220"},
+    {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd"},
+    {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55"},
+    {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b"},
+    {file = "cryptography-38.0.3-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36"},
+    {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d"},
+    {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7"},
+    {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249"},
+    {file = "cryptography-38.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50"},
+    {file = "cryptography-38.0.3-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0"},
+    {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8"},
+    {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436"},
+    {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548"},
+    {file = "cryptography-38.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a"},
+    {file = "cryptography-38.0.3.tar.gz", hash = "sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd"},
]
defusedxml = [
    {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
@@ -1831,8 +1836,8 @@ flake8 = [
    {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"},
]
flake8-bugbear = [
-    {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"},
-    {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"},
+    {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"},
+    {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"},
]
flake8-comprehensions = [
    {file = "flake8-comprehensions-3.10.1.tar.gz", hash = "sha256:412052ac4a947f36b891143430fef4859705af11b2572fbb689f90d372cf26ab"},
@@ -2241,8 +2246,8 @@ opentracing = [
    {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
]
packaging = [
-    {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"},
-    {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"},
+    {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
+    {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
parameterized = [
    {file = "parameterized-0.8.1-py2.py3-none-any.whl", hash = "sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9"},
@@ -2253,8 +2258,8 @@ pathspec = [
    {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
phonenumbers = [
-    {file = "phonenumbers-8.13.2-py2.py3-none-any.whl", hash = "sha256:884b26f775205261f4dc861371dce217c1661a4942fb3ec3624e290fb51869bf"},
-    {file = "phonenumbers-8.13.2.tar.gz", hash = "sha256:0179f688d48c0e7e161eb7b9d86d587940af1f5174f97c1fdfd893c599c0d94a"},
+    {file = "phonenumbers-8.13.0-py2.py3-none-any.whl", hash = "sha256:dbaea9e4005a976bcf18fbe2bb87cb9cd0a3f119136f04188ac412d7741cebf0"},
+    {file = "phonenumbers-8.13.0.tar.gz", hash = "sha256:93745d7afd38e246660bb601b07deac54eeb76c8e5e43f5e83333b0383a0a1e4"},
]
pillow = [
    {file = "Pillow-9.3.0-1-cp37-cp37m-win32.whl", hash = "sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74"},
@@ -2422,9 +2427,6 @@ pygments = [
    {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"},
    {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"},
]
-pyicu = [
-    {file = "PyICU-2.10.2.tar.gz", hash = "sha256:0c3309eea7fab6857507ace62403515b60fe096cbfb4f90d14f55ff75c5441c1"},
-]
pyjwt = [
    {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"},
    {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"},
@@ -2453,6 +2455,10 @@ pyopenssl = [
    {file = "pyOpenSSL-22.1.0-py3-none-any.whl", hash = "sha256:b28437c9773bb6c6958628cf9c3bebe585de661dba6f63df17111966363dd15e"},
    {file = "pyOpenSSL-22.1.0.tar.gz", hash = "sha256:7a83b7b272dd595222d672f5ce29aa030f1fb837630ef229f62e72e395ce8968"},
]
+pyparsing = [
+    {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
+    {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
+]
pyrsistent = [
    {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"},
    {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"},
@@ -2723,8 +2729,8 @@ treq = [
    {file = "treq-22.2.0.tar.gz", hash = "sha256:df757e3f141fc782ede076a604521194ffcb40fa2645cf48e5a37060307f52ec"},
]
twine = [
-    {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"},
-    {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"},
+    {file = "twine-4.0.1-py3-none-any.whl", hash = "sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e"},
+    {file = "twine-4.0.1.tar.gz", hash = "sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0"},
]
twisted = [
    {file = "Twisted-22.10.0-py3-none-any.whl", hash = "sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0"},
@@ -2803,8 +2809,8 @@ types-opentracing = [
    {file = "types_opentracing-2.4.10-py3-none-any.whl", hash = "sha256:66d9cfbbdc4a6f8ca8189a15ad26f0fe41cee84c07057759c5d194e2505b84c2"},
]
types-pillow = [
-    {file = "types-Pillow-9.3.0.4.tar.gz", hash = "sha256:c18d466dc18550d96b8b4a279ff94f0cbad696825b5ad55466604f1daf5709de"},
-    {file = "types_Pillow-9.3.0.4-py3-none-any.whl", hash = "sha256:98b8484ff343676f6f7051682a6cfd26896e993e86b3ce9badfa0ec8750f5405"},
+    {file = "types-Pillow-9.3.0.1.tar.gz", hash = "sha256:f3b7cada3fa496c78d75253c6b1f07a843d625f42e5639b320a72acaff6f7cfb"},
+    {file = "types_Pillow-9.3.0.1-py3-none-any.whl", hash = "sha256:79837755fe9659f29efd1016e9903ac4a500e0c73260483f07296bd6ca47668b"},
]
types-psycopg2 = [
    {file = "types-psycopg2-2.9.21.2.tar.gz", hash = "sha256:bff045579642ce00b4a3c8f2e401b7f96dfaa34939f10be64b0dd3b53feca57d"},
@@ -2819,12 +2825,12 @@ types-pyyaml = [
    {file = "types_PyYAML-6.0.12.2-py3-none-any.whl", hash = "sha256:1e94e80aafee07a7e798addb2a320e32956a373f376655128ae20637adb2655b"},
]
types-requests = [
-    {file = "types-requests-2.28.11.5.tar.gz", hash = "sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a"},
-    {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"},
+    {file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"},
+    {file = "types_requests-2.28.11.2-py3-none-any.whl", hash = "sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"},
]
types-setuptools = [
-    {file = "types-setuptools-65.6.0.1.tar.gz", hash = "sha256:a03cf72f336929c9405f485dd90baef31a401776675f785f69a5a519f0b099ca"},
-    {file = "types_setuptools-65.6.0.1-py3-none-any.whl", hash = "sha256:c957599502195ab98e90f0560466fa963f6a23373905e6d4e1772dbfaf1e44b7"},
+    {file = "types-setuptools-65.5.0.3.tar.gz", hash = "sha256:17769171f5f2a2dc69b25c0d3106552a5cda767bbf6b36cb6212b26dae5aa9fc"},
+    {file = "types_setuptools-65.5.0.3-py3-none-any.whl", hash = "sha256:9254c32b0cc91c486548e7d7561243b5bd185402a383e93c6691e1b9bc8d86e2"},
]
types-urllib3 = [
    {file = "types-urllib3-1.26.10.tar.gz", hash = "sha256:a26898f530e6c3f43f25b907f2b884486868ffd56a9faa94cbf9b3eb6e165d6a"},
@@ -57,7 +57,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
-version = "1.73.0"
+version = "1.73.0rc2"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -141,8 +141,7 @@ pyasn1 = ">=0.1.9"
pyasn1-modules = ">=0.0.7"
bcrypt = ">=3.1.7"
Pillow = ">=5.4.0"
-# We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
-sortedcontainers = ">=1.5.2"
+sortedcontainers = ">=1.4.4"
pymacaroons = ">=0.13.0"
msgpack = ">=0.5.2"
phonenumbers = ">=8.2.0"
@@ -208,7 +207,6 @@ hiredis = { version = "*", optional = true }
Pympler = { version = "*", optional = true }
parameterized = { version = ">=0.7.4", optional = true }
idna = { version = ">=2.5", optional = true }
-pyicu = { version = ">=2.10.2", optional = true }

[tool.poetry.extras]
# NB: Packages that should be part of `pip install matrix-synapse[all]` need to be specified
@@ -231,10 +229,6 @@ redis = ["txredisapi", "hiredis"]
# Required to use experimental `caches.track_memory_usage` config option.
cache-memory = ["pympler"]
test = ["parameterized", "idna"]
-# Allows for better search for international characters in the user directory. This
-# requires libicu's development headers installed on the system (e.g. libicu-dev on
-# Debian-based distributions).
-user-search = ["pyicu"]

# The duplication here is awful. I hate hate hate hate hate it. However, for now I want
# to ensure you can still `pip install matrix-synapse[all]` like today. Two motivations:
@@ -266,8 +260,6 @@ all = [
    "txredisapi", "hiredis",
    # cache-memory
    "pympler",
-    # improved user search
-    "pyicu",
    # omitted:
    #   - test: it's useful to have this separate from dev deps in the olddeps job
    #   - systemd: this is a system-based requirement
77 rust/benches/tree_cache.rs (Normal file)
@@ -0,0 +1,77 @@
// Copyright 2022 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#![feature(test)]

use synapse::tree_cache::TreeCache;
use test::Bencher;

extern crate test;

#[bench]
fn bench_tree_cache_get_non_empty(b: &mut Bencher) {
    let mut cache: TreeCache<&str, &str> = TreeCache::new();

    cache.set(["a", "b", "c", "d"], "f").unwrap();

    b.iter(|| cache.get(&["a", "b", "c", "d"]));
}

#[bench]
fn bench_tree_cache_get_empty(b: &mut Bencher) {
    let cache: TreeCache<&str, &str> = TreeCache::new();

    b.iter(|| cache.get(&["a", "b", "c", "d"]));
}

#[bench]
fn bench_tree_cache_set(b: &mut Bencher) {
    let mut cache: TreeCache<&str, &str> = TreeCache::new();

    b.iter(|| cache.set(["a", "b", "c", "d"], "f").unwrap());
}

#[bench]
fn bench_tree_cache_length(b: &mut Bencher) {
    let mut cache: TreeCache<u32, u32> = TreeCache::new();

    for c1 in 0..=10 {
        for c2 in 0..=10 {
            for c3 in 0..=10 {
                for c4 in 0..=10 {
                    cache.set([c1, c2, c3, c4], 1).unwrap()
                }
            }
        }
    }

    b.iter(|| cache.len());
}

#[bench]
fn tree_cache_iterate(b: &mut Bencher) {
    let mut cache: TreeCache<u32, u32> = TreeCache::new();

    for c1 in 0..=10 {
        for c2 in 0..=10 {
            for c3 in 0..=10 {
                for c4 in 0..=10 {
                    cache.set([c1, c2, c3, c4], 1).unwrap()
                }
            }
        }
    }

    b.iter(|| cache.items().count());
}
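
Note that `#![feature(test)]` and the built-in `test` crate are only available on nightly Rust, so these benchmarks need a nightly toolchain (for example `cargo +nightly bench`).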
@@ -1,6 +1,7 @@
use pyo3::prelude::*;

pub mod push;
+pub mod tree_cache;

/// Returns the hash of all the rust source files at the time it was compiled.
///
@@ -26,6 +27,7 @@ fn synapse_rust(py: Python<'_>, m: &PyModule) -> PyResult<()> {
    m.add_function(wrap_pyfunction!(get_rust_file_digest, m)?)?;

    push::register_module(py, m)?;
+    tree_cache::binding::register_module(py, m)?;

    Ok(())
}
247 rust/src/tree_cache/binding.rs (Normal file)
@@ -0,0 +1,247 @@
use std::hash::Hash;

use anyhow::Error;
use pyo3::{
    pyclass, pymethods,
    types::{PyModule, PyTuple},
    IntoPy, PyAny, PyObject, PyResult, Python, ToPyObject,
};

use super::TreeCache;

pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> {
    let child_module = PyModule::new(py, "tree_cache")?;
    child_module.add_class::<PythonTreeCache>()?;
    child_module.add_class::<StringTreeCache>()?;

    m.add_submodule(child_module)?;

    // We need to manually add the module to sys.modules to make `from
    // synapse.synapse_rust import push` work.
    py.import("sys")?
        .getattr("modules")?
        .set_item("synapse.synapse_rust.tree_cache", child_module)?;

    Ok(())
}

// A Python object paired with its pre-computed Python hash, so it can live in
// a Rust HashMap without calling back into Python for every hash lookup.
#[derive(Clone)]
struct HashablePyObject {
    obj: PyObject,
    hash: isize,
}

impl HashablePyObject {
    pub fn new(obj: &PyAny) -> Result<Self, Error> {
        let hash = obj.hash()?;

        Ok(HashablePyObject {
            obj: obj.to_object(obj.py()),
            hash,
        })
    }
}

impl IntoPy<PyObject> for HashablePyObject {
    fn into_py(self, _: Python<'_>) -> PyObject {
        self.obj.clone()
    }
}

impl IntoPy<PyObject> for &HashablePyObject {
    fn into_py(self, _: Python<'_>) -> PyObject {
        self.obj.clone()
    }
}

impl ToPyObject for HashablePyObject {
    fn to_object(&self, _py: Python<'_>) -> PyObject {
        self.obj.clone()
    }
}

impl Hash for HashablePyObject {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.hash.hash(state);
    }
}

// Equality defers to the objects' Python `__eq__`, which needs the GIL.
impl PartialEq for HashablePyObject {
    fn eq(&self, other: &Self) -> bool {
        let equal = Python::with_gil(|py| {
            let result = self.obj.as_ref(py).eq(other.obj.as_ref(py));
            result.unwrap_or(false)
        });

        equal
    }
}

impl Eq for HashablePyObject {}
#[pyclass]
struct PythonTreeCache(TreeCache<HashablePyObject, PyObject>);

#[pymethods]
impl PythonTreeCache {
    #[new]
    fn new() -> Self {
        PythonTreeCache(Default::default())
    }

    pub fn set(&mut self, key: &PyAny, value: PyObject) -> Result<(), Error> {
        let v: Vec<HashablePyObject> = key
            .iter()?
            .map(|obj| HashablePyObject::new(obj?))
            .collect::<Result<_, _>>()?;

        self.0.set(v, value)?;

        Ok(())
    }

    pub fn get_node<'a>(
        &'a self,
        py: Python<'a>,
        key: &'a PyAny,
    ) -> Result<Option<Vec<(&'a PyTuple, &'a PyObject)>>, Error> {
        let v: Vec<HashablePyObject> = key
            .iter()?
            .map(|obj| HashablePyObject::new(obj?))
            .collect::<Result<_, _>>()?;

        let Some(node) = self.0.get_node(v.clone())? else {
            return Ok(None)
        };

        let items = node
            .items()
            .map(|(k, value)| {
                let vec = v.iter().chain(k.iter().map(|a| *a)).collect::<Vec<_>>();
                let nk = PyTuple::new(py, vec);
                (nk, value)
            })
            .collect::<Vec<_>>();

        Ok(Some(items))
    }

    pub fn get(&self, key: &PyAny) -> Result<Option<&PyObject>, Error> {
        let v: Vec<HashablePyObject> = key
            .iter()?
            .map(|obj| HashablePyObject::new(obj?))
            .collect::<Result<_, _>>()?;

        Ok(self.0.get(&v)?)
    }

    pub fn pop_node<'a>(
        &'a mut self,
        py: Python<'a>,
        key: &'a PyAny,
    ) -> Result<Option<Vec<(&'a PyTuple, PyObject)>>, Error> {
        let v: Vec<HashablePyObject> = key
            .iter()?
            .map(|obj| HashablePyObject::new(obj?))
            .collect::<Result<_, _>>()?;

        let Some(node) = self.0.pop_node(v.clone())? else {
            return Ok(None)
        };

        let items = node
            .into_items()
            .map(|(k, value)| {
                let vec = v.iter().chain(k.iter()).collect::<Vec<_>>();
                let nk = PyTuple::new(py, vec);
                (nk, value)
            })
            .collect::<Vec<_>>();

        Ok(Some(items))
    }

    pub fn pop(&mut self, key: &PyAny) -> Result<Option<PyObject>, Error> {
        let v: Vec<HashablePyObject> = key
            .iter()?
            .map(|obj| HashablePyObject::new(obj?))
            .collect::<Result<_, _>>()?;

        Ok(self.0.pop(&v)?)
    }

    pub fn clear(&mut self) {
        self.0.clear()
    }

    pub fn len(&self) -> usize {
        self.0.len()
    }

    pub fn values(&self) -> Vec<&PyObject> {
        self.0.values().collect()
    }

    pub fn items(&self) -> Vec<(Vec<&HashablePyObject>, &PyObject)> {
        todo!()
    }
}
#[pyclass]
|
||||
struct StringTreeCache(TreeCache<String, String>);
|
||||
|
||||
#[pymethods]
|
||||
impl StringTreeCache {
|
||||
#[new]
|
||||
fn new() -> Self {
|
||||
StringTreeCache(Default::default())
|
||||
}
|
||||
|
||||
pub fn set(&mut self, key: &PyAny, value: String) -> Result<(), Error> {
|
||||
let key = key
|
||||
.iter()?
|
||||
.map(|o| o.expect("iter failed").extract().expect("not a string"));
|
||||
|
||||
self.0.set(key, value)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// pub fn get_node(&self, key: &PyAny) -> Result<Option<&TreeCacheNode<K, PyObject>>, Error> {
|
||||
// todo!()
|
||||
// }
|
||||
|
||||
pub fn get(&self, key: &PyAny) -> Result<Option<&String>, Error> {
|
||||
let key = key.iter()?.map(|o| {
|
||||
o.expect("iter failed")
|
||||
.extract::<String>()
|
||||
.expect("not a string")
|
||||
});
|
||||
|
||||
Ok(self.0.get(key)?)
|
||||
}
|
||||
|
||||
// pub fn pop_node(&mut self, key: &PyAny) -> Result<Option<TreeCacheNode<K, PyObject>>, Error> {
|
||||
// todo!()
|
||||
// }
|
||||
|
||||
pub fn pop(&mut self, key: Vec<String>) -> Result<Option<String>, Error> {
|
||||
Ok(self.0.pop(&key)?)
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
self.0.clear()
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
pub fn values(&self) -> Vec<&String> {
|
||||
self.0.values().collect()
|
||||
}
|
||||
|
||||
pub fn items(&self) -> Vec<(Vec<&HashablePyObject>, &PyObject)> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
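Once `register_module` has run, the classes above are importable from Python. A minimal usage sketch (not part of this diff), assuming the Rust extension has been compiled into `synapse.synapse_rust`; the class and method names are exactly those exposed by the `#[pyclass]`/`#[pymethods]` blocks:

```python
from synapse.synapse_rust.tree_cache import PythonTreeCache

cache = PythonTreeCache()

# A key is an iterable of hashable Python objects; each element becomes one
# level of the tree.
cache.set(("room1", "@user:test"), "value1")
cache.set(("room1", "@other:test"), "value2")

assert cache.get(("room1", "@user:test")) == "value1"
assert cache.get(("room1", "@missing:test")) is None
assert cache.len() == 2
```
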
421 rust/src/tree_cache/mod.rs Normal file
@@ -0,0 +1,421 @@
use std::{borrow::Borrow, collections::HashMap, hash::Hash};

use anyhow::{bail, Error};

pub mod binding;

pub enum TreeCacheNode<K, V> {
    Leaf(V),
    Branch(usize, HashMap<K, TreeCacheNode<K, V>>),
}

impl<K, V> TreeCacheNode<K, V> {
    pub fn new_branch() -> Self {
        TreeCacheNode::Branch(0, Default::default())
    }

    fn len(&self) -> usize {
        match self {
            TreeCacheNode::Leaf(_) => 1,
            TreeCacheNode::Branch(size, _) => *size,
        }
    }
}

impl<'a, K: Eq + Hash + 'a, V> TreeCacheNode<K, V> {
    pub fn set(
        &mut self,
        mut key: impl Iterator<Item = K>,
        value: V,
    ) -> Result<(usize, usize), Error> {
        if let Some(k) = key.next() {
            match self {
                TreeCacheNode::Leaf(_) => bail!("Given key is too long"),
                TreeCacheNode::Branch(size, map) => {
                    let node = map.entry(k).or_insert_with(TreeCacheNode::new_branch);
                    let (added, removed) = node.set(key, value)?;

                    *size += added;
                    *size -= removed;

                    Ok((added, removed))
                }
            }
        } else {
            // Replacing a whole subtree with a leaf: report one entry added
            // and the subtree's full (recursive) size removed.
            let added = if let TreeCacheNode::Branch(size, _) = self {
                (1, *size)
            } else {
                (0, 0)
            };

            *self = TreeCacheNode::Leaf(value);

            Ok(added)
        }
    }

    pub fn pop<Q>(
        &mut self,
        current_key: Q,
        mut next_keys: impl Iterator<Item = Q>,
    ) -> Result<Option<TreeCacheNode<K, V>>, Error>
    where
        Q: Borrow<K>,
        Q: Hash + Eq + 'a,
    {
        if let Some(next_key) = next_keys.next() {
            match self {
                TreeCacheNode::Leaf(_) => bail!("Given key is too long"),
                TreeCacheNode::Branch(size, map) => {
                    let node = if let Some(node) = map.get_mut(current_key.borrow()) {
                        node
                    } else {
                        return Ok(None);
                    };

                    if let Some(popped) = node.pop(next_key, next_keys)? {
                        // Subtract the number of entries actually removed,
                        // not the child's remaining length.
                        *size -= popped.len();

                        Ok(Some(popped))
                    } else {
                        Ok(None)
                    }
                }
            }
        } else {
            match self {
                TreeCacheNode::Leaf(_) => bail!("Given key is too long"),
                TreeCacheNode::Branch(size, map) => {
                    if let Some(node) = map.remove(current_key.borrow()) {
                        *size -= node.len();

                        Ok(Some(node))
                    } else {
                        Ok(None)
                    }
                }
            }
        }
    }

    pub fn items(&'a self) -> impl Iterator<Item = (Vec<&K>, &V)> {
        // To avoid a lot of mallocs we guess the length of the key. Ideally
        // we'd know this.
        let capacity_guesstimate = 10;

        let mut stack = vec![(Vec::with_capacity(capacity_guesstimate), self)];

        std::iter::from_fn(move || {
            while let Some((prefix, node)) = stack.pop() {
                match node {
                    TreeCacheNode::Leaf(value) => return Some((prefix, value)),
                    TreeCacheNode::Branch(_, map) => {
                        stack.extend(map.iter().map(|(k, v)| {
                            let mut new_prefix = Vec::with_capacity(capacity_guesstimate);
                            new_prefix.extend_from_slice(&prefix);
                            new_prefix.push(k);
                            (new_prefix, v)
                        }));
                    }
                }
            }

            None
        })
    }

    pub fn values(&'a self) -> impl Iterator<Item = &V> {
        let mut stack = vec![self];

        std::iter::from_fn(move || {
            while let Some(node) = stack.pop() {
                match node {
                    TreeCacheNode::Leaf(value) => return Some(value),
                    TreeCacheNode::Branch(_, map) => {
                        stack.extend(map.iter().map(|(_k, v)| v));
                    }
                }
            }

            None
        })
    }
}

impl<'a, K: Clone + Eq + Hash + 'a, V> TreeCacheNode<K, V> {
    pub fn into_items(self) -> impl Iterator<Item = (Vec<K>, V)> {
        let mut stack = vec![(Vec::new(), self)];

        std::iter::from_fn(move || {
            while let Some((prefix, node)) = stack.pop() {
                match node {
                    TreeCacheNode::Leaf(value) => return Some((prefix, value)),
                    TreeCacheNode::Branch(_, map) => {
                        stack.extend(map.into_iter().map(|(k, v)| {
                            let mut prefix = prefix.clone();
                            prefix.push(k);
                            (prefix, v)
                        }));
                    }
                }
            }

            None
        })
    }
}

impl<K, V> Default for TreeCacheNode<K, V> {
    fn default() -> Self {
        TreeCacheNode::new_branch()
    }
}

pub struct TreeCache<K, V> {
    root: TreeCacheNode<K, V>,
}

impl<K, V> TreeCache<K, V> {
    pub fn new() -> Self {
        TreeCache {
            root: TreeCacheNode::new_branch(),
        }
    }
}

impl<'a, K: Eq + Hash + 'a, V> TreeCache<K, V> {
    pub fn set(&mut self, key: impl IntoIterator<Item = K>, value: V) -> Result<(), Error> {
        self.root.set(key.into_iter(), value)?;

        Ok(())
    }

    pub fn get_node<Q>(
        &self,
        key: impl IntoIterator<Item = Q>,
    ) -> Result<Option<&TreeCacheNode<K, V>>, Error>
    where
        Q: Borrow<K>,
        Q: Hash + Eq + 'a,
    {
        let mut node = &self.root;

        for k in key {
            match node {
                TreeCacheNode::Leaf(_) => bail!("Given key is too long"),
                TreeCacheNode::Branch(_, map) => {
                    node = if let Some(node) = map.get(k.borrow()) {
                        node
                    } else {
                        return Ok(None);
                    };
                }
            }
        }

        Ok(Some(node))
    }

    pub fn get<Q>(&self, key: impl IntoIterator<Item = Q>) -> Result<Option<&V>, Error>
    where
        Q: Borrow<K>,
        Q: Hash + Eq + 'a,
    {
        if let Some(node) = self.get_node(key)? {
            match node {
                TreeCacheNode::Leaf(value) => Ok(Some(value)),
                TreeCacheNode::Branch(_, _) => bail!("Given key is too short"),
            }
        } else {
            Ok(None)
        }
    }

    pub fn pop_node<Q>(
        &mut self,
        key: impl IntoIterator<Item = Q>,
    ) -> Result<Option<TreeCacheNode<K, V>>, Error>
    where
        Q: Borrow<K>,
        Q: Hash + Eq + 'a,
    {
        let mut key_iter = key.into_iter();

        let k = if let Some(k) = key_iter.next() {
            k
        } else {
            // An empty key pops the entire tree.
            let node = std::mem::replace(&mut self.root, TreeCacheNode::new_branch());
            return Ok(Some(node));
        };

        self.root.pop(k, key_iter)
    }

    pub fn pop(&mut self, key: &[K]) -> Result<Option<V>, Error> {
        if let Some(node) = self.pop_node(key)? {
            match node {
                TreeCacheNode::Leaf(value) => Ok(Some(value)),
                TreeCacheNode::Branch(_, _) => bail!("Given key is too short"),
            }
        } else {
            Ok(None)
        }
    }

    pub fn clear(&mut self) {
        self.root = TreeCacheNode::new_branch();
    }

    pub fn len(&self) -> usize {
        match self.root {
            TreeCacheNode::Leaf(_) => 1,
            TreeCacheNode::Branch(size, _) => size,
        }
    }

    pub fn values(&self) -> impl Iterator<Item = &V> {
        let mut stack = vec![&self.root];

        std::iter::from_fn(move || {
            while let Some(node) = stack.pop() {
                match node {
                    TreeCacheNode::Leaf(value) => return Some(value),
                    TreeCacheNode::Branch(_, map) => {
                        stack.extend(map.values());
                    }
                }
            }

            None
        })
    }

    pub fn items(&self) -> impl Iterator<Item = (Vec<&K>, &V)> {
        self.root.items()
    }
}

impl<K, V> Default for TreeCache<K, V> {
    fn default() -> Self {
        TreeCache::new()
    }
}

#[cfg(test)]
mod test {
    use std::collections::BTreeSet;

    use super::*;

    #[test]
    fn get_set() -> Result<(), Error> {
        let mut cache = TreeCache::new();

        cache.set(vec!["a", "b"], "c")?;

        assert_eq!(cache.get(&["a", "b"])?, Some(&"c"));

        let node = cache.get_node(&["a"])?.unwrap();

        match node {
            TreeCacheNode::Leaf(_) => bail!("expected branch"),
            TreeCacheNode::Branch(_, map) => {
                assert_eq!(map.len(), 1);
                assert!(map.contains_key("b"));
            }
        }

        Ok(())
    }

    #[test]
    fn length() -> Result<(), Error> {
        let mut cache = TreeCache::new();

        cache.set(vec!["a", "b"], "c")?;

        assert_eq!(cache.len(), 1);

        cache.set(vec!["a", "b"], "d")?;

        assert_eq!(cache.len(), 1);

        cache.set(vec!["e", "f"], "g")?;

        assert_eq!(cache.len(), 2);

        cache.set(vec!["e", "h"], "i")?;

        assert_eq!(cache.len(), 3);

        cache.set(vec!["e"], "i")?;

        assert_eq!(cache.len(), 2);

        cache.pop_node(&["a"])?;

        assert_eq!(cache.len(), 1);

        Ok(())
    }

    #[test]
    fn clear() -> Result<(), Error> {
        let mut cache = TreeCache::new();

        cache.set(vec!["a", "b"], "c")?;

        assert_eq!(cache.len(), 1);

        cache.clear();

        assert_eq!(cache.len(), 0);

        assert_eq!(cache.get(&["a", "b"])?, None);

        Ok(())
    }

    #[test]
    fn pop() -> Result<(), Error> {
        let mut cache = TreeCache::new();

        cache.set(vec!["a", "b"], "c")?;
        assert_eq!(cache.pop(&["a", "b"])?, Some("c"));
        assert_eq!(cache.pop(&["a", "b"])?, None);

        Ok(())
    }

    #[test]
    fn values() -> Result<(), Error> {
        let mut cache = TreeCache::new();

        cache.set(vec!["a", "b"], "c")?;

        let expected = ["c"].iter().collect();
        assert_eq!(cache.values().collect::<BTreeSet<_>>(), expected);

        cache.set(vec!["d", "e"], "f")?;

        let expected = ["c", "f"].iter().collect();
        assert_eq!(cache.values().collect::<BTreeSet<_>>(), expected);

        Ok(())
    }

    #[test]
    fn items() -> Result<(), Error> {
        let mut cache = TreeCache::new();

        cache.set(vec!["a", "b"], "c")?;
        cache.set(vec!["d", "e"], "f")?;

        let expected = [(vec![&"a", &"b"], &"c"), (vec![&"d", &"e"], &"f")]
            .into_iter()
            .collect();
        assert_eq!(cache.items().collect::<BTreeSet<_>>(), expected);

        Ok(())
    }
}

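As the `length` test above illustrates in Rust, `len` counts leaf entries, overwriting an interior node drops everything beneath it, and popping a prefix removes the whole subtree. A sketch (not part of this diff) of the same semantics exercised through the Python binding, again assuming the compiled module is available:

```python
from synapse.synapse_rust.tree_cache import PythonTreeCache

cache = PythonTreeCache()
cache.set(("a", "b"), "c")
cache.set(("e", "f"), "g")
cache.set(("e", "h"), "i")
assert cache.len() == 3

# Overwriting an interior node with a leaf drops the subtree beneath it.
cache.set(("e",), "j")
assert cache.len() == 2

# Popping a prefix removes every entry under it and returns the
# (full key, value) pairs that were dropped.
assert cache.pop_node(("a",)) == [(("a", "b"), "c")]
assert cache.len() == 1
```
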
@@ -53,12 +53,6 @@ Run the complement test suite on Synapse.
  Only build the Docker images. Don't actually run Complement.
  Conflicts with -f/--fast.

  -e, --editable
    Use an editable build of Synapse, rebuilding the image if necessary.
    This is suitable for use in development where a fast turn-around time
    is important.
    Not suitable for use in CI in case the editable environment is impure.

For help on arguments to 'go test', run 'go help testflag'.
EOF
}
@@ -79,9 +73,6 @@ while [ $# -ge 1 ]; do
        "--build-only")
            skip_complement_run=1
            ;;
        "-e"|"--editable")
            use_editable_synapse=1
            ;;
        *)
            # unknown arg: presumably an argument to gotest. break the loop.
            break
@@ -105,76 +96,25 @@ if [[ -z "$COMPLEMENT_DIR" ]]; then
  echo "Checkout available at 'complement-${COMPLEMENT_REF}'"
fi

if [ -n "$use_editable_synapse" ]; then
  if [[ -e synapse/synapse_rust.abi3.so ]]; then
    # In an editable install, back up the host's compiled Rust module to prevent
    # inconvenience; the container will overwrite the module with its own copy.
    mv -n synapse/synapse_rust.abi3.so synapse/synapse_rust.abi3.so~host
    # And restore it on exit:
    synapse_pkg=`realpath synapse`
    trap "mv -f '$synapse_pkg/synapse_rust.abi3.so~host' '$synapse_pkg/synapse_rust.abi3.so'" EXIT
  fi

  editable_mount="$(realpath .):/editable-src:z"
  if docker inspect complement-synapse-editable &>/dev/null; then
    # complement-synapse-editable already exists: see if we can still use it:
    # - The Rust module must still be importable; it will fail to import if the Rust source has changed.
    # - The Poetry lock file must be the same (otherwise we assume dependencies have changed)

    # First set up the module in the right place for an editable installation.
    docker run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so

    if (docker run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
        && docker run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
      skip_docker_build=1
    else
      echo "Editable Synapse image is stale. Will rebuild."
      unset skip_docker_build
    fi
  fi
fi

if [ -z "$skip_docker_build" ]; then
  if [ -n "$use_editable_synapse" ]; then
    # Build the base Synapse image from the local checkout
    echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
    docker build -t matrixdotorg/synapse \
      --build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
      --build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
      -f "docker/Dockerfile" .
    echo_if_github "::endgroup::"

    # Build a special image designed for use in development with editable
    # installs.
    docker build -t synapse-editable \
      -f "docker/editable.Dockerfile" .
    # Build the workers docker image (from the base Synapse image we just built).
    echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
    docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
    echo_if_github "::endgroup::"

    docker build -t synapse-workers-editable \
      --build-arg FROM=synapse-editable \
      -f "docker/Dockerfile-workers" .

    docker build -t complement-synapse-editable \
      --build-arg FROM=synapse-workers-editable \
      -f "docker/complement/Dockerfile" "docker/complement"

    # Prepare the Rust module
    docker run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so

  else

    # Build the base Synapse image from the local checkout
    echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
    docker build -t matrixdotorg/synapse \
      --build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
      --build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
      -f "docker/Dockerfile" .
    echo_if_github "::endgroup::"

    # Build the workers docker image (from the base Synapse image we just built).
    echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
    docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
    echo_if_github "::endgroup::"

    # Build the unified Complement image (from the worker Synapse image we just built).
    echo_if_github "::group::Build Docker image: complement/Dockerfile"
    docker build -t complement-synapse \
      -f "docker/complement/Dockerfile" "docker/complement"
    echo_if_github "::endgroup::"

  fi
  # Build the unified Complement image (from the worker Synapse image we just built).
  echo_if_github "::group::Build Docker image: complement/Dockerfile"
  docker build -t complement-synapse \
    -f "docker/complement/Dockerfile" "docker/complement"
  echo_if_github "::endgroup::"
fi

if [ -n "$skip_complement_run" ]; then
@@ -183,10 +123,6 @@ if [ -n "$skip_complement_run" ]; then
fi

export COMPLEMENT_BASE_IMAGE=complement-synapse
if [ -n "$use_editable_synapse" ]; then
  export COMPLEMENT_BASE_IMAGE=complement-synapse-editable
  export COMPLEMENT_HOST_MOUNTS="$editable_mount"
fi

extra_test_args=()

@@ -1,25 +0,0 @@
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Stub for PyICU.

class Locale:
    @staticmethod
    def getDefault() -> Locale: ...

class BreakIterator:
    @staticmethod
    def createWordInstance(locale: Locale) -> BreakIterator: ...
    def setText(self, text: str) -> None: ...
    def nextBoundary(self) -> int: ...

@@ -222,7 +222,6 @@ def main() -> None:

    args = parser.parse_args()

    config: Optional[Dict[str, Any]] = None
    if "config" in args and args.config:
        config = yaml.safe_load(args.config)

@@ -230,7 +229,7 @@ def main() -> None:
        secret = args.shared_secret
    else:
        # argparse should check that we have either config or shared secret
        assert config is not None
        assert config

        secret = config.get("registration_shared_secret")
        secret_file = config.get("registration_shared_secret_path")
@@ -245,7 +244,7 @@ def main() -> None:

    if args.server_url:
        server_url = args.server_url
    elif config is not None:
    elif config:
        server_url = _find_client_listener(config)
        if not server_url:
            server_url = _DEFAULT_SERVER_URL

@@ -230,9 +230,6 @@ class EventContentFields:
    # The authorising user for joining a restricted room.
    AUTHORISING_USER: Final = "join_authorised_via_users_server"

    # an unspecced field added to to-device messages to identify them uniquely-ish
    TO_DEVICE_MSGID: Final = "org.matrix.msgid"


class RoomTypes:
    """Understood values of the room_type field of m.room.create events."""

@@ -300,8 +300,10 @@ class InteractiveAuthIncompleteError(Exception):
class UnrecognizedRequestError(SynapseError):
    """An error indicating we don't understand the request you're trying to make"""

    def __init__(self, msg: str = "Unrecognized request", code: int = 400):
        super().__init__(code, msg, Codes.UNRECOGNIZED)
    def __init__(
        self, msg: str = "Unrecognized request", errcode: str = Codes.UNRECOGNIZED
    ):
        super().__init__(400, msg, errcode)


class NotFoundError(SynapseError):

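The two constructor signatures above differ in which part of the error is configurable; depending on which side of this comparison is in play, only one of them exists. A hedged sketch of the two call styles (not part of the diff):

```python
from synapse.api.errors import Codes, UnrecognizedRequestError

def fail_with_status() -> None:
    # `code`-parameterised form: the caller picks the HTTP status, and the
    # Matrix errcode is always M_UNRECOGNIZED.
    raise UnrecognizedRequestError(code=404)

def fail_with_errcode() -> None:
    # `errcode`-parameterised form: the HTTP status is always 400, and the
    # errcode is what varies.
    raise UnrecognizedRequestError(errcode=Codes.UNRECOGNIZED)
```
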
@@ -28,8 +28,8 @@ from synapse.event_auth import auth_types_for_event
from synapse.events import EventBase, _EventInternalMetadata, make_event_from_dict
from synapse.state import StateHandler
from synapse.storage.databases.main import DataStore
from synapse.storage.state import StateFilter
from synapse.types import EventID, JsonDict
from synapse.types.state import StateFilter
from synapse.util import Clock
from synapse.util.stringutils import random_string

@@ -23,7 +23,7 @@ from synapse.types import JsonDict, StateMap
if TYPE_CHECKING:
    from synapse.storage.controllers import StorageControllers
    from synapse.storage.databases.main import DataStore
    from synapse.types.state import StateFilter
    from synapse.storage.state import StateFilter


@attr.s(slots=True, auto_attribs=True)

@@ -45,7 +45,6 @@ CHECK_CAN_DEACTIVATE_USER_CALLBACK = Callable[[str, bool], Awaitable[bool]]
ON_PROFILE_UPDATE_CALLBACK = Callable[[str, ProfileInfo, bool, bool], Awaitable]
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK = Callable[[str, bool, bool], Awaitable]
ON_THREEPID_BIND_CALLBACK = Callable[[str, str, str], Awaitable]
UNBIND_THREEPID_CALLBACK = Callable[[str, str, str, str], Awaitable[Tuple[bool, bool]]]


def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
@@ -175,7 +174,6 @@ class ThirdPartyEventRules:
            ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
        ] = []
        self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = []
        self._unbind_threepid_callbacks: List[UNBIND_THREEPID_CALLBACK] = []

    def register_third_party_rules_callbacks(
        self,
@@ -195,7 +193,6 @@ class ThirdPartyEventRules:
            ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
        ] = None,
        on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None,
        unbind_threepid: Optional[UNBIND_THREEPID_CALLBACK] = None,
    ) -> None:
        """Register callbacks from modules for each hook."""
        if check_event_allowed is not None:
@@ -233,9 +230,6 @@ class ThirdPartyEventRules:
        if on_threepid_bind is not None:
            self._on_threepid_bind_callbacks.append(on_threepid_bind)

        if unbind_threepid is not None:
            self._unbind_threepid_callbacks.append(unbind_threepid)

    async def check_event_allowed(
        self, event: EventBase, context: EventContext
    ) -> Tuple[bool, Optional[dict]]:
@@ -529,41 +523,3 @@ class ThirdPartyEventRules:
                logger.exception(
                    "Failed to run module API callback %s: %s", callback, e
                )

    async def unbind_threepid(
        self, user_id: str, medium: str, address: str, identity_server: str
    ) -> Tuple[bool, bool]:
        """Called before a threepid association is removed.

        Note that this callback is called before an association is deleted on the
        local homeserver.

        Args:
            user_id: the user being associated with the threepid.
            medium: the threepid's medium.
            address: the threepid's address.
            identity_server: the identity server where the threepid was successfully registered.

        Returns:
            A tuple of 2 booleans reporting if a change happened for the first, and if unbind
            needs to stop there for the second (True value). In this case no other module unbind will be
            called, and the default unbind made to the IS that was used on bind will also be skipped.
            In any case the mapping will be removed from the Synapse 3pid remote table, except if an Exception
            was raised at some point.
        """

        global_changed = False
        for callback in self._unbind_threepid_callbacks:
            try:
                (changed, stop) = await callback(
                    user_id, medium, address, identity_server
                )
                global_changed |= changed
                if stop:
                    return global_changed, True
            except Exception as e:
                logger.exception(
                    "Failed to run module API callback %s: %s", callback, e
                )

        return global_changed, False

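For illustration, a minimal sketch (not part of this diff) of a module implementing the `unbind_threepid` hook described above. The module class name and its deletion helper are hypothetical; the callback signature matches `UNBIND_THREEPID_CALLBACK` and registration goes through the `register_third_party_rules_callbacks` API touched elsewhere in this change:

```python
from typing import Tuple


class MyThreepidModule:
    """Hypothetical module that takes over 3PID unbinding."""

    def __init__(self, config: dict, api):
        self._api = api
        # `unbind_threepid` receives (user_id, medium, address, identity_server)
        # and must return (changed, stop).
        api.register_third_party_rules_callbacks(
            unbind_threepid=self.unbind_threepid,
        )

    async def unbind_threepid(
        self, user_id: str, medium: str, address: str, identity_server: str
    ) -> Tuple[bool, bool]:
        # Remove the association in some external store (hypothetical helper).
        changed = await self._delete_binding(user_id, medium, address)
        # Returning stop=True skips the remaining module callbacks and the
        # default unbind request to the identity server used on bind.
        return changed, True

    async def _delete_binding(self, user_id: str, medium: str, address: str) -> bool:
        ...  # talk to the external identity store here
        return True
```
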
@@ -771,28 +771,17 @@ class FederationClient(FederationBase):
        """
        if synapse_error is None:
            synapse_error = e.to_synapse_error()
        # MSC3743 specifies that servers should return a 404 or 405 with an errcode
        # of M_UNRECOGNIZED when they receive a request to an unknown endpoint or
        # to an unknown method, respectively.
        # There is no good way to detect an "unknown" endpoint.
        #
        # Older versions of servers don't properly handle this. This needs to be
        # rather specific as some endpoints truly do return 404 errors.
        # Dendrite returns a 404 (with a body of "404 page not found");
        # Conduit returns a 404 (with no body); and Synapse returns a 400
        # with M_UNRECOGNIZED.
        #
        # This needs to be rather specific as some endpoints truly do return 404
        # errors.
        return (
            # 404 is an unknown endpoint, 405 is a known endpoint, but unknown method.
            (e.code == 404 or e.code == 405)
            and (
                # Older Dendrites returned a text or empty body.
                # Older Conduit returned an empty body.
                not e.response
                or e.response == b"404 page not found"
                # The proper response JSON with M_UNRECOGNIZED errcode.
                or synapse_error.errcode == Codes.UNRECOGNIZED
            )
        ) or (
            # Older Synapses returned a 400 error.
            e.code == 400
            and synapse_error.errcode == Codes.UNRECOGNIZED
        )
            e.code == 404 and (not e.response or e.response == b"404 page not found")
        ) or (e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED)

    async def _try_destination_list(
        self,

@@ -641,7 +641,7 @@ class PerDestinationQueue:
                if not message_id:
                    continue

                set_tag(SynapseTags.TO_DEVICE_EDU_ID, message_id)
                set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)

        edus = [
            Edu(

@@ -578,6 +578,9 @@ class ApplicationServicesHandler:
            device_id,
        ), messages in recipient_device_to_messages.items():
            for message_json in messages:
                # Remove 'message_id' from the to-device message, as it's an internal ID
                message_json.pop("message_id", None)

                message_payload.append(
                    {
                        "to_user_id": user_id,
@@ -612,8 +615,8 @@ class ApplicationServicesHandler:
        )

        # Fetch the users who have modified their device list since then.
        users_with_changed_device_lists = await self.store.get_all_devices_changed(
            from_key, to_key=new_key
        users_with_changed_device_lists = (
            await self.store.get_users_whose_devices_changed(from_key, to_key=new_key)
        )

        # Filter out any users the application service is not interested in

@@ -996,7 +996,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
        # Check if we are partially joining any rooms. If so we need to store
        # all device list updates so that we can handle them correctly once we
        # know who is in the room.
        # TODO(faster_joins): this fetches and processes a bunch of data that we don't
        # TODO(faster joins): this fetches and processes a bunch of data that we don't
        # use. Could be replaced by a tighter query e.g.
        #   SELECT EXISTS(SELECT 1 FROM partial_state_rooms)
        partial_rooms = await self.store.get_partial_state_room_resync_info()

@@ -15,7 +15,7 @@
import logging
from typing import TYPE_CHECKING, Any, Dict

from synapse.api.constants import EduTypes, EventContentFields, ToDeviceEventTypes
from synapse.api.constants import EduTypes, ToDeviceEventTypes
from synapse.api.errors import SynapseError
from synapse.api.ratelimiting import Ratelimiter
from synapse.logging.context import run_in_background
@@ -216,24 +216,14 @@ class DeviceMessageHandler:
        """
        sender_user_id = requester.user.to_string()

        set_tag(SynapseTags.TO_DEVICE_TYPE, message_type)
        set_tag(SynapseTags.TO_DEVICE_SENDER, sender_user_id)
        message_id = random_string(16)
        set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)

        log_kv({"number_of_to_device_messages": len(messages)})
        set_tag("sender", sender_user_id)
        local_messages = {}
        remote_messages: Dict[str, Dict[str, Dict[str, JsonDict]]] = {}
        for user_id, by_device in messages.items():
            # add an opentracing log entry for each message
            for device_id, message_content in by_device.items():
                log_kv(
                    {
                        "event": "send_to_device_message",
                        "user_id": user_id,
                        "device_id": device_id,
                        EventContentFields.TO_DEVICE_MSGID: message_content.get(
                            EventContentFields.TO_DEVICE_MSGID
                        ),
                    }
                )

            # Ratelimit local cross-user key requests by the sending device.
            if (
                message_type == ToDeviceEventTypes.RoomKeyRequest
@@ -243,7 +233,6 @@ class DeviceMessageHandler:
                    requester, (sender_user_id, requester.device_id)
                )
                if not allowed:
                    log_kv({"message": f"dropping key requests to {user_id}"})
                    logger.info(
                        "Dropping room_key_request from %s to %s due to rate limit",
                        sender_user_id,
@@ -258,11 +247,18 @@ class DeviceMessageHandler:
                        "content": message_content,
                        "type": message_type,
                        "sender": sender_user_id,
                        "message_id": message_id,
                    }
                    for device_id, message_content in by_device.items()
                }
                if messages_by_device:
                    local_messages[user_id] = messages_by_device
                    log_kv(
                        {
                            "user_id": user_id,
                            "device_id": list(messages_by_device),
                        }
                    )
            else:
                destination = get_domain_from_id(user_id)
                remote_messages.setdefault(destination, {})[user_id] = by_device
@@ -271,11 +267,7 @@ class DeviceMessageHandler:

        remote_edu_contents = {}
        for destination, messages in remote_messages.items():
            # The EDU contains a "message_id" property which is used for
            # idempotence. Make up a random one.
            message_id = random_string(16)
            log_kv({"destination": destination, "message_id": message_id})

            log_kv({"destination": destination})
            remote_edu_contents[destination] = {
                "messages": messages,
                "sender": sender_user_id,

@@ -70,8 +70,8 @@ from synapse.replication.http.federation import (
)
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.state import StateFilter
from synapse.types import JsonDict, get_domain_from_id
from synapse.types.state import StateFilter
from synapse.util.async_helpers import Linearizer
from synapse.util.retryutils import NotRetryingDestination
from synapse.visibility import filter_events_for_server
@@ -152,7 +152,6 @@ class FederationHandler:
        self._federation_event_handler = hs.get_federation_event_handler()
        self._device_handler = hs.get_device_handler()
        self._bulk_push_rule_evaluator = hs.get_bulk_push_rule_evaluator()
        self._notifier = hs.get_notifier()

        self._clean_room_for_join_client = ReplicationCleanRoomRestServlet.make_client(
            hs
@@ -1693,9 +1692,6 @@ class FederationHandler:
                self._storage_controllers.state.notify_room_un_partial_stated(
                    room_id
                )
                # Poke the notifier so that other workers see the write to
                # the un-partial-stated rooms stream.
                self._notifier.notify_replication()

                # TODO(faster_joins) update room stats and user directory?
                #   https://github.com/matrix-org/synapse/issues/12814
@@ -75,6 +75,7 @@ from synapse.replication.http.federation import (
from synapse.state import StateResolutionStore
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.state import StateFilter
from synapse.types import (
    PersistedEventPosition,
    RoomStreamToken,
@@ -82,7 +83,6 @@ from synapse.types import (
    UserID,
    get_domain_from_id,
)
from synapse.types.state import StateFilter
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.iterutils import batch_iter
from synapse.util.retryutils import NotRetryingDestination

@@ -275,64 +275,49 @@ class IdentityHandler:
            server doesn't support unbinding
        """

        medium = threepid["medium"]
        address = threepid["address"]

        (changed, stop,) = await self.hs.get_third_party_event_rules().unbind_threepid(
            mxid, medium, address, id_server
        )

        # If a module wants to take over unbind it will return stop = True,
        # in this case we should just purge the table from the 3pid record
        if not stop:
            if not valid_id_server_location(id_server):
                raise SynapseError(
                    400,
                    "id_server must be a valid hostname with optional port and path components",
                )

            url = "https://%s/_matrix/identity/v2/3pid/unbind" % (id_server,)
            url_bytes = b"/_matrix/identity/v2/3pid/unbind"

            content = {
                "mxid": mxid,
                "threepid": {
                    "medium": threepid["medium"],
                    "address": threepid["address"],
                },
            }

            # we abuse the federation http client to sign the request, but we have to send it
            # using the normal http client since we don't want the SRV lookup and want normal
            # 'browser-like' HTTPS.
            auth_headers = self.federation_http_client.build_auth_headers(
                destination=None,
                method=b"POST",
                url_bytes=url_bytes,
                content=content,
                destination_is=id_server.encode("ascii"),
        if not valid_id_server_location(id_server):
            raise SynapseError(
                400,
                "id_server must be a valid hostname with optional port and path components",
            )
            headers = {b"Authorization": auth_headers}

            try:
                # Use the blacklisting http client as this call is only to identity servers
                # provided by a client
                await self.blacklisting_http_client.post_json_get_json(
                    url, content, headers
                )
                changed &= True
            except HttpResponseException as e:
                changed &= False
                if e.code in (400, 404, 501):
                    # The remote server probably doesn't support unbinding (yet)
                    logger.warning(
                        "Received %d response while unbinding threepid", e.code
                    )
                else:
                    logger.error("Failed to unbind threepid on identity server: %s", e)
                    raise SynapseError(500, "Failed to contact identity server")
            except RequestTimedOutError:
                raise SynapseError(500, "Timed out contacting identity server")
        url = "https://%s/_matrix/identity/v2/3pid/unbind" % (id_server,)
        url_bytes = b"/_matrix/identity/v2/3pid/unbind"

        content = {
            "mxid": mxid,
            "threepid": {"medium": threepid["medium"], "address": threepid["address"]},
        }

        # we abuse the federation http client to sign the request, but we have to send it
        # using the normal http client since we don't want the SRV lookup and want normal
        # 'browser-like' HTTPS.
        auth_headers = self.federation_http_client.build_auth_headers(
            destination=None,
            method=b"POST",
            url_bytes=url_bytes,
            content=content,
            destination_is=id_server.encode("ascii"),
        )
        headers = {b"Authorization": auth_headers}

        try:
            # Use the blacklisting http client as this call is only to identity servers
            # provided by a client
            await self.blacklisting_http_client.post_json_get_json(
                url, content, headers
            )
            changed = True
        except HttpResponseException as e:
            changed = False
            if e.code in (400, 404, 501):
                # The remote server probably doesn't support unbinding (yet)
                logger.warning("Received %d response while unbinding threepid", e.code)
            else:
                logger.error("Failed to unbind threepid on identity server: %s", e)
                raise SynapseError(500, "Failed to contact identity server")
        except RequestTimedOutError:
            raise SynapseError(500, "Timed out contacting identity server")

        await self.store.remove_user_bound_threepid(
            user_id=mxid,

@@ -59,6 +59,7 @@ from synapse.replication.http.send_event import ReplicationSendEventRestServlet
from synapse.replication.http.send_events import ReplicationSendEventsRestServlet
from synapse.storage.databases.main.events import PartialStateConflictError
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.state import StateFilter
from synapse.types import (
    MutableStateMap,
    PersistedEventPosition,
@@ -69,7 +70,6 @@ from synapse.types import (
    UserID,
    create_requester,
)
from synapse.types.state import StateFilter
from synapse.util import json_decoder, json_encoder, log_failure, unwrapFirstError
from synapse.util.async_helpers import Linearizer, gather_results
from synapse.util.caches.expiringcache import ExpiringCache

@@ -27,9 +27,9 @@ from synapse.handlers.room import ShutdownRoomResponse
from synapse.logging.opentracing import trace
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.rest.admin._base import assert_user_is_admin
from synapse.storage.state import StateFilter
from synapse.streams.config import PaginationConfig
from synapse.types import JsonDict, Requester, StreamKeyType
from synapse.types.state import StateFilter
from synapse.util.async_helpers import ReadWriteLock
from synapse.util.stringutils import random_string
from synapse.visibility import filter_events_for_client

@@ -1692,12 +1692,10 @@ class PresenceEventSource(EventSource[int, UserPresenceState]):

        if from_key is not None:
            # First get all users that have had a presence update
            result = stream_change_cache.get_all_entities_changed(from_key)
            updated_users = stream_change_cache.get_all_entities_changed(from_key)

            # Cross-reference users we're interested in with those that have had updates.
            if result.hit:
                updated_users = result.entities

            if updated_users is not None:
                # If we have the full list of changes for presence we can
                # simply check which ones share a room with the user.
                get_updates_counter.labels("stream").inc()
@@ -1769,9 +1767,9 @@ class PresenceEventSource(EventSource[int, UserPresenceState]):
        updated_users = None
        if from_key:
            # Only return updates since the last sync
            result = self.store.presence_stream_cache.get_all_entities_changed(from_key)
            if result.hit:
                updated_users = result.entities
            updated_users = self.store.presence_stream_cache.get_all_entities_changed(
                from_key
            )

        if updated_users is not None:
            # Get the actual presence update for each change
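The two hunks above capture the API difference this comparison spans: on one side `get_all_entities_changed` returns a result object with `.hit`/`.entities`, on the other it returns the changed entities directly, or `None` when the cache cannot answer. A self-contained sketch of the second convention (a minimal stand-in, not Synapse's actual cache implementation):

```python
from typing import List, Optional, Tuple


class SketchStreamChangeCache:
    """Minimal stand-in for a stream change cache (illustration only)."""

    def __init__(self, earliest_known_key: int) -> None:
        self._earliest = earliest_known_key
        self._changes: List[Tuple[int, str]] = []

    def entity_has_changed(self, entity: str, key: int) -> None:
        self._changes.append((key, entity))

    def get_all_entities_changed(self, from_key: int) -> Optional[List[str]]:
        # `None` means "don't know": `from_key` predates the cache window,
        # so the caller must fall back to checking every entity.
        if from_key < self._earliest:
            return None
        return [entity for (key, entity) in self._changes if key > from_key]


cache = SketchStreamChangeCache(earliest_known_key=10)
cache.entity_has_changed("@alice:example.org", 11)

assert cache.get_all_entities_changed(10) == ["@alice:example.org"]
assert cache.get_all_entities_changed(5) is None  # too old: check everyone
```
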
@@ -46,8 +46,8 @@ from synapse.replication.http.register import (
    ReplicationRegisterServlet,
)
from synapse.spam_checker_api import RegistrationBehaviour
from synapse.storage.state import StateFilter
from synapse.types import RoomAlias, UserID, create_requester
from synapse.types.state import StateFilter

if TYPE_CHECKING:
    from synapse.server import HomeServer

@@ -62,6 +62,7 @@ from synapse.events.utils import copy_and_fixup_power_levels_contents
from synapse.handlers.relations import BundledAggregations
from synapse.module_api import NOT_SPAM
from synapse.rest.admin._base import assert_user_is_admin
from synapse.storage.state import StateFilter
from synapse.streams import EventSource
from synapse.types import (
    JsonDict,
@@ -76,7 +77,6 @@ from synapse.types import (
    UserID,
    create_requester,
)
from synapse.types.state import StateFilter
from synapse.util import stringutils
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.stringutils import parse_and_validate_server_name

@@ -34,6 +34,7 @@ from synapse.events.snapshot import EventContext
from synapse.handlers.profile import MAX_AVATAR_URL_LEN, MAX_DISPLAYNAME_LEN
from synapse.logging import opentracing
from synapse.module_api import NOT_SPAM
from synapse.storage.state import StateFilter
from synapse.types import (
    JsonDict,
    Requester,
@@ -44,7 +45,6 @@ from synapse.types import (
    create_requester,
    get_domain_from_id,
)
from synapse.types.state import StateFilter
from synapse.util.async_helpers import Linearizer
from synapse.util.distributor import user_left_room

@@ -23,8 +23,8 @@ from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import NotFoundError, SynapseError
from synapse.api.filtering import Filter
from synapse.events import EventBase
from synapse.storage.state import StateFilter
from synapse.types import JsonDict, StreamKeyType, UserID
from synapse.types.state import StateFilter
from synapse.visibility import filter_events_for_client

if TYPE_CHECKING:

@@ -31,24 +31,19 @@ from typing import (
import attr
from prometheus_client import Counter

from synapse.api.constants import EventContentFields, EventTypes, Membership
from synapse.api.constants import EventTypes, Membership
from synapse.api.filtering import FilterCollection
from synapse.api.presence import UserPresenceState
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import EventBase
from synapse.handlers.relations import BundledAggregations
from synapse.logging.context import current_context
from synapse.logging.opentracing import (
    SynapseTags,
    log_kv,
    set_tag,
    start_active_span,
    trace,
)
from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, start_active_span
from synapse.push.clientformat import format_push_rules_for_user
from synapse.storage.databases.main.event_push_actions import RoomNotifCounts
from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary
from synapse.storage.roommember import MemberSummary
from synapse.storage.state import StateFilter
from synapse.types import (
    DeviceListUpdates,
    JsonDict,
@@ -60,7 +55,6 @@ from synapse.types import (
    StreamToken,
    UserID,
)
from synapse.types.state import StateFilter
from synapse.util.async_helpers import concurrently_execute
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.caches.lrucache import LruCache
@@ -1534,12 +1528,10 @@ class SyncHandler:
        #
        # If we don't have that info cached then we get all the users that
        # share a room with our user and check if those users have changed.
        cache_result = self.store.get_cached_device_list_changes(
        changed_users = self.store.get_cached_device_list_changes(
            since_token.device_list_key
        )
        if cache_result.hit:
            changed_users = cache_result.entities

        if changed_users is not None:
            result = await self.store.get_rooms_for_users(changed_users)

            for changed_user_id, entries in result.items():
@@ -1592,7 +1584,6 @@ class SyncHandler:
        else:
            return DeviceListUpdates()

    @trace
    async def _generate_sync_entry_for_to_device(
        self, sync_result_builder: "SyncResultBuilder"
    ) -> None:
@@ -1612,16 +1603,11 @@ class SyncHandler:
        )

        for message in messages:
            log_kv(
                {
                    "event": "to_device_message",
                    "sender": message["sender"],
                    "type": message["type"],
                    EventContentFields.TO_DEVICE_MSGID: message["content"].get(
                        EventContentFields.TO_DEVICE_MSGID
                    ),
                }
            )
            # We pop here as we shouldn't be sending the message ID down
            # `/sync`
            message_id = message.pop("message_id", None)
            if message_id:
                set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)

        logger.debug(
            "Returning %d to-device messages between %d and %d (current token: %d)",

@@ -420,11 +420,11 @@ class TypingWriterHandler(FollowerTypingHandler):
        if last_id == current_id:
            return [], current_id, False

        result = self._typing_stream_change_cache.get_all_entities_changed(last_id)
        changed_rooms: Optional[
            Iterable[str]
        ] = self._typing_stream_change_cache.get_all_entities_changed(last_id)

        if result.hit:
            changed_rooms: Iterable[str] = result.entities
        else:
        if changed_rooms is None:
            changed_rooms = self._room_serials

        rows = []

@@ -577,24 +577,7 @@ def _unrecognised_request_handler(request: Request) -> NoReturn:
    Args:
        request: Unused, but passed in to match the signature of ServletCallback.
    """
    raise UnrecognizedRequestError(code=404)


class UnrecognizedRequestResource(resource.Resource):
    """
    Similar to twisted.web.resource.NoResource, but returns a JSON 404 with an
    errcode of M_UNRECOGNIZED.
    """

    def render(self, request: SynapseRequest) -> int:
        f = failure.Failure(UnrecognizedRequestError(code=404))
        return_json_error(f, request, None)
        # A response has already been sent but Twisted requires either NOT_DONE_YET
        # or the response bytes as a return value.
        return NOT_DONE_YET

    def getChild(self, name: str, request: Request) -> resource.Resource:
        return self
    raise UnrecognizedRequestError()


class RootRedirect(resource.Resource):

@@ -292,15 +292,8 @@ logger = logging.getLogger(__name__)


class SynapseTags:
    # The message ID of any to_device EDU processed
    TO_DEVICE_EDU_ID = "to_device.edu_id"

    # Details about to-device messages
    TO_DEVICE_TYPE = "to_device.type"
    TO_DEVICE_SENDER = "to_device.sender"
    TO_DEVICE_RECIPIENT = "to_device.recipient"
    TO_DEVICE_RECIPIENT_DEVICE = "to_device.recipient_device"
    TO_DEVICE_MSGID = "to_device.msgid"  # client-generated ID
    # The message ID of any to_device message processed
    TO_DEVICE_MESSAGE_ID = "to_device.message_id"

    # Whether the sync response has new data to be returned to the client.
    SYNC_RESULT = "sync.new_data"

@@ -68,7 +68,6 @@ from synapse.events.third_party_rules import (
    ON_PROFILE_UPDATE_CALLBACK,
    ON_THREEPID_BIND_CALLBACK,
    ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK,
    UNBIND_THREEPID_CALLBACK,
)
from synapse.handlers.account_data import ON_ACCOUNT_DATA_UPDATED_CALLBACK
from synapse.handlers.account_validity import (
@@ -112,6 +111,7 @@ from synapse.storage.background_updates import (
)
from synapse.storage.database import DatabasePool, LoggingTransaction
from synapse.storage.databases.main.roommember import ProfileInfo
from synapse.storage.state import StateFilter
from synapse.types import (
    DomainSpecificString,
    JsonDict,
@@ -124,7 +124,6 @@ from synapse.types import (
    UserProfile,
    create_requester,
)
from synapse.types.state import StateFilter
from synapse.util import Clock
from synapse.util.async_helpers import maybe_awaitable
from synapse.util.caches.descriptors import CachedFunction, cached
@@ -320,7 +319,6 @@ class ModuleApi:
            ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
        ] = None,
        on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None,
        unbind_threepid: Optional[UNBIND_THREEPID_CALLBACK] = None,
    ) -> None:
        """Registers callbacks for third party event rules capabilities.

@@ -337,7 +335,6 @@ class ModuleApi:
            on_profile_update=on_profile_update,
            on_user_deactivation_status_changed=on_user_deactivation_status_changed,
            on_threepid_bind=on_threepid_bind,
            unbind_threepid=unbind_threepid,
        )

    def register_presence_router_callbacks(

@@ -35,8 +35,8 @@ from synapse.events import EventBase, relation_from_event
from synapse.events.snapshot import EventContext
from synapse.state import POWER_KEY
from synapse.storage.databases.main.roommember import EventIdMembership
from synapse.storage.state import StateFilter
from synapse.synapse_rust.push import FilteredPushRules, PushRuleEvaluator
from synapse.types.state import StateFilter
from synapse.util.caches import register_cache
from synapse.util.metrics import measure_func
from synapse.visibility import filter_event_for_clients_with_state

@@ -37,8 +37,8 @@ from synapse.push.push_types import (
    TemplateVars,
)
from synapse.storage.databases.main.event_push_actions import EmailPushAction
from synapse.storage.state import StateFilter
from synapse.types import StateMap, UserID
from synapse.types.state import StateFilter
from synapse.util.async_helpers import concurrently_execute
from synapse.visibility import filter_events_for_client

@@ -36,14 +36,12 @@ from synapse.replication.tcp.streams import (
    TagAccountDataStream,
    ToDeviceStream,
    TypingStream,
    UnPartialStatedRoomStream,
)
from synapse.replication.tcp.streams.events import (
    EventsStream,
    EventsStreamEventRow,
    EventsStreamRow,
)
from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStreamRow
from synapse.types import PersistedEventPosition, ReadReceipt, StreamKeyType, UserID
from synapse.util.async_helpers import Linearizer, timeout_deferred
from synapse.util.metrics import Measure
@@ -119,7 +117,6 @@ class ReplicationDataHandler:
        self._streams = hs.get_replication_streams()
        self._instance_name = hs.get_instance_name()
        self._typing_handler = hs.get_typing_handler()
        self._state_storage_controller = hs.get_storage_controllers().state

        self._notify_pushers = hs.config.worker.start_pushers
        self._pusher_pool = hs.get_pusherpool()
@@ -239,14 +236,6 @@ class ReplicationDataHandler:
                    self.notifier.notify_user_joined_room(
                        row.data.event_id, row.data.room_id
                    )
        elif stream_name == UnPartialStatedRoomStream.NAME:
            for row in rows:
                assert isinstance(row, UnPartialStatedRoomStreamRow)

                # Wake up any tasks waiting for the room to be un-partial-stated.
                self._state_storage_controller.notify_room_un_partial_stated(
                    row.room_id
                )

        await self._presence_handler.process_replication_rows(
            stream_name, instance_name, token, rows

@@ -42,7 +42,6 @@ from synapse.replication.tcp.streams._base import (
)
from synapse.replication.tcp.streams.events import EventsStream
from synapse.replication.tcp.streams.federation import FederationStream
from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStream

STREAMS_MAP = {
    stream.NAME: stream
@@ -62,7 +61,6 @@ STREAMS_MAP = {
    TagAccountDataStream,
    AccountDataStream,
    UserSignatureStream,
    UnPartialStatedRoomStream,
    )
}
@@ -82,5 +80,4 @@ __all__ = [
    "TagAccountDataStream",
    "AccountDataStream",
    "UserSignatureStream",
    "UnPartialStatedRoomStream",
]

@@ -1,48 +0,0 @@
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

import attr

from synapse.replication.tcp.streams import Stream
from synapse.replication.tcp.streams._base import current_token_without_instance

if TYPE_CHECKING:
    from synapse.server import HomeServer


@attr.s(slots=True, frozen=True, auto_attribs=True)
class UnPartialStatedRoomStreamRow:
    # ID of the room that has been un-partial-stated.
    room_id: str


class UnPartialStatedRoomStream(Stream):
    """
    Stream to notify about rooms becoming un-partial-stated;
    that is, when the background sync finishes such that we now have full state for
    the room.
    """

    NAME = "un_partial_stated_room"
    ROW_TYPE = UnPartialStatedRoomStreamRow

    def __init__(self, hs: "HomeServer"):
        store = hs.get_datastores().main
        super().__init__(
            hs.get_instance_name(),
            # TODO(faster_joins, multiple writers): we need to account for instance names
            current_token_without_instance(store.get_un_partial_stated_rooms_token),
            store.get_un_partial_stated_rooms_from_stream,
        )

@@ -13,13 +13,13 @@
<body>
    <header class="mx_Header">
        {% if app_name == "Riot" %}
            <img src="https://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
            <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
        {% elif app_name == "Vector" %}
            <img src="https://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
            <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
        {% elif app_name == "Element" %}
            <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
        {% else %}
            <img src="https://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
            <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
        {% endif %}
    </header>


@@ -21,13 +21,13 @@
        </td>
        <td class="logo">
            {% if app_name == "Riot" %}
                <img src="https://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
                <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
            {% elif app_name == "Vector" %}
                <img src="https://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
                <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
            {% elif app_name == "Element" %}
                <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
            {% else %}
                <img src="https://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
                <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
            {% endif %}
        </td>
    </tr>

Some files were not shown because too many files have changed in this diff.