Compare commits
23 Commits
v1.105.1...rav/all_co
| Author | SHA1 | Date |
|---|---|---|
|  | 2f6cf12255 |  |
|  | 769e9b11cf |  |
|  | 074ef4d75f |  |
|  | 301c9771c4 |  |
|  | 800a5b6ef3 |  |
|  | 8c667759ad |  |
|  | 14e9ab19be |  |
|  | 20c8991a94 |  |
|  | dcae2b4ba4 |  |
|  | 98f57ea3f2 |  |
|  | f5b6005559 |  |
|  | 47f3870894 |  |
|  | 6d64f1b2b8 |  |
|  | 1d47532310 |  |
|  | 09f0957b36 |  |
|  | 803f05f60c |  |
|  | c8e0bed426 |  |
|  | 28f5ad07d3 |  |
|  | f0d6f14047 |  |
|  | 3a196b3227 |  |
|  | 259442fa4c |  |
|  | fe4719a268 |  |
|  | 3a30846bd0 |  |
.github/workflows/docker.yml (vendored): 2 changes

```diff
@@ -30,7 +30,7 @@ jobs:
        run: docker buildx inspect

      - name: Install Cosign
-       uses: sigstore/cosign-installer@v3.4.0
+       uses: sigstore/cosign-installer@v3.5.0

      - name: Checkout repository
        uses: actions/checkout@v4
```
.github/workflows/docs-pr.yaml (vendored): 4 changes

```diff
@@ -19,7 +19,7 @@ jobs:
          fetch-depth: 0

      - name: Setup mdbook
-       uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+       uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
        with:
          mdbook-version: '0.4.17'

@@ -53,7 +53,7 @@ jobs:
      - uses: actions/checkout@v4

      - name: Setup mdbook
-       uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+       uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
        with:
          mdbook-version: '0.4.17'
```
.github/workflows/docs.yaml (vendored): 6 changes

```diff
@@ -56,7 +56,7 @@ jobs:
          fetch-depth: 0

      - name: Setup mdbook
-       uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+       uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
        with:
          mdbook-version: '0.4.17'

@@ -80,7 +80,7 @@ jobs:

      # Deploy to the target directory.
      - name: Deploy to gh pages
-       uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+       uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./book

@@ -110,7 +110,7 @@ jobs:

      # Deploy to the target directory.
      - name: Deploy to gh pages
-       uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+       uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./dev-docs/_build/html
```
.github/workflows/tests.yml (vendored): 18 changes (the Rust toolchain pin is bumped from 1.65.0 to 1.66.0 in nine jobs)

```diff
@@ -81,7 +81,7 @@ jobs:
    steps:
      - uses: actions/checkout@v4
      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
      - uses: Swatinem/rust-cache@v2
      - uses: matrix-org/setup-python-poetry@v1
        with:
@@ -148,7 +148,7 @@ jobs:
        uses: actions/checkout@v4

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
      - uses: Swatinem/rust-cache@v2

      - name: Setup Poetry
@@ -208,7 +208,7 @@ jobs:
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
      - uses: Swatinem/rust-cache@v2
      - uses: matrix-org/setup-python-poetry@v1
        with:
@@ -225,7 +225,7 @@ jobs:
      - uses: actions/checkout@v4

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
        with:
          components: clippy
      - uses: Swatinem/rust-cache@v2
@@ -344,7 +344,7 @@ jobs:
          postgres:${{ matrix.job.postgres-version }}

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
      - uses: Swatinem/rust-cache@v2

      - uses: matrix-org/setup-python-poetry@v1
@@ -386,7 +386,7 @@ jobs:
      - uses: actions/checkout@v4

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
      - uses: Swatinem/rust-cache@v2

      # There aren't wheels for some of the older deps, so we need to install
@@ -498,7 +498,7 @@ jobs:
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
      - uses: Swatinem/rust-cache@v2

      - name: Run SyTest
@@ -642,7 +642,7 @@ jobs:
          path: synapse

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
      - uses: Swatinem/rust-cache@v2

      - name: Prepare Complement's Prerequisites
@@ -674,7 +674,7 @@ jobs:
      - uses: actions/checkout@v4

      - name: Install Rust
-       uses: dtolnay/rust-toolchain@1.65.0
+       uses: dtolnay/rust-toolchain@1.66.0
      - uses: Swatinem/rust-cache@v2

      - run: cargo test
```
CHANGES.md: 14 changes

```diff
@@ -1,17 +1,3 @@
-# Synapse 1.105.1 (2024-04-23)
-
-## Security advisory
-
-The following issues are fixed in 1.105.1.
-
-- [GHSA-3h7q-rfh9-xm4v](https://github.com/element-hq/synapse/security/advisories/GHSA-3h7q-rfh9-xm4v) / [CVE-2024-31208](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-31208) — High Severity
-
-  Weakness in auth chain indexing allows DoS from remote room members through disk fill and high CPU usage.
-
-See the advisories for more details. If you have any questions, email security@element.io.
-
-
-
 # Synapse 1.105.0 (2024-04-16)
 
 No significant changes since 1.105.0rc1.
```
Cargo.lock (generated): 96 changes

```diff
@@ -13,9 +13,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.81"
+version = "1.0.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247"
+checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519"
 
 [[package]]
 name = "arc-swap"
@@ -29,6 +29,12 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
+[[package]]
+name = "base64"
+version = "0.21.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"
@@ -53,12 +59,27 @@ dependencies = [
  "generic-array",
 ]
 
+[[package]]
+name = "bytes"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9"
+
 [[package]]
 name = "cfg-if"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
+[[package]]
+name = "cpufeatures"
+version = "0.2.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "crypto-common"
 version = "0.1.6"
@@ -80,6 +101,12 @@ dependencies = [
  "subtle",
 ]
 
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
 [[package]]
 name = "generic-array"
 version = "0.14.6"
@@ -90,6 +117,30 @@ dependencies = [
  "version_check",
 ]
 
+[[package]]
+name = "headers"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9"
+dependencies = [
+ "base64",
+ "bytes",
+ "headers-core",
+ "http",
+ "httpdate",
+ "mime",
+ "sha1",
+]
+
+[[package]]
+name = "headers-core"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
+dependencies = [
+ "http",
+]
+
 [[package]]
 name = "heck"
 version = "0.4.1"
@@ -102,6 +153,23 @@ version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
 
+[[package]]
+name = "http"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "httpdate"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
+
 [[package]]
 name = "indoc"
 version = "2.0.4"
@@ -122,9 +190,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
 [[package]]
 name = "libc"
-version = "0.2.135"
+version = "0.2.153"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
+checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
 
 [[package]]
 name = "lock_api"
@@ -157,6 +225,12 @@ dependencies = [
  "autocfg",
 ]
 
+[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
 [[package]]
 name = "once_cell"
 version = "1.15.0"
@@ -376,6 +450,17 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "sha1"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
 [[package]]
 name = "smallvec"
 version = "1.10.0"
@@ -405,7 +490,10 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "blake2",
+ "bytes",
+ "headers",
  "hex",
+ "http",
  "lazy_static",
  "log",
  "pyo3",
```
changelog.d/16920.bugfix (new file)

```diff
@@ -0,0 +1 @@
+Adds validation to ensure that the `limit` parameter on `/publicRooms` is non-negative.
```

changelog.d/16923.bugfix (new file)

```diff
@@ -0,0 +1 @@
+Return `400 M_NOT_JSON` upon receiving invalid JSON in query parameters across various client and admin endpoints, rather than an internal server error.
```

changelog.d/16943.bugfix (new file)

```diff
@@ -0,0 +1 @@
+Make the CSAPI endpoint `/keys/device_signing/upload` idempotent.
```

changelog.d/17032.misc (new file)

```diff
@@ -0,0 +1 @@
+Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar).
```

changelog.d/17036.misc (new file)

```diff
@@ -0,0 +1 @@
+Fix mypy with latest Twisted release.
```

changelog.d/17069.doc (new file)

```diff
@@ -0,0 +1 @@
+Add a prompt in the contributing guide to manually configure icu4c.
```

changelog.d/17079.misc (new file)

```diff
@@ -0,0 +1 @@
+Bump minimum supported Rust version to 1.66.0.
```

changelog.d/17081.misc (new file)

```diff
@@ -0,0 +1 @@
+Add helpers to transform Twisted requests to Rust http Requests/Responses.
```
changelog.d/17086.feature (new file)

```diff
@@ -0,0 +1 @@
+Support delegating the rendezvous mechanism described in MSC4108 to an external implementation.
```
changelog.d/17096.misc (new file)

```diff
@@ -0,0 +1 @@
+Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar).
```

changelog.d/17099.doc (new file)

```diff
@@ -0,0 +1 @@
+Clarify what part of message retention is still experimental.
```

changelog.d/17115.misc (new file)

```diff
@@ -0,0 +1 @@
+`complement.sh`: run tests from all test packages.
```
debian/changelog (vendored): 6 changes

```diff
@@ -1,9 +1,3 @@
-matrix-synapse-py3 (1.105.1) stable; urgency=medium
-
-  * New Synapse release 1.105.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Apr 2024 15:56:18 +0100
-
 matrix-synapse-py3 (1.105.0) stable; urgency=medium
 
   * New Synapse release 1.105.0.
```
```diff
@@ -102,6 +102,8 @@ experimental_features:
   msc3391_enabled: true
   # Filtering /messages by relation type.
   msc3874_enabled: true
+  # no UIA for x-signing upload for the first time
+  msc3967_enabled: true
 
 server_notices:
   system_mxid_localpart: _server
```
docs/development/contributing_guide.md

```diff
@@ -86,6 +86,8 @@ poetry install --extras all
 This will install the runtime and developer dependencies for the project. Be sure to check
 that the `poetry install` step completed cleanly.
 
+For OSX users, be sure to set `PKG_CONFIG_PATH` to support `icu4c`. Run `brew info icu4c` for more details.
+
 ## Running Synapse via poetry
 
 To start a local instance of Synapse in the locked poetry environment, create a config file:
```
docs/message_retention_policies.md

```diff
@@ -7,8 +7,10 @@ follow the semantics described in
 and allow server and room admins to configure how long messages should
 be kept in a homeserver's database before being purged from it.
 **Please note that, as this feature isn't part of the Matrix
-specification yet, this implementation is to be considered as
-experimental.**
+specification yet, the use of `m.room.retention` events for per-room
+retention policies is to be considered as experimental. However, the use
+of a default message retention policy is considered a stable feature
+in Synapse.**
 
 A message retention policy is mainly defined by its `max_lifetime`
 parameter, which defines how long a message can be kept around after
```
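To make the stable/experimental split concrete: the server-wide default comes from the `retention` section of `homeserver.yaml` (stable), while the per-room override comes from an `m.room.retention` state event (experimental). A minimal sketch with illustrative values, expressed as Python data for brevity; lifetimes in the state event are in milliseconds:

```python
# Server-wide default policy (stable), the homeserver.yaml equivalent of:
#   retention:
#     enabled: true
#     default_policy:
#       max_lifetime: 28d
default_policy = {"max_lifetime": 28 * 24 * 3600 * 1000}  # 28 days in ms

# Per-room override via an m.room.retention state event (still experimental):
room_retention_event_content = {"max_lifetime": 7 * 24 * 3600 * 1000}  # 7 days
```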
poetry.lock (generated): 68 changes

```diff
@@ -1848,17 +1848,17 @@ files = [
 
 [[package]]
 name = "pyasn1-modules"
-version = "0.3.0"
+version = "0.4.0"
 description = "A collection of ASN.1-based protocols modules"
 optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+python-versions = ">=3.8"
 files = [
-    {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
-    {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
+    {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
+    {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
 ]
 
 [package.dependencies]
-pyasn1 = ">=0.4.6,<0.6.0"
+pyasn1 = ">=0.4.6,<0.7.0"
 
 [[package]]
 name = "pycparser"
@@ -1983,13 +1983,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
 
 [[package]]
 name = "pygithub"
-version = "2.2.0"
+version = "2.3.0"
 description = "Use the full Github API v3"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "PyGithub-2.2.0-py3-none-any.whl", hash = "sha256:41042ea53e4c372219db708c38d2ca1fd4fadab75475bac27d89d339596cfad1"},
-    {file = "PyGithub-2.2.0.tar.gz", hash = "sha256:e39be7c4dc39418bdd6e3ecab5931c636170b8b21b4d26f9ecf7e6102a3b51c3"},
+    {file = "PyGithub-2.3.0-py3-none-any.whl", hash = "sha256:65b499728be3ce7b0cd2cd760da3b32f0f4d7bc55e5e0677617f90f6564e793e"},
+    {file = "PyGithub-2.3.0.tar.gz", hash = "sha256:0148d7347a1cdeed99af905077010aef81a4dad988b0ba51d4108bf66b443f7e"},
 ]
 
 [package.dependencies]
@@ -2444,28 +2444,28 @@ files = [
 
 [[package]]
 name = "ruff"
-version = "0.3.5"
+version = "0.3.7"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"},
-    {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"},
-    {file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"},
-    {file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"},
-    {file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"},
-    {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"},
-    {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"},
-    {file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"},
-    {file = "ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"},
-    {file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"},
-    {file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"},
-    {file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"},
-    {file = "ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"},
-    {file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"},
-    {file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"},
-    {file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"},
-    {file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"},
+    {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"},
+    {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"},
+    {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"},
+    {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"},
+    {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"},
+    {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"},
+    {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"},
+    {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"},
+    {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"},
+    {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"},
+    {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"},
+    {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"},
+    {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"},
+    {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"},
+    {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"},
+    {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"},
+    {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"},
 ]
 
 [[package]]
@@ -2954,13 +2954,13 @@ docs = ["sphinx (<7.0.0)"]
 
 [[package]]
 name = "twine"
-version = "4.0.2"
+version = "5.0.0"
 description = "Collection of utilities for publishing packages on PyPI"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"},
-    {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"},
+    {file = "twine-5.0.0-py3-none-any.whl", hash = "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0"},
+    {file = "twine-5.0.0.tar.gz", hash = "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4"},
 ]
 
 [package.dependencies]
@@ -3109,13 +3109,13 @@ files = [
 
 [[package]]
 name = "types-pillow"
-version = "10.2.0.20240406"
+version = "10.2.0.20240415"
 description = "Typing stubs for Pillow"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "types-Pillow-10.2.0.20240406.tar.gz", hash = "sha256:62e0cc1f17caba40e72e7154a483f4c7f3bea0e1c34c0ebba9de3c7745bc306d"},
-    {file = "types_Pillow-10.2.0.20240406-py3-none-any.whl", hash = "sha256:5ac182e8afce53de30abca2fdf9cbec7b2500e549d0be84da035a729a84c7c47"},
+    {file = "types-Pillow-10.2.0.20240415.tar.gz", hash = "sha256:dd6058027639bcdc66ba78b228cc25fdae42524c2150c78c804da427e7e76e70"},
+    {file = "types_Pillow-10.2.0.20240415-py3-none-any.whl", hash = "sha256:f933332b7e96010bae9b9cf82a4c9979ff0c270d63f5c5bbffb2d789b85cd00b"},
 ]
 
 [[package]]
@@ -3451,4 +3451,4 @@ user-search = ["pyicu"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8.0"
-content-hash = "4abda113a01f162bb3978b0372956d569364533aa39f57863c234363f8449a4f"
+content-hash = "1951f2b4623138d47db08a405edd970e67599d05804bb459af21a085e1665f69"
```
pyproject.toml

```diff
@@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust"
 
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.105.1"
+version = "1.105.0"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "AGPL-3.0-or-later"
@@ -321,7 +321,7 @@ all = [
 # This helps prevents merge conflicts when running a batch of dependabot updates.
 isort = ">=5.10.1"
 black = ">=22.7.0"
-ruff = "0.3.5"
+ruff = "0.3.7"
 # Type checking only works with the pydantic.v1 compat module from pydantic v2
 pydantic = "^2"
```
rust/Cargo.toml

```diff
@@ -7,7 +7,7 @@ name = "synapse"
 version = "0.1.0"
 
 edition = "2021"
-rust-version = "1.65.0"
+rust-version = "1.66.0"
 
 [lib]
 name = "synapse"
@@ -23,6 +23,9 @@ name = "synapse.synapse_rust"
 
 [dependencies]
 anyhow = "1.0.63"
+bytes = "1.6.0"
+headers = "0.4.0"
+http = "1.1.0"
 lazy_static = "1.4.0"
 log = "0.4.17"
 pyo3 = { version = "0.20.0", features = [
```
rust/src/errors.rs (new file): 60 lines

```rust
/*
 * This file is licensed under the Affero General Public License (AGPL) version 3.
 *
 * Copyright (C) 2024 New Vector, Ltd
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * See the GNU Affero General Public License for more details:
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 */

#![allow(clippy::new_ret_no_self)]

use std::collections::HashMap;

use http::{HeaderMap, StatusCode};
use pyo3::{exceptions::PyValueError, import_exception};

import_exception!(synapse.api.errors, SynapseError);

impl SynapseError {
    pub fn new(
        code: StatusCode,
        message: String,
        errcode: &'static str,
        additional_fields: Option<HashMap<String, String>>,
        headers: Option<HeaderMap>,
    ) -> pyo3::PyErr {
        // Transform the HeaderMap into a HashMap<String, String>
        let headers = if let Some(headers) = headers {
            let mut map = HashMap::with_capacity(headers.len());
            for (key, value) in headers.iter() {
                let Ok(value) = value.to_str() else {
                    // This should never happen, but we don't want to panic in case it does
                    return PyValueError::new_err(
                        "Could not construct SynapseError: header value is not valid ASCII",
                    );
                };

                map.insert(key.as_str().to_owned(), value.to_owned());
            }
            Some(map)
        } else {
            None
        };

        SynapseError::new_err((code.as_u16(), message, errcode, additional_fields, headers))
    }
}

import_exception!(synapse.api.errors, NotFoundError);

impl NotFoundError {
    pub fn new() -> pyo3::PyErr {
        NotFoundError::new_err(())
    }
}
```
rust/src/http.rs (new file): 165 lines

```rust
/*
 * This file is licensed under the Affero General Public License (AGPL) version 3.
 *
 * Copyright (C) 2024 New Vector, Ltd
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * See the GNU Affero General Public License for more details:
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 */

use bytes::{Buf, BufMut, Bytes, BytesMut};
use headers::{Header, HeaderMapExt};
use http::{HeaderName, HeaderValue, Method, Request, Response, StatusCode, Uri};
use pyo3::{
    exceptions::PyValueError,
    types::{PyBytes, PySequence, PyTuple},
    PyAny, PyResult,
};

use crate::errors::SynapseError;

/// Read a file-like Python object by chunks
///
/// # Errors
///
/// Returns an error if calling `read` on the Python object failed
fn read_io_body(body: &PyAny, chunk_size: usize) -> PyResult<Bytes> {
    let mut buf = BytesMut::new();
    loop {
        let bytes: &PyBytes = body.call_method1("read", (chunk_size,))?.downcast()?;
        if bytes.as_bytes().is_empty() {
            return Ok(buf.into());
        }
        buf.put(bytes.as_bytes());
    }
}

/// Transform a Twisted `IRequest` to an [`http::Request`]
///
/// It uses the following members of `IRequest`:
/// - `content`, which is expected to be a file-like object with a `read` method
/// - `uri`, which is expected to be a valid URI as `bytes`
/// - `method`, which is expected to be a valid HTTP method as `bytes`
/// - `requestHeaders`, which is expected to have a `getAllRawHeaders` method
///
/// # Errors
///
/// Returns an error if the Python object doesn't properly implement `IRequest`
pub fn http_request_from_twisted(request: &PyAny) -> PyResult<Request<Bytes>> {
    let content = request.getattr("content")?;
    let body = read_io_body(content, 4096)?;

    let mut req = Request::new(body);

    let uri: &PyBytes = request.getattr("uri")?.downcast()?;
    *req.uri_mut() =
        Uri::try_from(uri.as_bytes()).map_err(|_| PyValueError::new_err("invalid uri"))?;

    let method: &PyBytes = request.getattr("method")?.downcast()?;
    *req.method_mut() = Method::from_bytes(method.as_bytes())
        .map_err(|_| PyValueError::new_err("invalid method"))?;

    let headers_iter = request
        .getattr("requestHeaders")?
        .call_method0("getAllRawHeaders")?
        .iter()?;

    for header in headers_iter {
        let header = header?;
        let header: &PyTuple = header.downcast()?;
        let name: &PyBytes = header.get_item(0)?.downcast()?;
        let name = HeaderName::from_bytes(name.as_bytes())
            .map_err(|_| PyValueError::new_err("invalid header name"))?;

        let values: &PySequence = header.get_item(1)?.downcast()?;
        for index in 0..values.len()? {
            let value: &PyBytes = values.get_item(index)?.downcast()?;
            let value = HeaderValue::from_bytes(value.as_bytes())
                .map_err(|_| PyValueError::new_err("invalid header value"))?;
            req.headers_mut().append(name.clone(), value);
        }
    }

    Ok(req)
}

/// Send an [`http::Response`] through a Twisted `IRequest`
///
/// It uses the following members of `IRequest`:
///
/// - `responseHeaders`, which is expected to have an `addRawHeader(bytes, bytes)` method
/// - `setResponseCode(int)` method
/// - `write(bytes)` method
/// - `finish()` method
///
/// # Errors
///
/// Returns an error if the Python object doesn't properly implement `IRequest`
pub fn http_response_to_twisted<B>(request: &PyAny, response: Response<B>) -> PyResult<()>
where
    B: Buf,
{
    let (parts, mut body) = response.into_parts();

    request.call_method1("setResponseCode", (parts.status.as_u16(),))?;

    let response_headers = request.getattr("responseHeaders")?;
    for (name, value) in parts.headers.iter() {
        response_headers.call_method1("addRawHeader", (name.as_str(), value.as_bytes()))?;
    }

    while body.remaining() != 0 {
        let chunk = body.chunk();
        request.call_method1("write", (chunk,))?;
        body.advance(chunk.len());
    }

    request.call_method0("finish")?;

    Ok(())
}

/// An extension trait for [`HeaderMap`] that provides typed access to headers, and throws the
/// right python exceptions when the header is missing or fails to parse.
///
/// [`HeaderMap`]: headers::HeaderMap
pub trait HeaderMapPyExt: HeaderMapExt {
    /// Get a header from the map, returning an error if it is missing or invalid.
    fn typed_get_required<H>(&self) -> PyResult<H>
    where
        H: Header,
    {
        self.typed_get_optional::<H>()?.ok_or_else(|| {
            SynapseError::new(
                StatusCode::BAD_REQUEST,
                format!("Missing required header: {}", H::name()),
                "M_MISSING_PARAM",
                None,
                None,
            )
        })
    }

    /// Get a header from the map, returning `None` if it is missing and an error if it is invalid.
    fn typed_get_optional<H>(&self) -> PyResult<Option<H>>
    where
        H: Header,
    {
        self.typed_try_get::<H>().map_err(|_| {
            SynapseError::new(
                StatusCode::BAD_REQUEST,
                format!("Invalid header: {}", H::name()),
                "M_INVALID_PARAM",
                None,
                None,
            )
        })
    }
}

impl<T: HeaderMapExt> HeaderMapPyExt for T {}
```
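The `read_io_body` helper above relies only on the standard file-like `read(size)` contract: it keeps requesting fixed-size chunks until an empty read signals EOF. A minimal Python sketch of the same loop, using `io.BytesIO` as a stand-in for the Twisted request's `content` object (the function name and chunk size here mirror the Rust helper for illustration; they are not part of Synapse's Python API):

```python
import io

def read_io_body(body: io.BufferedIOBase, chunk_size: int = 4096) -> bytes:
    """Drain a file-like object chunk by chunk, mirroring the Rust helper."""
    buf = bytearray()
    while True:
        chunk = body.read(chunk_size)
        if not chunk:  # an empty read means EOF, same as the Rust loop
            return bytes(buf)
        buf.extend(chunk)

# Example: a request body delivered as an in-memory stream.
content = io.BytesIO(b'{"limit": 10}')
assert read_io_body(content) == b'{"limit": 10}'
```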
rust/src/lib.rs

```diff
@@ -3,7 +3,9 @@ use pyo3::prelude::*;
 use pyo3_log::ResetHandle;
 
 pub mod acl;
+pub mod errors;
 pub mod events;
+pub mod http;
 pub mod push;
 
 lazy_static! {
```
scripts-dev/complement.sh

```diff
@@ -214,8 +214,6 @@ fi
 
 extra_test_args=()
 
-test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902"
-
 # Enable dirty runs, so tests will reuse the same container where possible.
 # This significantly speeds up tests, but increases the possibility of test pollution.
 export COMPLEMENT_ENABLE_DIRTY_RUNS=1
@@ -278,7 +276,12 @@ fi
 export PASS_SYNAPSE_LOG_TESTING=1
 
 # Run the tests!
-echo "Images built; running complement with ${extra_test_args[@]} $@ $test_packages"
 cd "$COMPLEMENT_DIR"
 
-go test -v -tags "synapse_blacklist" -count=1 "${extra_test_args[@]}" "$@" $test_packages
+# This isn't whitespace-safe but *does* work on the prehistoric version of bash
+# on OSX.
+test_packages=( $(find ./tests -type d) )
+
+echo "Images built; running complement with ${extra_test_args[@]} $@ ${test_packages[@]}"
+
+go test -v -tags "synapse_blacklist" -count=1 "${extra_test_args[@]}" "$@" "${test_packages[@]}"
```
synapse/config/experimental.py

```diff
@@ -411,3 +411,14 @@ class ExperimentalConfig(Config):
         self.msc4069_profile_inhibit_propagation = experimental.get(
             "msc4069_profile_inhibit_propagation", False
         )
+
+        # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code
+        self.msc4108_delegation_endpoint: Optional[str] = experimental.get(
+            "msc4108_delegation_endpoint", None
+        )
+
+        if self.msc4108_delegation_endpoint is not None and not self.msc3861.enabled:
+            raise ConfigError(
+                "MSC4108 requires MSC3861 to be enabled",
+                ("experimental", "msc4108_delegation_endpoint"),
+            )
```
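A minimal sketch of the validation rule added here, with a made-up `check_msc4108` helper and a plain dict standing in for the parsed `experimental` section of homeserver.yaml (neither is Synapse API; the endpoint URL is illustrative):

```python
from typing import Optional

class ConfigError(Exception):
    pass

def check_msc4108(experimental: dict, msc3861_enabled: bool) -> Optional[str]:
    """Mirror of the config check: MSC4108 delegation requires MSC3861."""
    endpoint: Optional[str] = experimental.get("msc4108_delegation_endpoint", None)
    if endpoint is not None and not msc3861_enabled:
        raise ConfigError("MSC4108 requires MSC3861 to be enabled")
    return endpoint

# Setting the delegation endpoint without MSC3861 is rejected at startup:
try:
    check_msc4108({"msc4108_delegation_endpoint": "https://rendezvous.example.com"}, False)
except ConfigError as e:
    print(e)  # MSC4108 requires MSC3861 to be enabled
```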
synapse/handlers/e2e_keys.py

```diff
@@ -1476,6 +1476,42 @@ class E2eKeysHandler:
         else:
             return exists, self.clock.time_msec() < ts_replacable_without_uia_before
 
+    async def has_different_keys(self, user_id: str, body: JsonDict) -> bool:
+        """
+        Check if a key provided in `body` differs from the same key stored in the DB. Returns
+        true on the first difference. If a key exists in `body` but does not exist in the DB,
+        returns True. If `body` has no keys, this always returns False.
+        Note by 'key' we mean Matrix key rather than JSON key.
+
+        The purpose of this function is to detect whether or not we need to apply UIA checks.
+        We must apply UIA checks if any key in the database is being overwritten. If a key is
+        being inserted for the first time, or if the key exactly matches what is in the database,
+        then no UIA check needs to be performed.
+
+        Args:
+            user_id: The user who sent the `body`.
+            body: The JSON request body from POST /keys/device_signing/upload
+        Returns:
+            True if any key in `body` has a different value in the database.
+        """
+        # Ensure that each key provided in the request body exactly matches the one we have stored.
+        # The first time we see the DB having a different key to the matching request key, bail.
+        # Note: we do not care if the DB has a key which the request does not specify, as we only
+        # care about *replacements* or *insertions* (i.e UPSERT)
+        req_body_key_to_db_key = {
+            "master_key": "master",
+            "self_signing_key": "self_signing",
+            "user_signing_key": "user_signing",
+        }
+        for req_body_key, db_key in req_body_key_to_db_key.items():
+            if req_body_key in body:
+                existing_key = await self.store.get_e2e_cross_signing_key(
+                    user_id, db_key
+                )
+                if existing_key != body[req_body_key]:
+                    return True
+        return False
+
 
 def _check_cross_signing_key(
     key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
```
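To make the UIA decision concrete, here is a small self-contained sketch of the same comparison rule on plain dicts (the stored keys and request bodies are made-up examples, not Synapse storage objects):

```python
# Mapping from request-body field to the stored key type, as in the handler.
REQ_TO_DB = {
    "master_key": "master",
    "self_signing_key": "self_signing",
    "user_signing_key": "user_signing",
}

def has_different_keys(stored: dict, body: dict) -> bool:
    """Return True if any key in `body` differs from (or is absent in) `stored`."""
    for req_key, db_key in REQ_TO_DB.items():
        if req_key in body and stored.get(db_key) != body[req_key]:
            return True
    return False

stored = {"master": {"keys": {"ed25519:abc": "abc"}}}
# Re-uploading the identical master key: no difference, so no UIA needed
# (this is what makes /keys/device_signing/upload idempotent).
assert not has_different_keys(stored, {"master_key": {"keys": {"ed25519:abc": "abc"}}})
# Uploading a changed key: a difference is found, so UIA is required.
assert has_different_keys(stored, {"master_key": {"keys": {"ed25519:xyz": "xyz"}}})
```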
synapse/http/proxy.py

```diff
@@ -262,7 +262,8 @@ class _ProxyResponseBody(protocol.Protocol):
             self._request.finish()
         else:
             # Abort the underlying request since our remote request also failed.
-            self._request.transport.abortConnection()
+            if self._request.channel:
+                self._request.channel.forceAbortClient()
 
 
 class ProxySite(Site):
```
synapse/http/server.py

```diff
@@ -153,9 +153,9 @@ def return_json_error(
     # Only respond with an error response if we haven't already started writing,
     # otherwise lets just kill the connection
     if request.startedWriting:
-        if request.transport:
+        if request.channel:
             try:
-                request.transport.abortConnection()
+                request.channel.forceAbortClient()
             except Exception:
                 # abortConnection throws if the connection is already closed
                 pass
```
```diff
@@ -909,7 +909,18 @@ def set_cors_headers(request: "SynapseRequest") -> None:
     request.setHeader(
         b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS"
     )
-    if request.experimental_cors_msc3886:
+    if request.path is not None and request.path.startswith(
+        b"/_matrix/client/unstable/org.matrix.msc4108/rendezvous"
+    ):
+        request.setHeader(
+            b"Access-Control-Allow-Headers",
+            b"Content-Type, If-Match, If-None-Match",
+        )
+        request.setHeader(
+            b"Access-Control-Expose-Headers",
+            b"Synapse-Trace-Id, Server, ETag",
+        )
+    elif request.experimental_cors_msc3886:
         request.setHeader(
             b"Access-Control-Allow-Headers",
             b"X-Requested-With, Content-Type, Authorization, Date, If-Match, If-None-Match",
```
synapse/http/servlet.py

```diff
@@ -19,9 +19,11 @@
 #
 #
 
-""" This module contains base REST classes for constructing REST servlets. """
+"""This module contains base REST classes for constructing REST servlets."""
 
 import enum
 import logging
+import urllib.parse as urlparse
 from http import HTTPStatus
 from typing import (
     TYPE_CHECKING,
@@ -65,17 +67,49 @@ def parse_integer(request: Request, name: str, default: int) -> int: ...
 
 
 @overload
-def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: ...
+def parse_integer(
+    request: Request, name: str, *, default: int, negative: bool
+) -> int: ...
 
 
 @overload
 def parse_integer(
-    request: Request, name: str, default: Optional[int] = None, required: bool = False
+    request: Request, name: str, *, default: int, negative: bool = False
+) -> int: ...
+
+
+@overload
+def parse_integer(
+    request: Request, name: str, *, required: Literal[True], negative: bool = False
+) -> int: ...
+
+
+@overload
+def parse_integer(
+    request: Request, name: str, *, default: Literal[None], negative: bool = False
+) -> None: ...
+
+
+@overload
+def parse_integer(request: Request, name: str, *, negative: bool) -> Optional[int]: ...
+
+
+@overload
+def parse_integer(
+    request: Request,
+    name: str,
+    default: Optional[int] = None,
+    required: bool = False,
+    negative: bool = False,
+) -> Optional[int]: ...
+
+
+def parse_integer(
+    request: Request,
+    name: str,
+    default: Optional[int] = None,
+    required: bool = False,
+    negative: bool = False,
 ) -> Optional[int]:
     """Parse an integer parameter from the request string
@@ -85,16 +119,17 @@ def parse_integer(
         default: value to use if the parameter is absent, defaults to None.
         required: whether to raise a 400 SynapseError if the parameter is absent,
             defaults to False.
+        negative: whether to allow negative integers, defaults to True.
 
     Returns:
         An int value or the default.
 
     Raises:
-        SynapseError: if the parameter is absent and required, or if the
-            parameter is present and not an integer.
+        SynapseError: if the parameter is absent and required, if the
+            parameter is present and not an integer, or if the
+            parameter is illegitimately negative.
     """
     args: Mapping[bytes, Sequence[bytes]] = request.args  # type: ignore
-    return parse_integer_from_args(args, name, default, required)
+    return parse_integer_from_args(args, name, default, required, negative)
 
 
 @overload
@@ -120,6 +155,7 @@ def parse_integer_from_args(
     name: str,
     default: Optional[int] = None,
     required: bool = False,
+    negative: bool = False,
 ) -> Optional[int]: ...
 
 
@@ -128,6 +164,7 @@ def parse_integer_from_args(
     name: str,
     default: Optional[int] = None,
     required: bool = False,
+    negative: bool = True,
 ) -> Optional[int]:
     """Parse an integer parameter from the request string
@@ -137,33 +174,37 @@ def parse_integer_from_args(
         default: value to use if the parameter is absent, defaults to None.
         required: whether to raise a 400 SynapseError if the parameter is absent,
             defaults to False.
+        negative: whether to allow negative integers, defaults to True.
 
     Returns:
         An int value or the default.
 
     Raises:
-        SynapseError: if the parameter is absent and required, or if the
-            parameter is present and not an integer.
+        SynapseError: if the parameter is absent and required, if the
+            parameter is present and not an integer, or if the
+            parameter is illegitimately negative.
     """
     name_bytes = name.encode("ascii")
 
-    if name_bytes in args:
-        try:
-            return int(args[name_bytes][0])
-        except Exception:
-            message = "Query parameter %r must be an integer" % (name,)
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM
-            )
-    else:
-        if required:
-            message = "Missing integer query parameter %r" % (name,)
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM
-            )
-        else:
-            return default
+    if name_bytes not in args:
+        if not required:
+            return default
+
+        message = f"Missing required integer query parameter {name}"
+        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM)
+
+    try:
+        integer = int(args[name_bytes][0])
+    except Exception:
+        message = f"Query parameter {name} must be an integer"
+        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM)
+
+    if not negative and integer < 0:
+        message = f"Query parameter {name} must be a positive integer."
+        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM)
+
+    return integer
 
 
 @overload
 def parse_boolean(request: Request, name: str, default: bool) -> bool: ...
@@ -410,6 +451,87 @@ def parse_string(
     )
 
 
+def parse_json(
+    request: Request,
+    name: str,
+    default: Optional[dict] = None,
+    required: bool = False,
+    encoding: str = "ascii",
+) -> Optional[JsonDict]:
+    """
+    Parse a JSON parameter from the request query string.
+
+    Args:
+        request: the twisted HTTP request.
+        name: the name of the query parameter.
+        default: value to use if the parameter is absent,
+            defaults to None.
+        required: whether to raise a 400 SynapseError if the
+            parameter is absent, defaults to False.
+        encoding: The encoding to decode the string content with.
+
+    Returns:
+        A JSON value, or `default` if the named query parameter was not found
+        and `required` was False.
+
+    Raises:
+        SynapseError if the parameter is absent and required, or if the
+            parameter is present and not a JSON object.
+    """
+    args: Mapping[bytes, Sequence[bytes]] = request.args  # type: ignore
+    return parse_json_from_args(
+        args,
+        name,
+        default,
+        required=required,
+        encoding=encoding,
+    )
+
+
+def parse_json_from_args(
+    args: Mapping[bytes, Sequence[bytes]],
+    name: str,
+    default: Optional[dict] = None,
+    required: bool = False,
+    encoding: str = "ascii",
+) -> Optional[JsonDict]:
+    """
+    Parse a JSON parameter from the request query string.
+
+    Args:
+        args: a mapping of request args as bytes to a list of bytes (e.g. request.args).
+        name: the name of the query parameter.
+        default: value to use if the parameter is absent,
+            defaults to None.
+        required: whether to raise a 400 SynapseError if the
+            parameter is absent, defaults to False.
+        encoding: the encoding to decode the string content with.
+
+    Returns:
+        A JSON value, or `default` if the named query parameter was not found
+        and `required` was False.
+
+    Raises:
+        SynapseError if the parameter is absent and required, or if the
+            parameter is present and not a JSON object.
+    """
+    name_bytes = name.encode("ascii")
+
+    if name_bytes not in args:
+        if not required:
+            return default
+
+        message = f"Missing required query parameter {name}"
+        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM)
+
+    json_str = parse_string_from_args(args, name, required=True, encoding=encoding)
+
+    try:
+        return json_decoder.decode(urlparse.unquote(json_str))
+    except Exception:
+        message = f"Query parameter {name} must be a valid JSON object"
+        raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.NOT_JSON)
+
+
 EnumT = TypeVar("EnumT", bound=enum.Enum)
```
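Both helpers share the same shape: look up the raw bytes for the parameter, convert, and map any failure to a 400 with a Matrix error code. A condensed, self-contained Python sketch of the two code paths, operating on a plain `request.args`-style mapping (the `SynapseError` stand-in below is simplified; the real class carries more state, and the error-code strings are the values behind `Codes.*`):

```python
import json
import urllib.parse
from http import HTTPStatus
from typing import Mapping, Optional, Sequence

class SynapseError(Exception):
    def __init__(self, code: int, msg: str, errcode: str):
        super().__init__(msg)
        self.code, self.errcode = code, errcode

def parse_integer_from_args(
    args: Mapping[bytes, Sequence[bytes]],
    name: str,
    default: Optional[int] = None,
    required: bool = False,
    negative: bool = False,
) -> Optional[int]:
    name_bytes = name.encode("ascii")
    if name_bytes not in args:
        if not required:
            return default
        raise SynapseError(HTTPStatus.BAD_REQUEST,
                           f"Missing required integer query parameter {name}",
                           "M_MISSING_PARAM")
    try:
        integer = int(args[name_bytes][0])
    except Exception:
        raise SynapseError(HTTPStatus.BAD_REQUEST,
                           f"Query parameter {name} must be an integer",
                           "M_INVALID_PARAM")
    if not negative and integer < 0:
        raise SynapseError(HTTPStatus.BAD_REQUEST,
                           f"Query parameter {name} must be a positive integer.",
                           "M_INVALID_PARAM")
    return integer

def parse_json_from_args(args, name, default=None, required=False, encoding="ascii"):
    name_bytes = name.encode("ascii")
    if name_bytes not in args:
        if not required:
            return default
        raise SynapseError(HTTPStatus.BAD_REQUEST,
                           f"Missing required query parameter {name}",
                           "M_MISSING_PARAM")
    json_str = args[name_bytes][0].decode(encoding)
    try:
        return json.loads(urllib.parse.unquote(json_str))
    except Exception:
        raise SynapseError(HTTPStatus.BAD_REQUEST,
                           f"Query parameter {name} must be a valid JSON object",
                           "M_NOT_JSON")

# A request like GET /_synapse/admin/v2/users?from=-1 now fails fast:
args = {b"from": [b"-1"]}
try:
    parse_integer_from_args(args, "from", default=0, negative=False)
except SynapseError as e:
    print(e.code, e.errcode)  # 400 M_INVALID_PARAM

# A URL-encoded ?filter= parameter decodes to a filter dict instead of a 500:
args = {b"filter": [b"%7B%22types%22%3A%5B%22m.room.message%22%5D%7D"]}
print(parse_json_from_args(args, "filter"))  # {'types': ['m.room.message']}
```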
synapse/http/site.py

```diff
@@ -150,7 +150,8 @@ class SynapseRequest(Request):
                 self.get_method(),
                 self.get_redacted_uri(),
             )
-            self.transport.abortConnection()
+            if self.channel:
+                self.channel.forceAbortClient()
             return
         super().handleContentChunk(data)
```
synapse/rest/admin/federation.py

```diff
@@ -23,7 +23,7 @@ from http import HTTPStatus
 from typing import TYPE_CHECKING, Tuple
 
 from synapse.api.constants import Direction
-from synapse.api.errors import Codes, NotFoundError, SynapseError
+from synapse.api.errors import NotFoundError, SynapseError
 from synapse.federation.transport.server import Authenticator
 from synapse.http.servlet import RestServlet, parse_enum, parse_integer, parse_string
 from synapse.http.site import SynapseRequest
@@ -61,22 +61,8 @@ class ListDestinationsRestServlet(RestServlet):
     async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         await assert_requester_is_admin(self._auth, request)
 
-        start = parse_integer(request, "from", default=0)
-        limit = parse_integer(request, "limit", default=100)
-
-        if start < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter from must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
-
-        if limit < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter limit must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
+        start = parse_integer(request, "from", default=0, negative=False)
+        limit = parse_integer(request, "limit", default=100, negative=False)
 
         destination = parse_string(request, "destination")
@@ -195,22 +181,8 @@ class DestinationMembershipRestServlet(RestServlet):
         if not await self._store.is_destination_known(destination):
             raise NotFoundError("Unknown destination")
 
-        start = parse_integer(request, "from", default=0)
-        limit = parse_integer(request, "limit", default=100)
-
-        if start < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter from must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
-
-        if limit < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter limit must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
+        start = parse_integer(request, "from", default=0, negative=False)
+        limit = parse_integer(request, "limit", default=100, negative=False)
 
         direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS)
```
synapse/rest/admin/media.py

```diff
@@ -311,29 +311,17 @@ class DeleteMediaByDateSize(RestServlet):
     ) -> Tuple[int, JsonDict]:
         await assert_requester_is_admin(self.auth, request)
 
-        before_ts = parse_integer(request, "before_ts", required=True)
-        size_gt = parse_integer(request, "size_gt", default=0)
+        before_ts = parse_integer(request, "before_ts", required=True, negative=False)
+        size_gt = parse_integer(request, "size_gt", default=0, negative=False)
         keep_profiles = parse_boolean(request, "keep_profiles", default=True)
 
-        if before_ts < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter before_ts must be a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
-        elif before_ts < 30000000000:  # Dec 1970 in milliseconds, Aug 2920 in seconds
+        if before_ts < 30000000000:  # Dec 1970 in milliseconds, Aug 2920 in seconds
             raise SynapseError(
                 HTTPStatus.BAD_REQUEST,
                 "Query parameter before_ts you provided is from the year 1970. "
                 + "Double check that you are providing a timestamp in milliseconds.",
                 errcode=Codes.INVALID_PARAM,
             )
-        if size_gt < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter size_gt must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
 
         # This check is useless, we keep it for the legacy endpoint only.
         if server_name is not None and self.server_name != server_name:
@@ -389,22 +377,8 @@ class UserMediaRestServlet(RestServlet):
         if user is None:
             raise NotFoundError("Unknown user")
 
-        start = parse_integer(request, "from", default=0)
-        limit = parse_integer(request, "limit", default=100)
-
-        if start < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter from must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
-
-        if limit < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter limit must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
+        start = parse_integer(request, "from", default=0, negative=False)
+        limit = parse_integer(request, "limit", default=100, negative=False)
 
         # If neither `order_by` nor `dir` is set, set the default order
         # to newest media is on top for backward compatibility.
@@ -447,22 +421,8 @@ class UserMediaRestServlet(RestServlet):
         if user is None:
             raise NotFoundError("Unknown user")
 
-        start = parse_integer(request, "from", default=0)
-        limit = parse_integer(request, "limit", default=100)
-
-        if start < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter from must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
-
-        if limit < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter limit must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
+        start = parse_integer(request, "from", default=0, negative=False)
+        limit = parse_integer(request, "limit", default=100, negative=False)
 
         # If neither `order_by` nor `dir` is set, set the default order
         # to newest media is on top for backward compatibility.
```
@@ -21,7 +21,6 @@
|
||||
import logging
|
||||
from http import HTTPStatus
|
||||
from typing import TYPE_CHECKING, List, Optional, Tuple, cast
|
||||
from urllib import parse as urlparse
|
||||
|
||||
import attr
|
||||
|
||||
@@ -38,6 +37,7 @@ from synapse.http.servlet import (
|
||||
assert_params_in_dict,
|
||||
parse_enum,
|
||||
parse_integer,
|
||||
parse_json,
|
||||
parse_json_object_from_request,
|
||||
parse_string,
|
||||
)
|
||||
@@ -51,7 +51,6 @@ from synapse.storage.databases.main.room import RoomSortOrder
|
||||
from synapse.streams.config import PaginationConfig
|
||||
from synapse.types import JsonDict, RoomID, ScheduledTask, UserID, create_requester
|
||||
from synapse.types.state import StateFilter
|
||||
from synapse.util import json_decoder
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.api.auth import Auth
|
||||
@@ -776,14 +775,8 @@ class RoomEventContextServlet(RestServlet):
         limit = parse_integer(request, "limit", default=10)

         # picking the API shape for symmetry with /messages
-        filter_str = parse_string(request, "filter", encoding="utf-8")
-        if filter_str:
-            filter_json = urlparse.unquote(filter_str)
-            event_filter: Optional[Filter] = Filter(
-                self._hs, json_decoder.decode(filter_json)
-            )
-        else:
-            event_filter = None
+        filter_json = parse_json(request, "filter", encoding="utf-8")
+        event_filter = Filter(self._hs, filter_json) if filter_json else None

         event_context = await self.room_context_handler.get_event_context(
             requester,
@@ -914,21 +907,16 @@ class RoomMessagesRestServlet(RestServlet):
         )
         # Twisted will have processed the args by now.
         assert request.args is not None
-        as_client_event = b"raw" not in request.args
-        filter_str = parse_string(request, "filter", encoding="utf-8")
-        if filter_str:
-            filter_json = urlparse.unquote(filter_str)
-            event_filter: Optional[Filter] = Filter(
-                self._hs, json_decoder.decode(filter_json)
-            )
-            if (
-                event_filter
-                and event_filter.filter_json.get("event_format", "client")
-                == "federation"
-            ):
-                as_client_event = False
-        else:
-            event_filter = None
+
+        filter_json = parse_json(request, "filter", encoding="utf-8")
+        event_filter = Filter(self._hs, filter_json) if filter_json else None
+
+        as_client_event = b"raw" not in request.args
+        if (
+            event_filter
+            and event_filter.filter_json.get("event_format", "client") == "federation"
+        ):
+            as_client_event = False

         msgs = await self._pagination_handler.get_messages(
             room_id=room_id,
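Both hunks above swap manual `parse_string` plus `urlparse.unquote` plus `json_decoder` handling for a single `parse_json` helper. A hedged sketch of what such a helper does, written as a standalone function rather than Synapse's real request-based API:

```python
import json
from typing import Any, Mapping, Optional
from urllib import parse as urlparse


def parse_json(args: Mapping[str, str], name: str) -> Optional[Any]:
    raw = args.get(name)
    if raw is None:
        return None
    try:
        return json.loads(urlparse.unquote(raw))
    except json.JSONDecodeError:
        # Synapse responds 400 with errcode M_NOT_JSON here, as the new
        # filter-validation tests further down assert.
        raise ValueError(f"Query parameter {name} must be valid JSON (M_NOT_JSON)")


print(parse_json({"filter": '{"types": ["m.room.message"]}'}, "filter"))
# -> {'types': ['m.room.message']}
# parse_json({"filter": "}}}{}"}, "filter") raises the M_NOT_JSON-style error.
```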
@@ -63,38 +63,12 @@ class UserMediaStatisticsRestServlet(RestServlet):
             ),
         )

-        start = parse_integer(request, "from", default=0)
-        if start < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter from must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
-
-        limit = parse_integer(request, "limit", default=100)
-        if limit < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter limit must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
-
-        from_ts = parse_integer(request, "from_ts", default=0)
-        if from_ts < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter from_ts must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
-
-        until_ts = parse_integer(request, "until_ts")
-        if until_ts is not None:
-            if until_ts < 0:
-                raise SynapseError(
-                    HTTPStatus.BAD_REQUEST,
-                    "Query parameter until_ts must be a string representing a positive integer.",
-                    errcode=Codes.INVALID_PARAM,
-                )
+        start = parse_integer(request, "from", default=0, negative=False)
+        limit = parse_integer(request, "limit", default=100, negative=False)
+        from_ts = parse_integer(request, "from_ts", default=0, negative=False)
+        until_ts = parse_integer(request, "until_ts", negative=False)
+
+        if until_ts is not None:
             if until_ts <= from_ts:
                 raise SynapseError(
                     HTTPStatus.BAD_REQUEST,
@@ -90,22 +90,8 @@ class UsersRestServletV2(RestServlet):
     async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         await assert_requester_is_admin(self.auth, request)

-        start = parse_integer(request, "from", default=0)
-        limit = parse_integer(request, "limit", default=100)
-
-        if start < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter from must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
-
-        if limit < 0:
-            raise SynapseError(
-                HTTPStatus.BAD_REQUEST,
-                "Query parameter limit must be a string representing a positive integer.",
-                errcode=Codes.INVALID_PARAM,
-            )
+        start = parse_integer(request, "from", default=0, negative=False)
+        limit = parse_integer(request, "limit", default=100, negative=False)

         user_id = parse_string(request, "user_id")
         name = parse_string(request, "name", encoding="utf-8")
@@ -409,7 +409,18 @@ class SigningKeyUploadServlet(RestServlet):
                 # But first-time setup is fine

         elif self.hs.config.experimental.msc3967_enabled:
-            # If we already have a master key then cross signing is set up and we require UIA to reset
+            # MSC3967 allows this endpoint to 200 OK for idempotency. Resending exactly the same
+            # keys should just 200 OK without doing a UIA prompt.
+            keys_are_different = await self.e2e_keys_handler.has_different_keys(
+                user_id, body
+            )
+            if not keys_are_different:
+                # FIXME: we do not fallthrough to upload_signing_keys_for_user because confusingly
+                # if we do, we 500 as it looks like it tries to INSERT the same key twice, causing a
+                # unique key constraint violation. This sounds like a bug?
+                return 200, {}
+            # the keys are different, is x-signing set up? If no, then the keys don't exist which is
+            # why they are different. If yes, then we need to UIA to change them.
             if is_cross_signing_setup:
                 await self.auth_handler.validate_user_via_ui_auth(
                     requester,
@@ -420,7 +431,6 @@ class SigningKeyUploadServlet(RestServlet):
                     can_skip_ui_auth=False,
                 )
-            # Otherwise we don't require UIA since we are setting up cross signing for first time

         else:
             # Previous behaviour is to always require UIA but allow it to be skipped
             await self.auth_handler.validate_user_via_ui_auth(
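For readers tracing the new MSC3967 branch, here is a condensed sketch of the resulting control flow. The handler objects are stand-ins passed as parameters, and only the branch structure mirrors the diff:

```python
from typing import Any, Dict, Tuple


async def upload_signing_keys(
    user_id: str,
    body: Dict[str, Any],
    is_cross_signing_setup: bool,
    e2e_keys_handler: Any,
    auth_handler: Any,
    requester: Any,
) -> Tuple[int, Dict[str, Any]]:
    # Re-uploading byte-identical keys is idempotent: 200 OK, no UIA prompt.
    if not await e2e_keys_handler.has_different_keys(user_id, body):
        return 200, {}

    if is_cross_signing_setup:
        # Keys exist and differ, so replacing them still demands UI auth.
        await auth_handler.validate_user_via_ui_auth(requester)
    # Otherwise this is first-time setup and falls through without UIA.

    keys = await e2e_keys_handler.upload_signing_keys_for_user(user_id, body)
    return 200, keys
```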
@@ -2,7 +2,7 @@
 # This file is licensed under the Affero General Public License (AGPL) version 3.
 #
 # Copyright 2022 The Matrix.org Foundation C.I.C.
-# Copyright (C) 2023 New Vector, Ltd
+# Copyright (C) 2023-2024 New Vector, Ltd
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as
@@ -34,7 +34,7 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)


-class RendezvousServlet(RestServlet):
+class MSC3886RendezvousServlet(RestServlet):
     """
     This is a placeholder implementation of [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886)
     simple client rendezvous capability that is used by the "Sign in with QR" functionality.
@@ -76,6 +76,30 @@ class RendezvousServlet(RestServlet):
     # PUT, GET and DELETE are not implemented as they should be fulfilled by the redirect target.


+class MSC4108DelegationRendezvousServlet(RestServlet):
+    PATTERNS = client_patterns(
+        "/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True
+    )
+
+    def __init__(self, hs: "HomeServer"):
+        super().__init__()
+        redirection_target: Optional[str] = (
+            hs.config.experimental.msc4108_delegation_endpoint
+        )
+        assert (
+            redirection_target is not None
+        ), "Servlet is only registered if there is a delegation target"
+        self.endpoint = redirection_target.encode("utf-8")
+
+    async def on_POST(self, request: SynapseRequest) -> None:
+        respond_with_redirect(
+            request, self.endpoint, statusCode=TEMPORARY_REDIRECT, cors=True
+        )
+
+
 def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     if hs.config.experimental.msc3886_endpoint is not None:
-        RendezvousServlet(hs).register(http_server)
+        MSC3886RendezvousServlet(hs).register(http_server)
+
+    if hs.config.experimental.msc4108_delegation_endpoint is not None:
+        MSC4108DelegationRendezvousServlet(hs).register(http_server)
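The new servlet answers any POST with a temporary redirect to the configured delegation endpoint. One way to observe it, assuming a locally running homeserver with `msc4108_delegation_endpoint` set (the base URL here is hypothetical; the endpoint path comes from the diff):

```python
import requests

resp = requests.post(
    "http://localhost:8008/_matrix/client/unstable/org.matrix.msc4108/rendezvous",
    json={},
    allow_redirects=False,  # keep the 307 visible instead of following it
)
print(resp.status_code)              # expected: 307 (temporary redirect)
print(resp.headers.get("Location"))  # the configured delegation endpoint
```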
@@ -52,6 +52,7 @@ from synapse.http.servlet import (
     parse_boolean,
     parse_enum,
     parse_integer,
+    parse_json,
     parse_json_object_from_request,
     parse_string,
     parse_strings_from_args,
@@ -65,7 +66,6 @@ from synapse.rest.client.transactions import HttpTransactionCache
 from synapse.streams.config import PaginationConfig
 from synapse.types import JsonDict, Requester, StreamToken, ThirdPartyInstanceID, UserID
 from synapse.types.state import StateFilter
-from synapse.util import json_decoder
 from synapse.util.cancellation import cancellable
 from synapse.util.stringutils import parse_and_validate_server_name, random_string
@@ -499,7 +499,7 @@ class PublicRoomListRestServlet(RestServlet):
             if server:
                 raise e

-        limit: Optional[int] = parse_integer(request, "limit", 0)
+        limit: Optional[int] = parse_integer(request, "limit", 0, negative=False)
         since_token = parse_string(request, "since")

         if limit == 0:
@@ -703,21 +703,16 @@ class RoomMessageListRestServlet(RestServlet):
         )
         # Twisted will have processed the args by now.
         assert request.args is not None
-        as_client_event = b"raw" not in request.args
-        filter_str = parse_string(request, "filter", encoding="utf-8")
-        if filter_str:
-            filter_json = urlparse.unquote(filter_str)
-            event_filter: Optional[Filter] = Filter(
-                self._hs, json_decoder.decode(filter_json)
-            )
-            if (
-                event_filter
-                and event_filter.filter_json.get("event_format", "client")
-                == "federation"
-            ):
-                as_client_event = False
-        else:
-            event_filter = None
+
+        filter_json = parse_json(request, "filter", encoding="utf-8")
+        event_filter = Filter(self._hs, filter_json) if filter_json else None
+
+        as_client_event = b"raw" not in request.args
+        if (
+            event_filter
+            and event_filter.filter_json.get("event_format", "client") == "federation"
+        ):
+            as_client_event = False

         msgs = await self.pagination_handler.get_messages(
             room_id=room_id,
@@ -898,14 +893,8 @@ class RoomEventContextServlet(RestServlet):
         limit = parse_integer(request, "limit", default=10)

         # picking the API shape for symmetry with /messages
-        filter_str = parse_string(request, "filter", encoding="utf-8")
-        if filter_str:
-            filter_json = urlparse.unquote(filter_str)
-            event_filter: Optional[Filter] = Filter(
-                self._hs, json_decoder.decode(filter_json)
-            )
-        else:
-            event_filter = None
+        filter_json = parse_json(request, "filter", encoding="utf-8")
+        event_filter = Filter(self._hs, filter_json) if filter_json else None

         event_context = await self.room_context_handler.get_event_context(
             requester, room_id, event_id, limit, event_filter
@@ -140,6 +140,9 @@ class VersionsRestServlet(RestServlet):
                     "org.matrix.msc4069": self.config.experimental.msc4069_profile_inhibit_propagation,
                     # Allows clients to handle push for encrypted events.
                     "org.matrix.msc4028": self.config.experimental.msc4028_push_encrypted_events,
+                    # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code
+                    "org.matrix.msc4108": self.config.experimental.msc4108_delegation_endpoint
+                    is not None,
                 },
             },
         )
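With this flag in place, clients can feature-detect QR-code login support before attempting the rendezvous flow. A hedged sketch against a local homeserver (the base URL is hypothetical):

```python
import requests

versions = requests.get("http://localhost:8008/_matrix/client/versions").json()
if versions.get("unstable_features", {}).get("org.matrix.msc4108"):
    print("Server advertises MSC4108 QR-code login delegation")
```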
@@ -72,9 +72,6 @@ class PreviewUrlResource(RestServlet):
         # XXX: if get_user_by_req fails, what should we do in an async render?
         requester = await self.auth.get_user_by_req(request)
         url = parse_string(request, "url", required=True)
-        ts = parse_integer(request, "ts")
-        if ts is None:
-            ts = self.clock.time_msec()
-
+        ts = parse_integer(request, "ts", default=self.clock.time_msec())
         og = await self.url_previewer.preview(url, requester.user, ts)
         respond_with_json_bytes(request, 200, og, send_cors=True)
@@ -385,7 +385,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
             WITH all_receipts AS (
                 SELECT room_id, thread_id, MAX(event_stream_ordering) AS max_receipt_stream_ordering
                 FROM receipts_linearized
-                LEFT JOIN events USING (room_id, event_id)
                 WHERE
                     {receipt_types_clause}
                     AND user_id = ?
@@ -621,13 +620,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
             SELECT notif_count, COALESCE(unread_count, 0), thread_id
             FROM event_push_summary
             LEFT JOIN (
-                SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
+                SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
                 FROM receipts_linearized
-                LEFT JOIN events USING (room_id, event_id)
                 WHERE
                     user_id = ?
                     AND room_id = ?
-                    AND stream_ordering > ?
+                    AND event_stream_ordering > ?
                     AND {receipt_types_clause}
                 GROUP BY thread_id
             ) AS receipts USING (thread_id)
@@ -659,13 +657,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
         sql = f"""
             SELECT COUNT(*), thread_id FROM event_push_actions
             LEFT JOIN (
-                SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
+                SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
                 FROM receipts_linearized
-                LEFT JOIN events USING (room_id, event_id)
                 WHERE
                     user_id = ?
                     AND room_id = ?
-                    AND stream_ordering > ?
+                    AND event_stream_ordering > ?
                     AND {receipt_types_clause}
                 GROUP BY thread_id
             ) AS receipts USING (thread_id)
@@ -738,13 +735,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
                 thread_id
             FROM event_push_actions
             LEFT JOIN (
-                SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
+                SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
                 FROM receipts_linearized
-                LEFT JOIN events USING (room_id, event_id)
                 WHERE
                     user_id = ?
                     AND room_id = ?
-                    AND stream_ordering > ?
+                    AND event_stream_ordering > ?
                     AND {receipt_types_clause}
                 GROUP BY thread_id
             ) AS receipts USING (thread_id)
@@ -910,9 +906,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
         # given this function generally gets called with only one room and
         # thread ID.
         sql = f"""
-            SELECT room_id, thread_id, MAX(stream_ordering)
+            SELECT room_id, thread_id, MAX(event_stream_ordering)
             FROM receipts_linearized
-            INNER JOIN events USING (room_id, event_id)
             WHERE {receipt_types_clause}
             AND {thread_ids_clause}
             AND {room_ids_clause}
@@ -1442,9 +1437,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
         )

         sql = """
-            SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, e.stream_ordering
+            SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, r.event_stream_ordering
            FROM receipts_linearized AS r
-            INNER JOIN events AS e USING (event_id)
            WHERE ? < r.stream_id AND r.stream_id <= ? AND user_id LIKE ?
            ORDER BY r.stream_id ASC
            LIMIT ?
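The common thread in these six hunks: `receipts_linearized.event_stream_ordering` is a denormalized copy of `events.stream_ordering`, so the hot unread-count and receipt queries no longer need to join against the much larger `events` table. A toy sqlite illustration of the before/after query shape (schema trimmed to the relevant columns):

```python
import sqlite3

db = sqlite3.connect(":memory:")
db.executescript(
    """
    CREATE TABLE events (room_id TEXT, event_id TEXT, stream_ordering INTEGER);
    CREATE TABLE receipts_linearized (
        room_id TEXT, event_id TEXT, user_id TEXT, thread_id TEXT,
        event_stream_ordering INTEGER  -- denormalized copy of events.stream_ordering
    );
    INSERT INTO events VALUES ('!r', '$e1', 10);
    INSERT INTO receipts_linearized VALUES ('!r', '$e1', '@u', NULL, 10);
    """
)
# Old shape: FROM receipts_linearized LEFT JOIN events USING (room_id, event_id)
# New shape reads the local column directly, never touching the events table:
row = db.execute(
    "SELECT MAX(event_stream_ordering) FROM receipts_linearized WHERE user_id = ?",
    ("@u",),
).fetchone()
print(row)  # (10,)
```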
@@ -19,7 +19,6 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
-import collections
 import itertools
 import logging
 from collections import OrderedDict
@@ -54,7 +53,6 @@ from synapse.storage.database import (
     LoggingDatabaseConnection,
     LoggingTransaction,
 )
-from synapse.storage.databases.main.event_federation import EventFederationStore
 from synapse.storage.databases.main.events_worker import EventCacheEntry
 from synapse.storage.databases.main.search import SearchEntry
 from synapse.storage.engines import PostgresEngine
@@ -770,26 +768,40 @@ class PersistEventsStore:
         #    that have the same chain ID as the event.
         # 2. For each retained auth event we:
         #    a. Add a link from the event's to the auth event's chain
-        #       ID/sequence number
+        #       ID/sequence number; and
+        #    b. Add a link from the event to every chain reachable by the
+        #       auth event.

         # Step 1, fetch all existing links from all the chains we've seen
         # referenced.
         chain_links = _LinkMap()
-
-        for links in EventFederationStore._get_chain_links(
-            txn, {chain_id for chain_id, _ in chain_map.values()}
-        ):
-            for origin_chain_id, inner_links in links.items():
-                for (
-                    origin_sequence_number,
-                    target_chain_id,
-                    target_sequence_number,
-                ) in inner_links:
-                    chain_links.add_link(
-                        (origin_chain_id, origin_sequence_number),
-                        (target_chain_id, target_sequence_number),
-                        new=False,
-                    )
+        auth_chain_rows = cast(
+            List[Tuple[int, int, int, int]],
+            db_pool.simple_select_many_txn(
+                txn,
+                table="event_auth_chain_links",
+                column="origin_chain_id",
+                iterable={chain_id for chain_id, _ in chain_map.values()},
+                keyvalues={},
+                retcols=(
+                    "origin_chain_id",
+                    "origin_sequence_number",
+                    "target_chain_id",
+                    "target_sequence_number",
+                ),
+            ),
+        )
+        for (
+            origin_chain_id,
+            origin_sequence_number,
+            target_chain_id,
+            target_sequence_number,
+        ) in auth_chain_rows:
+            chain_links.add_link(
+                (origin_chain_id, origin_sequence_number),
+                (target_chain_id, target_sequence_number),
+                new=False,
+            )

         # We do this in toplogical order to avoid adding redundant links.
         for event_id in sorted_topologically(
@@ -824,6 +836,18 @@ class PersistEventsStore:
                     (chain_id, sequence_number), (auth_chain_id, auth_sequence_number)
                 )

+                # Step 2b, add a link to chains reachable from the auth
+                # event.
+                for target_id, target_seq in chain_links.get_links_from(
+                    (auth_chain_id, auth_sequence_number)
+                ):
+                    if target_id == chain_id:
+                        continue
+
+                    chain_links.add_link(
+                        (chain_id, sequence_number), (target_id, target_seq)
+                    )
+
         db_pool.simple_insert_many_txn(
             txn,
             table="event_auth_chain_links",
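Step 2a plus the new step 2b together keep `event_auth_chain_links` transitively closed at write time: when an event links to an auth chain, it also links to everything that chain can already reach, so later reads stay single-hop. A deliberately simplified model of that idea (real links are keyed per sequence number within each chain, which this sketch glosses over):

```python
from typing import Dict, Set, Tuple

Link = Tuple[int, int]  # (chain_id, sequence_number)


def add_with_closure(links: Dict[Link, Set[Link]], src: Link, target: Link) -> None:
    reachable = links.setdefault(src, set())
    reachable.add(target)                      # step 2a: the direct link
    for indirect in links.get(target, set()):  # step 2b: everything target reaches
        if indirect[0] != src[0]:
            reachable.add(indirect)


links: Dict[Link, Set[Link]] = {}
add_with_closure(links, (2, 1), (1, 1))  # chain 2 -> chain 1
add_with_closure(links, (3, 1), (2, 1))  # chain 3 -> chain 2, and thus chain 1 too
print(links[(3, 1)])  # {(2, 1), (1, 1)}
```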
@@ -2427,6 +2451,31 @@ class _LinkMap:
             current_links[src_seq] = target_seq
         return True

+    def get_links_from(
+        self, src_tuple: Tuple[int, int]
+    ) -> Generator[Tuple[int, int], None, None]:
+        """Gets the chains reachable from the given chain/sequence number.
+
+        Yields:
+            The chain ID and sequence number the link points to.
+        """
+        src_chain, src_seq = src_tuple
+        for target_id, sequence_numbers in self.maps.get(src_chain, {}).items():
+            for link_src_seq, target_seq in sequence_numbers.items():
+                if link_src_seq <= src_seq:
+                    yield target_id, target_seq
+
+    def get_links_between(
+        self, source_chain: int, target_chain: int
+    ) -> Generator[Tuple[int, int], None, None]:
+        """Gets the links between two chains.
+
+        Yields:
+            The source and target sequence numbers.
+        """
+
+        yield from self.maps.get(source_chain, {}).get(target_chain, {}).items()
+
     def get_additions(self) -> Generator[Tuple[int, int, int, int], None, None]:
         """Gets any newly added links.
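To make the accessor semantics concrete: links live in `maps[src_chain][target_chain] = {src_seq: target_seq}`, and a link created at some sequence number also serves every later sequence number in the same chain. A tiny standalone rendering of `get_links_from`:

```python
from typing import Dict, Iterator, Tuple

# maps[src_chain][target_chain] = {src_seq: target_seq}
maps: Dict[int, Dict[int, Dict[int, int]]] = {1: {2: {1: 1, 3: 3}}}


def get_links_from(src_chain: int, src_seq: int) -> Iterator[Tuple[int, int]]:
    for target_id, seqs in maps.get(src_chain, {}).items():
        for link_src_seq, target_seq in seqs.items():
            if link_src_seq <= src_seq:  # a link is usable from its seq onwards
                yield target_id, target_seq


print(list(get_links_from(1, 2)))  # [(2, 1)]; the seq-3 link is not yet reachable
```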
@@ -2453,24 +2502,9 @@ class _LinkMap:
         if src_chain == target_chain:
             return target_seq <= src_seq

-        # We have to graph traverse the links to check for indirect paths.
-        visited_chains = collections.Counter()
-        search = [(src_chain, src_seq)]
-        while search:
-            chain, seq = search.pop()
-            visited_chains[chain] = max(seq, visited_chains[chain])
-            for tc, links in self.maps.get(chain, {}).items():
-                for ss, ts in links.items():
-                    # Don't revisit chains we've already seen, unless the target
-                    # sequence number is higher than last time.
-                    if ts <= visited_chains.get(tc, 0):
-                        continue
-
-                    if ss <= seq:
-                        if tc == target_chain:
-                            if target_seq <= ts:
-                                return True
-                        else:
-                            search.append((tc, ts))
+        links = self.get_links_between(src_chain, target_chain)
+        for link_start_seq, link_end_seq in links:
+            if link_start_seq <= src_seq and target_seq <= link_end_seq:
+                return True

         return False
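Because the table is again transitively closed, the replacement `exists_path_from` body reduces to scanning the direct links between the two chains, in place of the `collections.Counter` graph search it deletes. The core predicate, extracted as a standalone function:

```python
from typing import Iterable, Tuple


def exists_path_from(
    links_between: Iterable[Tuple[int, int]], src_seq: int, target_seq: int
) -> bool:
    # links_between yields (link_start_seq, link_end_seq) pairs for the two chains
    return any(s <= src_seq and target_seq <= e for s, e in links_between)


print(exists_path_from([(1, 1)], src_seq=5, target_seq=1))  # True
print(exists_path_from([(1, 1)], src_seq=5, target_seq=2))  # False
```

This matches the assertions the test hunks below add to `LinkMapTestCase`, e.g. a path exists from `(1, 5)` to `(2, 1)` but not to `(2, 2)`.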
@@ -178,14 +178,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
         )

         sql = f"""
-            SELECT event_id, stream_ordering
+            SELECT event_id, event_stream_ordering
             FROM receipts_linearized
-            INNER JOIN events USING (room_id, event_id)
             WHERE {clause}
             AND user_id = ?
             AND room_id = ?
             AND thread_id IS NULL
-            ORDER BY stream_ordering DESC
+            ORDER BY event_stream_ordering DESC
             LIMIT 1
         """
@@ -735,10 +734,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
             thread_clause = "r.thread_id = ?"
             thread_args = (thread_id,)

+        # If the receipt doesn't have a stream ordering it is because we
+        # don't have the associated event, and so must be a remote receipt.
+        # Hence it's safe to just allow new receipts to clobber it.
         sql = f"""
-            SELECT stream_ordering, event_id FROM events
-            INNER JOIN receipts_linearized AS r USING (event_id, room_id)
-            WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ? AND {thread_clause}
+            SELECT r.event_stream_ordering, r.event_id FROM receipts_linearized AS r
+            WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ?
+            AND r.event_stream_ordering IS NOT NULL AND {thread_clause}
         """
         txn.execute(
             sql,
@@ -132,16 +132,12 @@ Changes in SCHEMA_VERSION = 82

Changes in SCHEMA_VERSION = 83
    - The event_txn_id is no longer used.
-
-Changes in SCHEMA_VERSION = 84
-    - No longer assumes that `event_auth_chain_links` holds transitive links, and
-      so read operations must do graph traversal.
 """


 SCHEMA_COMPAT_VERSION = (
-    # Transitive links are no longer written to `event_auth_chain_links`
-    84
+    # The event_txn_id table and tables from MSC2716 no longer exist.
+    83
 )
 """Limit on how far the synapse codebase can be rolled back without breaking db compat
@@ -1101,6 +1101,56 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase):
             },
         )

+    def test_has_different_keys(self) -> None:
+        """check that has_different_keys returns True when the keys provided are different to what
+        is in the database."""
+        local_user = "@boris:" + self.hs.hostname
+        keys1 = {
+            "master_key": {
+                # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
+                "user_id": local_user,
+                "usage": ["master"],
+                "keys": {
+                    "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
+                },
+            }
+        }
+        self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys1))
+        is_different = self.get_success(
+            self.handler.has_different_keys(
+                local_user,
+                {
+                    "master_key": keys1["master_key"],
+                },
+            )
+        )
+        self.assertEqual(is_different, False)
+        # change the usage => different keys
+        keys1["master_key"]["usage"] = ["develop"]
+        is_different = self.get_success(
+            self.handler.has_different_keys(
+                local_user,
+                {
+                    "master_key": keys1["master_key"],
+                },
+            )
+        )
+        self.assertEqual(is_different, True)
+        keys1["master_key"]["usage"] = ["master"]  # reset
+        # change the key => different keys
+        keys1["master_key"]["keys"] = {
+            "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs"
+        }
+        is_different = self.get_success(
+            self.handler.has_different_keys(
+                local_user,
+                {
+                    "master_key": keys1["master_key"],
+                },
+            )
+        )
+        self.assertEqual(is_different, True)
+
     def test_query_devices_remote_sync(self) -> None:
         """Tests that querying keys for a remote user that we share a room with,
         but haven't yet fetched the keys for, returns the cross signing keys
@@ -277,7 +277,8 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
         self.assertEqual(400, channel.code, msg=channel.json_body)
         self.assertEqual(Codes.MISSING_PARAM, channel.json_body["errcode"])
         self.assertEqual(
-            "Missing integer query parameter 'before_ts'", channel.json_body["error"]
+            "Missing required integer query parameter before_ts",
+            channel.json_body["error"],
         )

     def test_invalid_parameter(self) -> None:
@@ -320,7 +321,7 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
         self.assertEqual(400, channel.code, msg=channel.json_body)
         self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
         self.assertEqual(
-            "Query parameter size_gt must be a string representing a positive integer.",
+            "Query parameter size_gt must be a positive integer.",
             channel.json_body["error"],
         )
@@ -21,6 +21,7 @@
 import json
 import time
 import urllib.parse
+from http import HTTPStatus
 from typing import List, Optional
 from unittest.mock import AsyncMock, Mock
@@ -2190,6 +2191,33 @@ class RoomMessagesTestCase(unittest.HomeserverTestCase):
         chunk = channel.json_body["chunk"]
         self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])

+    def test_room_message_filter_query_validation(self) -> None:
+        # Test json validation in (filter) query parameter.
+        # Does not test the validity of the filter, only the json validation.
+
+        # Check Get with valid json filter parameter, expect 200.
+        valid_filter_str = '{"types": ["m.room.message"]}'
+        channel = self.make_request(
+            "GET",
+            f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={valid_filter_str}",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
+
+        # Check Get with invalid json filter parameter, expect 400 NOT_JSON.
+        invalid_filter_str = "}}}{}"
+        channel = self.make_request(
+            "GET",
+            f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={invalid_filter_str}",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
+        self.assertEqual(
+            channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
+        )
+

 class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
     servlets = [
@@ -2522,6 +2550,39 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
         else:
             self.fail("Event %s from events_after not found" % j)

+    def test_room_event_context_filter_query_validation(self) -> None:
+        # Test json validation in (filter) query parameter.
+        # Does not test the validity of the filter, only the json validation.
+
+        # Create a user with room and event_id.
+        user_id = self.register_user("test", "test")
+        user_tok = self.login("test", "test")
+        room_id = self.helper.create_room_as(user_id, tok=user_tok)
+        event_id = self.helper.send(room_id, "message 1", tok=user_tok)["event_id"]
+
+        # Check Get with valid json filter parameter, expect 200.
+        valid_filter_str = '{"types": ["m.room.message"]}'
+        channel = self.make_request(
+            "GET",
+            f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={valid_filter_str}",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
+
+        # Check Get with invalid json filter parameter, expect 400 NOT_JSON.
+        invalid_filter_str = "}}}{}"
+        channel = self.make_request(
+            "GET",
+            f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={invalid_filter_str}",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
+        self.assertEqual(
+            channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
+        )
+

 class MakeRoomAdminTestCase(unittest.HomeserverTestCase):
     servlets = [
@@ -27,8 +27,10 @@ from synapse.util import Clock

 from tests import unittest
 from tests.unittest import override_config
+from tests.utils import HAS_AUTHLIB

-endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous"
+msc3886_endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous"
+msc4108_endpoint = "/_matrix/client/unstable/org.matrix.msc4108/rendezvous"


 class RendezvousServletTestCase(unittest.HomeserverTestCase):
@@ -41,11 +43,35 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase):
         return self.hs

     def test_disabled(self) -> None:
-        channel = self.make_request("POST", endpoint, {}, access_token=None)
+        channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None)
+        self.assertEqual(channel.code, 404)
+        channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None)
         self.assertEqual(channel.code, 404)

     @override_config({"experimental_features": {"msc3886_endpoint": "/asd"}})
-    def test_redirect(self) -> None:
-        channel = self.make_request("POST", endpoint, {}, access_token=None)
+    def test_msc3886_redirect(self) -> None:
+        channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None)
         self.assertEqual(channel.code, 307)
         self.assertEqual(channel.headers.getRawHeaders("Location"), ["/asd"])
+
+    @unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
+    @override_config(
+        {
+            "disable_registration": True,
+            "experimental_features": {
+                "msc4108_delegation_endpoint": "https://asd",
+                "msc3861": {
+                    "enabled": True,
+                    "issuer": "https://issuer",
+                    "client_id": "client_id",
+                    "client_auth_method": "client_secret_post",
+                    "client_secret": "client_secret",
+                    "admin_token": "admin_token_value",
+                },
+            },
+        }
+    )
+    def test_msc4108_delegation(self) -> None:
+        channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None)
+        self.assertEqual(channel.code, 307)
+        self.assertEqual(channel.headers.getRawHeaders("Location"), ["https://asd"])
@@ -2175,6 +2175,31 @@ class RoomMessageListTestCase(RoomBase):
         chunk = channel.json_body["chunk"]
         self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])

+    def test_room_message_filter_query_validation(self) -> None:
+        # Test json validation in (filter) query parameter.
+        # Does not test the validity of the filter, only the json validation.
+
+        # Check Get with valid json filter parameter, expect 200.
+        valid_filter_str = '{"types": ["m.room.message"]}'
+        channel = self.make_request(
+            "GET",
+            f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={valid_filter_str}",
+        )
+
+        self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
+
+        # Check Get with invalid json filter parameter, expect 400 NOT_JSON.
+        invalid_filter_str = "}}}{}"
+        channel = self.make_request(
+            "GET",
+            f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={invalid_filter_str}",
+        )
+
+        self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
+        self.assertEqual(
+            channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
+        )
+

 class RoomMessageFilterTestCase(RoomBase):
     """Tests /rooms/$room_id/messages REST events."""
@@ -3213,6 +3238,33 @@ class ContextTestCase(unittest.HomeserverTestCase):
         self.assertDictEqual(events_after[0].get("content"), {}, events_after[0])
         self.assertEqual(events_after[1].get("content"), {}, events_after[1])

+    def test_room_event_context_filter_query_validation(self) -> None:
+        # Test json validation in (filter) query parameter.
+        # Does not test the validity of the filter, only the json validation.
+        event_id = self.helper.send(self.room_id, "message 7", tok=self.tok)["event_id"]
+
+        # Check Get with valid json filter parameter, expect 200.
+        valid_filter_str = '{"types": ["m.room.message"]}'
+        channel = self.make_request(
+            "GET",
+            f"/rooms/{self.room_id}/context/{event_id}?filter={valid_filter_str}",
+            access_token=self.tok,
+        )
+        self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
+
+        # Check Get with invalid json filter parameter, expect 400 NOT_JSON.
+        invalid_filter_str = "}}}{}"
+        channel = self.make_request(
+            "GET",
+            f"/rooms/{self.room_id}/context/{event_id}?filter={invalid_filter_str}",
+            access_token=self.tok,
+        )
+
+        self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
+        self.assertEqual(
+            channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
+        )
+

 class RoomAliasListTestCase(unittest.HomeserverTestCase):
     servlets = [
@@ -21,8 +21,6 @@

 from typing import Dict, List, Set, Tuple, cast

-from parameterized import parameterized
-
 from twisted.test.proto_helpers import MemoryReactor
 from twisted.trial import unittest

@@ -47,8 +45,7 @@ class EventChainStoreTestCase(HomeserverTestCase):
         self.store = hs.get_datastores().main
         self._next_stream_ordering = 1

-    @parameterized.expand([(False,), (True,)])
-    def test_simple(self, batched: bool) -> None:
+    def test_simple(self) -> None:
         """Test that the example in `docs/auth_chain_difference_algorithm.md`
         works.
         """
@@ -56,7 +53,6 @@ class EventChainStoreTestCase(HomeserverTestCase):
         event_factory = self.hs.get_event_builder_factory()
         bob = "@creator:test"
         alice = "@alice:test"
-        charlie = "@charlie:test"
         room_id = "!room:test"

         # Ensure that we have a rooms entry so that we generate the chain index.
@@ -195,26 +191,6 @@ class EventChainStoreTestCase(HomeserverTestCase):
             )
         )

-        charlie_invite = self.get_success(
-            event_factory.for_room_version(
-                RoomVersions.V6,
-                {
-                    "type": EventTypes.Member,
-                    "state_key": charlie,
-                    "sender": alice,
-                    "room_id": room_id,
-                    "content": {"tag": "charlie_invite"},
-                },
-            ).build(
-                prev_event_ids=[],
-                auth_event_ids=[
-                    create.event_id,
-                    alice_join2.event_id,
-                    power_2.event_id,
-                ],
-            )
-        )
-
         events = [
             create,
             bob_join,
@@ -224,41 +200,33 @@ class EventChainStoreTestCase(HomeserverTestCase):
             bob_join_2,
             power_2,
             alice_join2,
-            charlie_invite,
         ]

         expected_links = [
             (bob_join, create),
             (power, create),
             (power, bob_join),
             (alice_invite, create),
             (alice_invite, power),
             (alice_invite, bob_join),
             (bob_join_2, power),
             (alice_join2, power_2),
-            (charlie_invite, alice_join2),
         ]

-        # We either persist as a batch or one-by-one depending on test
-        # parameter.
-        if batched:
-            self.persist(events)
-        else:
-            for event in events:
-                self.persist([event])
-
+        self.persist(events)
         chain_map, link_map = self.fetch_chains(events)

         # Check that the expected links and only the expected links have been
         # added.
-        event_map = {e.event_id: e for e in events}
-        reverse_chain_map = {v: event_map[k] for k, v in chain_map.items()}
         self.assertEqual(len(expected_links), len(list(link_map.get_additions())))

-        self.maxDiff = None
-        self.assertCountEqual(
-            expected_links,
-            [
-                (reverse_chain_map[(s1, s2)], reverse_chain_map[(t1, t2)])
-                for s1, s2, t1, t2 in link_map.get_additions()
-            ],
-        )
+        for start, end in expected_links:
+            start_id, start_seq = chain_map[start.event_id]
+            end_id, end_seq = chain_map[end.event_id]
+
+            self.assertIn(
+                (start_seq, end_seq), list(link_map.get_links_between(start_id, end_id))
+            )

         # Test that everything can reach the create event, but the create event
         # can't reach anything.
@@ -400,23 +368,24 @@ class EventChainStoreTestCase(HomeserverTestCase):

         expected_links = [
             (bob_join, create),
             (power, create),
             (power, bob_join),
             (alice_invite, create),
             (alice_invite, power),
             (alice_invite, bob_join),
         ]

         # Check that the expected links and only the expected links have been
         # added.
-        event_map = {e.event_id: e for e in events}
-        reverse_chain_map = {v: event_map[k] for k, v in chain_map.items()}
         self.assertEqual(len(expected_links), len(list(link_map.get_additions())))

-        self.maxDiff = None
-        self.assertCountEqual(
-            expected_links,
-            [
-                (reverse_chain_map[(s1, s2)], reverse_chain_map[(t1, t2)])
-                for s1, s2, t1, t2 in link_map.get_additions()
-            ],
-        )
+        for start, end in expected_links:
+            start_id, start_seq = chain_map[start.event_id]
+            end_id, end_seq = chain_map[end.event_id]
+
+            self.assertIn(
+                (start_seq, end_seq), list(link_map.get_links_between(start_id, end_id))
+            )

     def persist(
         self,
@@ -520,6 +489,8 @@ class LinkMapTestCase(unittest.TestCase):
         link_map = _LinkMap()

         link_map.add_link((1, 1), (2, 1), new=False)
+        self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1)])
+        self.assertCountEqual(link_map.get_links_from((1, 1)), [(2, 1)])
         self.assertCountEqual(link_map.get_additions(), [])
         self.assertTrue(link_map.exists_path_from((1, 5), (2, 1)))
         self.assertFalse(link_map.exists_path_from((1, 5), (2, 2)))
@@ -528,31 +499,18 @@ class LinkMapTestCase(unittest.TestCase):

         # Attempting to add a redundant link is ignored.
         self.assertFalse(link_map.add_link((1, 4), (2, 1)))
         self.assertCountEqual(link_map.get_additions(), [])
+        self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1)])

         # Adding new non-redundant links works
         self.assertTrue(link_map.add_link((1, 3), (2, 3)))
         self.assertCountEqual(link_map.get_additions(), [(1, 3, 2, 3)])
+        self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1), (3, 3)])

         self.assertTrue(link_map.add_link((2, 5), (1, 3)))
+        self.assertCountEqual(link_map.get_links_between(2, 1), [(5, 3)])
+        self.assertCountEqual(link_map.get_links_between(1, 2), [(1, 1), (3, 3)])
+
         self.assertCountEqual(link_map.get_additions(), [(1, 3, 2, 3), (2, 5, 1, 3)])

-    def test_exists_path_from(self) -> None:
-        "Check that `exists_path_from` can handle non-direct links"
-        link_map = _LinkMap()
-
-        link_map.add_link((1, 1), (2, 1), new=False)
-        link_map.add_link((2, 1), (3, 1), new=False)
-
-        self.assertTrue(link_map.exists_path_from((1, 4), (3, 1)))
-        self.assertFalse(link_map.exists_path_from((1, 4), (3, 2)))
-
-        link_map.add_link((1, 5), (2, 3), new=False)
-        link_map.add_link((2, 2), (3, 3), new=False)
-
-        self.assertTrue(link_map.exists_path_from((1, 6), (3, 2)))
-        self.assertFalse(link_map.exists_path_from((1, 4), (3, 2)))
-

 class EventChainBackgroundUpdateTestCase(HomeserverTestCase):
     servlets = [