Compare commits
135 Commits
| SHA1 |
|---|
| 0535f631f5 |
| f2188cf931 |
| c4746a321d |
| 47e4c6eb79 |
| b9fab43943 |
| 799bd77170 |
| ab3f4dc5b5 |
| 1525a3b4d4 |
| 0fad0a725c |
| f7bc63ef57 |
| ecad88f5c5 |
| 30fcd586fe |
| 4aa725a730 |
| 2d4f28915e |
| 12dc6b102f |
| 0c31783b4f |
| e462950338 |
| 3e34f5ccc7 |
| 8ae9d9e8c5 |
| 22bb3c50d1 |
| 74a70190ab |
| 0b1830b121 |
| 74aa47828d |
| 816054b012 |
| aaffc3566e |
| fe3f462b79 |
| c274839234 |
| 5a833ebbc8 |
| 30418653fd |
| 26331cbbd5 |
| d6f9332a6b |
| c1b7c6b12e |
| c1815bf5a1 |
| 703f2e8c43 |
| 068e22b4b7 |
| e4074749d2 |
| 8f07ef5c93 |
| 4c84c9c4ad |
| deb09b3836 |
| 77261301d2 |
| 0076197c97 |
| dcf7b39276 |
| 29534e7d0a |
| 553e9882bf |
| 3391da348f |
| 6fe41d2b47 |
| 5b03265cfb |
| b8a333004a |
| e41174cae3 |
| 37e893499f |
| c46d452c7c |
| 27dbb1b429 |
| aa6e5c2ecb |
| ac1bf682ff |
| a0b70473fc |
| 95a85b1129 |
| 3d8535b1de |
| 628351b98d |
| 8f27b3af07 |
| 579f4ac1cd |
| c53999dab8 |
| b41a9ebb38 |
| 6ec5e13ec9 |
| 148e93576e |
| 56ed412839 |
| 9c5d08fff8 |
| 90a6bd01c2 |
| aa07a01452 |
| 8364c01a2b |
| e27808f306 |
| 048c1ac7f6 |
| ca290d325c |
| 0a31cf18cd |
| 48db0c2d6c |
| 24c4d82aeb |
| 3fda8d3b67 |
| 5f15a549d7 |
| 6cefbc6852 |
| fd3ec6435e |
| 39bd6e2c16 |
| 5c736cd2af |
| e70e8d132c |
| 48334fbc40 |
| b4fd694ce3 |
| e2d757f62d |
| aab3672037 |
| d0677dca39 |
| e34fd1228d |
| beea39f000 |
| fa320c4fcb |
| 22c2add9c0 |
| 60f596b4d8 |
| 1143e14479 |
| c199ede287 |
| 9fb7333a7c |
| a0a4a36891 |
| 49fcda31f6 |
| b3ba501c52 |
| 6306de8e16 |
| b5267678d2 |
| ebc21a8c67 |
| e5a53819fc |
| 66b24d3d00 |
| 2b59e738ee |
| b1d030a107 |
| 7c2284b2f2 |
| d69c00b5a1 |
| 2d23250da7 |
| 234d07eb09 |
| bd9a1079bc |
| 3eb92369ca |
| 09f377fa52 |
| f1b0f9a4ef |
| f1ecf46647 |
| 57bf44941e |
| 3d60a58ad6 |
| 8208186e3c |
| 29d586311d |
| 512c9efcb3 |
| 35c361c0d9 |
| 95853c5f31 |
| eb019c03c4 |
| eedab12e6d |
| 483602efb2 |
| ac429050bc |
| daa783f16c |
| 6c4037dcf3 |
| 737f6c73f7 |
| ed6edc17d0 |
| 5b0873516c |
| 5da7081197 |
| 5cf74c2da0 |
| adce8a0111 |
| 790ce14e46 |
| ecbc0b740c |
@@ -60,7 +60,7 @@ trial_postgres_tests = [
{
"python-version": "3.9",
"database": "postgres",
-"postgres-version": "11",
+"postgres-version": "13",
"extras": "all",
}
]
@@ -4,8 +4,9 @@ name: Build docker images

on:
push:
-tags: ["v*"]
-branches: [ master, main, develop ]
+# TEMP
+# tags: ["v*"]
+# branches: [ master, main, develop ]
workflow_dispatch:

permissions:
@@ -14,7 +15,7 @@ permissions:
id-token: write # needed for signing the images with GitHub OIDC Token
jobs:
build:
-runs-on: ubuntu-latest
+runs-on: ubuntu-22.04
steps:
- name: Set up QEMU
id: qemu
@@ -30,7 +31,7 @@ jobs:
run: docker buildx inspect

- name: Install Cosign
-uses: sigstore/cosign-installer@v3.7.0
+uses: sigstore/cosign-installer@v3.8.0

- name: Checkout repository
uses: actions/checkout@v4
@@ -44,12 +45,16 @@ jobs:

- name: Log in to DockerHub
uses: docker/login-action@v3
+# TEMP
+if: ${{ false }}
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Log in to GHCR
uses: docker/login-action@v3
+# TEMP
+if: ${{ false }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -74,7 +79,8 @@ jobs:
id: build-and-push
uses: docker/build-push-action@v6
with:
-push: true
+# TEMP
+push: false
labels: |
gitsha1=${{ github.sha }}
org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
@@ -88,6 +94,8 @@ jobs:
CARGO_NET_GIT_FETCH_WITH_CLI=true

- name: Sign the images with GitHub OIDC Token
+# TEMP
+if: ${{ false }}
env:
DIGEST: ${{ steps.build-and-push.outputs.digest }}
TAGS: ${{ steps.set-tag.outputs.tags }}
@@ -14,7 +14,7 @@ jobs:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
-uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6
+uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}
@@ -21,7 +21,7 @@ jobs:
# We use nightly so that `fmt` correctly groups together imports, and
# clippy correctly fixes up the benchmarks.
toolchain: nightly-2022-12-01
-components: rustfmt
+components: clippy, rustfmt
- uses: Swatinem/rust-cache@v2

- name: Setup Poetry
@@ -221,3 +221,7 @@ jobs:
Sdist/*
Wheel*/*
debs.tar.xz
+# if it's not already published, keep the release as a draft.
+draft: true
+# mark it as a prerelease if the tag contains 'rc'.
+prerelease: ${{ contains(github.ref, 'rc') }}
@@ -581,7 +581,7 @@ jobs:
matrix:
include:
- python-version: "3.9"
-postgres-version: "11"
+postgres-version: "13"

- python-version: "3.13"
postgres-version: "17"
+140 -3708 (file diff suppressed because it is too large)
Generated: +141 -40
@@ -13,9 +13,9 @@ dependencies = [

[[package]]
name = "anyhow"
-version = "1.0.93"
+version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
+checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"

[[package]]
name = "arc-swap"
@@ -35,6 +35,12 @@ version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"

+[[package]]
+name = "bitflags"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36"
+
[[package]]
name = "blake2"
version = "0.10.6"
@@ -61,9 +67,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"

[[package]]
name = "bytes"
-version = "1.9.0"
+version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
+checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9"

[[package]]
name = "cfg-if"
@@ -119,15 +125,14 @@ dependencies = [

[[package]]
name = "getrandom"
-version = "0.2.15"
+version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
+checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8"
dependencies = [
"cfg-if",
-"js-sys",
"libc",
"wasi",
-"wasm-bindgen",
+"windows-targets",
]

[[package]]
@@ -168,9 +173,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"

[[package]]
name = "http"
-version = "1.1.0"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
+checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea"
dependencies = [
"bytes",
"fnv",
@@ -218,9 +223,9 @@ checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"

[[package]]
name = "log"
-version = "0.4.22"
+version = "0.4.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
+checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"

[[package]]
name = "memchr"
@@ -272,9 +277,9 @@ dependencies = [

[[package]]
name = "pyo3"
-version = "0.23.2"
+version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f54b3d09cbdd1f8c20650b28e7b09e338881482f4aa908a5f61a00c98fba2690"
+checksum = "57fe09249128b3173d092de9523eaa75136bf7ba85e0d69eca241c7939c933cc"
dependencies = [
"anyhow",
"cfg-if",
@@ -291,9 +296,9 @@ dependencies = [

[[package]]
name = "pyo3-build-config"
-version = "0.23.2"
+version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3015cf985888fe66cfb63ce0e321c603706cd541b7aec7ddd35c281390af45d8"
+checksum = "1cd3927b5a78757a0d71aa9dff669f903b1eb64b54142a9bd9f757f8fde65fd7"
dependencies = [
"once_cell",
"target-lexicon",
@@ -301,9 +306,9 @@ dependencies = [

[[package]]
name = "pyo3-ffi"
-version = "0.23.2"
+version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6fca7cd8fd809b5ac4eefb89c1f98f7a7651d3739dfb341ca6980090f554c270"
+checksum = "dab6bb2102bd8f991e7749f130a70d05dd557613e39ed2deeee8e9ca0c4d548d"
dependencies = [
"libc",
"pyo3-build-config",
@@ -322,9 +327,9 @@ dependencies = [

[[package]]
name = "pyo3-macros"
-version = "0.23.2"
+version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34e657fa5379a79151b6ff5328d9216a84f55dc93b17b08e7c3609a969b73aa0"
+checksum = "91871864b353fd5ffcb3f91f2f703a22a9797c91b9ab497b1acac7b07ae509c7"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -334,9 +339,9 @@ dependencies = [

[[package]]
name = "pyo3-macros-backend"
-version = "0.23.2"
+version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "295548d5ffd95fd1981d2d3cf4458831b21d60af046b729b6fd143b0ba7aee2f"
+checksum = "43abc3b80bc20f3facd86cd3c60beed58c3e2aa26213f3cda368de39c60a27e4"
dependencies = [
"heck",
"proc-macro2",
@@ -366,20 +371,20 @@ dependencies = [

[[package]]
name = "rand"
-version = "0.8.5"
+version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
dependencies = [
-"libc",
"rand_chacha",
"rand_core",
+"zerocopy",
]

[[package]]
name = "rand_chacha"
-version = "0.3.1"
+version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
dependencies = [
"ppv-lite86",
"rand_core",
@@ -387,11 +392,12 @@ dependencies = [

[[package]]
name = "rand_core"
-version = "0.6.4"
+version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff"
dependencies = [
"getrandom",
+"zerocopy",
]

[[package]]
@@ -431,18 +437,18 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"

[[package]]
name = "serde"
-version = "1.0.215"
+version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
+checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
dependencies = [
"serde_derive",
]

[[package]]
name = "serde_derive"
-version = "1.0.215"
+version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
+checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
dependencies = [
"proc-macro2",
"quote",
@@ -451,9 +457,9 @@ dependencies = [

[[package]]
name = "serde_json"
-version = "1.0.133"
+version = "1.0.138"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377"
+checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949"
dependencies = [
"itoa",
"memchr",
@@ -538,11 +544,10 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"

[[package]]
name = "ulid"
-version = "1.1.3"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04f903f293d11f31c0c29e4148f6dc0d033a7f80cebc0282bea147611667d289"
+checksum = "ab82fc73182c29b02e2926a6df32f2241dbadb5cfc111fd595515b3598f46bb3"
dependencies = [
-"getrandom",
"rand",
"web-time",
]
@@ -567,9 +572,12 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"

[[package]]
name = "wasi"
-version = "0.11.0+wasi-snapshot-preview1"
+version = "0.13.3+wasi-0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2"
+dependencies = [
+"wit-bindgen-rt",
+]

[[package]]
name = "wasm-bindgen"
@@ -634,3 +642,96 @@ dependencies = [
"js-sys",
"wasm-bindgen",
]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+"windows_aarch64_gnullvm",
+"windows_aarch64_msvc",
+"windows_i686_gnu",
+"windows_i686_gnullvm",
+"windows_i686_msvc",
+"windows_x86_64_gnu",
+"windows_x86_64_gnullvm",
+"windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
+
+[[package]]
+name = "wit-bindgen-rt"
+version = "0.33.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c"
+dependencies = [
+"bitflags",
+]
+
+[[package]]
+name = "zerocopy"
+version = "0.8.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa91407dacce3a68c56de03abe2760159582b846c6a4acd2f456618087f12713"
+dependencies = [
+"zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.8.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06718a168365cad3d5ff0bb133aad346959a2074bd4a85c121255a11304a8626"
+dependencies = [
+"proc-macro2",
+"quote",
+"syn",
+]
@@ -0,0 +1,6 @@
+Licensees holding a valid commercial license with Element may use this
+software in accordance with the terms contained in a written agreement
+between you and Element.
+
+To purchase a commercial license please contact our sales team at
+licensing@element.io
+21 -6
@@ -10,14 +10,15 @@ implementation, written and maintained by `Element <https://element.io>`_.
`Matrix <https://github.com/matrix-org>`__ is the open standard for
secure and interoperable real time communications. You can directly run
and manage the source code in this repository, available under an AGPL
-license. There is no support provided from Element unless you have a
-subscription.
+license (or alternatively under a commercial license from Element).
+There is no support provided by Element unless you have a
+subscription from Element.

-Subscription alternative
-========================
+Subscription
+============

-Alternatively, for those that need an enterprise-ready solution, Element
-Server Suite (ESS) is `available as a subscription <https://element.io/pricing>`_.
+For those that need an enterprise-ready solution, Element
+Server Suite (ESS) is `available via subscription <https://element.io/pricing>`_.
ESS builds on Synapse to offer a complete Matrix-based backend including the full
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
giving admins the power to easily manage an organization-wide
@@ -249,6 +250,20 @@ Developers might be particularly interested in:
Alongside all that, join our developer community on Matrix:
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!

+Copyright and Licensing
+=======================
+
+Copyright 2014-2017 OpenMarket Ltd
+Copyright 2017 Vector Creations Ltd
+Copyright 2017-2025 New Vector Ltd
+
+This software is dual-licensed by New Vector Ltd (Element). It can be used either:
+
+(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR
+
+(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).
+
+Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.

.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
:alt: (get community support in #synapse:matrix.org)
@@ -0,0 +1 @@
+Make sure we advertise registration as disabled when MSC3861 is enabled.
@@ -1 +0,0 @@
-Support stable account suspension from [MSC3823](https://github.com/matrix-org/matrix-spec-proposals/pull/3823).
@@ -1 +0,0 @@
-Add `RoomID` & `EventID` rust types.
@@ -0,0 +1 @@
+Speed up the building of the Docker image.
@@ -0,0 +1 @@
+Use a [`distroless`](https://github.com/GoogleContainerTools/distroless) base runtime image.
@@ -0,0 +1 @@
+Cleanup deleted state group references.
@@ -245,7 +245,7 @@ class SynapseCmd(cmd.Cmd):

if "flows" not in json_res:
print("Failed to find any login flows.")
-defer.returnValue(False)
+return False

flow = json_res["flows"][0] # assume first is the one we want.
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
@@ -254,8 +254,8 @@ class SynapseCmd(cmd.Cmd):
"Unable to login via the command line client. Please visit "
"%s to login." % fallback_url
)
-defer.returnValue(False)
-defer.returnValue(True)
+return False
+return True

def do_emailrequest(self, line):
"""Requests the association of a third party identifier
@@ -78,7 +78,7 @@ class TwistedHttpClient(HttpClient):
url, data, headers_dict={"Content-Type": ["application/json"]}
)
body = yield readBody(response)
-defer.returnValue((response.code, body))
+return response.code, body

@defer.inlineCallbacks
def get_json(self, url, args=None):
@@ -88,7 +88,7 @@ class TwistedHttpClient(HttpClient):
url = "%s?%s" % (url, qs)
response = yield self._create_get_request(url)
body = yield readBody(response)
-defer.returnValue(json.loads(body))
+return json.loads(body)

def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a PUT request"""
@@ -134,7 +134,7 @@ class TwistedHttpClient(HttpClient):
response = yield self._create_request(method, url)

body = yield readBody(response)
-defer.returnValue(json.loads(body))
+return json.loads(body)

@defer.inlineCallbacks
def _create_request(
@@ -173,7 +173,7 @@ class TwistedHttpClient(HttpClient):
if self.verbose:
print("Status %s %s" % (response.code, response.phrase))
print(pformat(list(response.headers.getAllRawHeaders())))
-defer.returnValue(response)
+return response

def sleep(self, seconds):
d = defer.Deferred()
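The hunks above all make the same mechanical change: on Python 3 a generator decorated with `@defer.inlineCallbacks` can hand back its result with a plain `return`, so Twisted's legacy `defer.returnValue()` helper (which was only needed on Python 2, where generators could not return values) can be dropped. A minimal standalone sketch of the two styles — not taken from the diff; `get_page` is a stand-in for any function returning a Deferred:

```python
from twisted.internet import defer
from twisted.internet.task import react


@defer.inlineCallbacks
def fetch_legacy(get_page):
    body = yield get_page()          # wait for the Deferred to fire
    defer.returnValue(body.upper())  # old style: required on Python 2 only


@defer.inlineCallbacks
def fetch_modern(get_page):
    body = yield get_page()
    return body.upper()              # new style: plain return works on Python 3


def main(reactor):
    get_page = lambda: defer.succeed("ok")  # stand-in for a real HTTP call
    d = fetch_modern(get_page)
    d.addCallback(print)                    # prints "OK"
    return d


react(main)
```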
@@ -51,7 +51,7 @@ services:
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl

db:
-image: docker.io/postgres:12-alpine
+image: docker.io/postgres:15-alpine
# Change that password, of course!
environment:
- POSTGRES_USER=synapse
Vendored: +66
@@ -1,3 +1,69 @@
+matrix-synapse-py3 (1.125.0~rc1) stable; urgency=medium
+
+* New synapse release 1.125.0rc1.
+
+-- Synapse Packaging team <packages@matrix.org> Tue, 18 Feb 2025 13:32:49 +0000
+
+matrix-synapse-py3 (1.124.0) stable; urgency=medium
+
+* New Synapse release 1.124.0.
+
+-- Synapse Packaging team <packages@matrix.org> Tue, 11 Feb 2025 11:55:22 +0100
+
+matrix-synapse-py3 (1.124.0~rc3) stable; urgency=medium
+
+* New Synapse release 1.124.0rc3.
+
+-- Synapse Packaging team <packages@matrix.org> Fri, 07 Feb 2025 13:42:55 +0000
+
+matrix-synapse-py3 (1.124.0~rc2) stable; urgency=medium
+
+* New Synapse release 1.124.0rc2.
+
+-- Synapse Packaging team <packages@matrix.org> Wed, 05 Feb 2025 16:35:53 +0000
+
+matrix-synapse-py3 (1.124.0~rc1) stable; urgency=medium
+
+* New Synapse release 1.124.0rc1.
+
+-- Synapse Packaging team <packages@matrix.org> Tue, 04 Feb 2025 11:53:05 +0000
+
+matrix-synapse-py3 (1.123.0) stable; urgency=medium
+
+* New Synapse release 1.123.0.
+
+-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2025 08:37:34 -0700
+
+matrix-synapse-py3 (1.123.0~rc1) stable; urgency=medium
+
+* New Synapse release 1.123.0rc1.
+
+-- Synapse Packaging team <packages@matrix.org> Tue, 21 Jan 2025 14:39:57 +0100
+
+matrix-synapse-py3 (1.122.0) stable; urgency=medium
+
+* New Synapse release 1.122.0.
+
+-- Synapse Packaging team <packages@matrix.org> Tue, 14 Jan 2025 14:14:14 +0000
+
+matrix-synapse-py3 (1.122.0~rc1) stable; urgency=medium
+
+* New Synapse release 1.122.0rc1.
+
+-- Synapse Packaging team <packages@matrix.org> Tue, 07 Jan 2025 14:06:19 +0000
+
+matrix-synapse-py3 (1.121.1) stable; urgency=medium
+
+* New Synapse release 1.121.1.
+
+-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 18:24:48 +0000
+
+matrix-synapse-py3 (1.121.0) stable; urgency=medium
+
+* New Synapse release 1.121.0.
+
+-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 13:12:30 +0100
+
matrix-synapse-py3 (1.121.0~rc1) stable; urgency=medium

* New Synapse release 1.121.0rc1.
@@ -138,6 +138,10 @@ for port in 8080 8081 8082; do
per_user:
per_second: 1000
burst_count: 1000
+rc_presence:
+per_user:
+per_second: 1000
+burst_count: 1000
RC
)
echo "${ratelimiting}" >> "$port.config"
+92 -84
@@ -20,45 +20,17 @@
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
# in `poetry export` in the past.

+ARG DEBIAN_VERSION=bookworm
+ARG DEBIAN_VERSION_NUMERIC=12
ARG PYTHON_VERSION=3.12
+ARG POETRY_VERSION=1.8.3

###
### Stage 0: generate requirements.txt
###
-# We hardcode the use of Debian bookworm here because this could change upstream
-# and other Dockerfiles used for testing are expecting bookworm.
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS requirements
-
-# RUN --mount is specific to buildkit and is documented at
-# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
-# Here we use it to set up a cache for apt (and below for pip), to improve
-# rebuild speeds on slow connections.
-RUN \
---mount=type=cache,target=/var/cache/apt,sharing=locked \
---mount=type=cache,target=/var/lib/apt,sharing=locked \
-apt-get update -qq && apt-get install -yqq \
-build-essential curl git libffi-dev libssl-dev pkg-config \
-&& rm -rf /var/lib/apt/lists/*
-
-# Install rust and ensure its in the PATH.
-# (Rust may be needed to compile `cryptography`---which is one of poetry's
-# dependencies---on platforms that don't have a `cryptography` wheel.
-ENV RUSTUP_HOME=/rust
-ENV CARGO_HOME=/cargo
-ENV PATH=/cargo/bin:/rust/bin:$PATH
-RUN mkdir /rust /cargo
-
-RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
-
-# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
-# set to true, so we expose it as a build-arg.
-ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
-ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
-
-# We install poetry in its own build stage to avoid its dependencies conflicting with
-# synapse's dependencies.
-RUN --mount=type=cache,target=/root/.cache/pip \
-pip install --user "poetry==1.3.2"
+### This stage is platform-agnostic, so we can use the build platform in case of cross-compilation.
+###
+FROM --platform=$BUILDPLATFORM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS requirements

WORKDIR /synapse

@@ -75,41 +47,30 @@ ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION
# Instead, we'll just install what a regular `pip install` would from PyPI.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE

+# This silences a warning as uv isn't able to do hardlinks between its cache
+# (mounted as --mount=type=cache) and the target directory.
+ENV UV_LINK_MODE=copy
+
# Export the dependencies, but only if we're actually going to use the Poetry lockfile.
# Otherwise, just create an empty requirements file so that the Dockerfile can
# proceed.
-RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
-/root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
+ARG POETRY_VERSION
+RUN --mount=type=cache,target=/root/.cache/uv \
+if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
+uvx --with poetry-plugin-export==1.8.0 \
+poetry@${POETRY_VERSION} export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
else \
-touch /synapse/requirements.txt; \
+touch /synapse/requirements.txt; \
fi

###
### Stage 1: builder
###
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS builder
-
-# install the OS build deps
-RUN \
---mount=type=cache,target=/var/cache/apt,sharing=locked \
---mount=type=cache,target=/var/lib/apt,sharing=locked \
-apt-get update -qq && apt-get install -yqq \
-build-essential \
-libffi-dev \
-libjpeg-dev \
-libpq-dev \
-libssl-dev \
-libwebp-dev \
-libxml++2.6-dev \
-libxslt1-dev \
-openssl \
-zlib1g-dev \
-git \
-curl \
-libicu-dev \
-pkg-config \
-&& rm -rf /var/lib/apt/lists/*
+FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS builder
+
+# This silences a warning as uv isn't able to do hardlinks between its cache
+# (mounted as --mount=type=cache) and the target directory.
+ENV UV_LINK_MODE=copy

# Install rust and ensure its in the PATH
ENV RUSTUP_HOME=/rust
@@ -119,7 +80,6 @@ RUN mkdir /rust /cargo

RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal

-
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
@@ -131,8 +91,8 @@ ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
#
# This is aiming at installing the `[tool.poetry.depdendencies]` from pyproject.toml.
COPY --from=requirements /synapse/requirements.txt /synapse/
-RUN --mount=type=cache,target=/root/.cache/pip \
-pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
+RUN --mount=type=cache,target=/root/.cache/uv \
+uv pip install --prefix="/install" --no-deps -r /synapse/requirements.txt

# Copy over the rest of the synapse source code.
COPY synapse /synapse/synapse/
@@ -146,48 +106,96 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# Install the synapse package itself.
# If we have populated requirements.txt, we don't install any dependencies
# as we should already have those from the previous `pip install` step.
-RUN --mount=type=cache,target=/synapse/target,sharing=locked \
+RUN \
+--mount=type=cache,target=/root/.cache/uv \
+--mount=type=cache,target=/synapse/target,sharing=locked \
--mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
-pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
+uv pip install --prefix="/install" --no-deps /synapse[all]; \
else \
-pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
+uv pip install --prefix="/install" /synapse[all]; \
fi

###
-### Stage 2: runtime
+## Stage 2: runtime dependencies download for ARM64 and AMD64
###
+FROM --platform=$BUILDPLATFORM ghcr.io/astral-sh/uv:${DEBIAN_VERSION} AS runtime-deps
+
+# Tell apt to keep downloaded package files, as we're using cache mounts.
+RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
+
+# Add both target architectures
+RUN dpkg --add-architecture arm64
+RUN dpkg --add-architecture amd64
+
+ARG PYTHON_VERSION
+RUN uv python install \
+cpython-${PYTHON_VERSION}-linux-aarch64-gnu \
+cpython-${PYTHON_VERSION}-linux-x86_64_v2-gnu
+
+RUN mkdir -p /install-amd64/usr/lib /install-arm64/usr/lib
+RUN mv $(uv python dir)/cpython-*-linux-aarch64-gnu/ /install-arm64/usr/local
+RUN mv $(uv python dir)/cpython-*-linux-x86_64_v2-gnu/ /install-amd64/usr/local
+
+# Fetch the runtime dependencies debs for both architectures
+# We do that by building a recursive list of packages we need to download with `apt-cache depends`
+# and then downloading them with `apt-get download`.
+RUN \
+--mount=type=cache,target=/var/cache/apt,sharing=locked \
+--mount=type=cache,target=/var/lib/apt,sharing=locked \
+apt-get update -qq && \
+apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \
+gosu \
+zlib1g \
+libjpeg62-turbo \
+libpq5 \
+libwebp7 \
+xmlsec1 \
+libjemalloc2 \
+libicu \
+openssl \
+| grep '^\w' > /tmp/pkg-list && \
+for arch in arm64 amd64; do \
+mkdir -p /tmp/debs-${arch} && \
+cd /tmp/debs-${arch} && \
+apt-get download $(sed "s/$/:${arch}/" /tmp/pkg-list); \
+done
+
+# Extract the debs for each architecture
+RUN \
+for arch in arm64 amd64; do \
+mkdir -p /install-${arch}/var/lib/dpkg/status.d/ && \
+for deb in /tmp/debs-${arch}/*.deb; do \
+package_name=$(dpkg-deb -I ${deb} | awk '/^ Package: .*$/ {print $2}'); \
+echo "Extracting: ${package_name}"; \
+dpkg --ctrl-tarfile $deb | tar -Ox ./control > /install-${arch}/var/lib/dpkg/status.d/${package_name}; \
+dpkg --extract $deb /install-${arch}; \
+done; \
+done
+
+
+###
+### Stage 3: runtime
+###

-FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
+FROM gcr.io/distroless/base-nossl-debian${DEBIAN_VERSION_NUMERIC}:debug
+
+ARG TARGETARCH

LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'

-RUN \
---mount=type=cache,target=/var/cache/apt,sharing=locked \
---mount=type=cache,target=/var/lib/apt,sharing=locked \
-apt-get update -qq && apt-get install -yqq \
-curl \
-gosu \
-libjpeg62-turbo \
-libpq5 \
-libwebp7 \
-xmlsec1 \
-libjemalloc2 \
-libicu72 \
-libssl-dev \
-openssl \
-&& rm -rf /var/lib/apt/lists/*
-
+COPY --from=runtime-deps /install-${TARGETARCH} /
COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf

EXPOSE 8008/tcp 8009/tcp 8448/tcp

+SHELL ["/busybox/sh", "-c"]
ENTRYPOINT ["/start.py"]

HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
-CMD curl -fSs http://localhost:8008/health || exit 1
+CMD wget --quiet --tries=1 --spider http://localhost:8008/health || exit 1
+57 -47
@@ -2,66 +2,76 @@

ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
+ARG DEBIAN_VERSION=bookworm
+ARG DEBIAN_VERSION_NUMERIC=12
+ARG PYTHON_VERSION=3.12

# first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
# each time.

-FROM docker.io/library/debian:bookworm-slim AS deps_base
-RUN \
---mount=type=cache,target=/var/cache/apt,sharing=locked \
---mount=type=cache,target=/var/lib/apt,sharing=locked \
-apt-get update -qq && \
-DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
-redis-server nginx-light
+FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base

-# Similarly, a base to copy the redis server from.
-#
-# The redis docker image has fewer dynamic libraries than the debian package,
-# which makes it much easier to copy (but we need to make sure we use an image
-# based on the same debian version as the synapse image, to make sure we get
-# the expected version of libc.
-FROM docker.io/library/redis:7-bookworm AS redis_base
+# This silences a warning as uv isn't able to do hardlinks between its cache
+# (mounted as --mount=type=cache) and the target directory.
+ENV UV_LINK_MODE=copy

+RUN \
+--mount=type=cache,target=/var/cache/apt,sharing=locked \
+--mount=type=cache,target=/var/lib/apt,sharing=locked \
+apt-get update -qq && \
+apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \
+redis-server \
+nginx-light \
+mawk \
+| grep '^\w' > /tmp/pkg-list && \
+mkdir -p /tmp/debs && \
+cat /tmp/pkg-list && \
+cd /tmp/debs && \
+xargs apt-get download </tmp/pkg-list
+
+# Extract the debs for each architecture
+RUN \
+mkdir -p /install/var/lib/dpkg/status.d/ && \
+for deb in /tmp/debs/*.deb; do \
+package_name=$(dpkg-deb -I ${deb} | awk '/^ Package: .*$/ {print $2}'); \
+echo "Extracting: ${package_name}"; \
+dpkg --ctrl-tarfile $deb | tar -Ox ./control > /install/var/lib/dpkg/status.d/${package_name}; \
+dpkg --extract $deb /install; \
+done;
+
+RUN --mount=type=cache,target=/root/.cache/uv \
+uv pip install --prefix="/install/usr/local" supervisor~=4.2

# now build the final image, based on the the regular Synapse docker image
FROM $FROM

-# Install supervisord with pip instead of apt, to avoid installing a second
-# copy of python.
-RUN --mount=type=cache,target=/root/.cache/pip \
-pip install supervisor~=4.2
-RUN mkdir -p /etc/supervisor/conf.d
+# Copy over redis, nginx and supervisor
+COPY --from=deps_base /install /
+RUN mkdir -p /etc/supervisor/conf.d
+RUN addgroup -S -g 33 www-data
+RUN adduser -S -u 33 -G www-data -h /var/www -s /usr/sbin/nologin -H www-data
+RUN chown www-data /var/lib/nginx

-# Copy over redis and nginx
-COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
+# have nginx log to stderr/out
+RUN ln -sf /dev/stdout /var/log/nginx/access.log
+RUN ln -sf /dev/stderr /var/log/nginx/error.log

-COPY --from=deps_base /usr/sbin/nginx /usr/sbin
-COPY --from=deps_base /usr/share/nginx /usr/share/nginx
-COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
-COPY --from=deps_base /etc/nginx /etc/nginx
RUN rm /etc/nginx/sites-enabled/default
-RUN mkdir /var/log/nginx /var/lib/nginx
-RUN chown www-data /var/lib/nginx
+# Copy Synapse worker, nginx and supervisord configuration template files
+COPY ./docker/conf-workers/* /conf/

-# have nginx log to stderr/out
-RUN ln -sf /dev/stdout /var/log/nginx/access.log
-RUN ln -sf /dev/stderr /var/log/nginx/error.log
+# Copy a script to prefix log lines with the supervisor program name
+COPY ./docker/prefix-log /usr/local/bin/

-# Copy Synapse worker, nginx and supervisord configuration template files
-COPY ./docker/conf-workers/* /conf/
+# Expose nginx listener port
+EXPOSE 8080/tcp

-# Copy a script to prefix log lines with the supervisor program name
-COPY ./docker/prefix-log /usr/local/bin/
+# A script to read environment variables and create the necessary
+# files to run the desired worker configuration. Will start supervisord.
+COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
+ENTRYPOINT ["/configure_workers_and_start.py"]

-# Expose nginx listener port
-EXPOSE 8080/tcp
-
-# A script to read environment variables and create the necessary
-# files to run the desired worker configuration. Will start supervisord.
-COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
-ENTRYPOINT ["/configure_workers_and_start.py"]
-
# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
CMD /bin/sh /healthcheck.sh
@@ -114,6 +114,9 @@ The following environment variables are supported in `run` mode:
is set via `docker run --user`, defaults to `991`, `991`. Note that this user
must have permission to read the config files, and write to the data directories.
* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
+* `SYNAPSE_HTTP_PROXY`: Passed through to the Synapse process as the `http_proxy` environment variable.
+* `SYNAPSE_HTTPS_PROXY`: Passed through to the Synapse process as the `https_proxy` environment variable.
+* `SYNAPSE_NO_PROXY`: Passed through to the Synapse process as `no_proxy` environment variable.

For more complex setups (e.g. for workers) you can also pass your args directly to synapse using `run` mode. For example like this:
@@ -15,13 +15,15 @@ FROM $FROM
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.

-# This trick only works because (a) the Synapse image happens to have all the
-# shared libraries that postgres wants, (b) we use a postgres image based on
-# the same debian version as Synapse's docker image (so the versions of the
-# shared libraries match).
+# This trick only works because we use a postgres image based on the same debian
+# version as Synapse's docker image (so the versions of the shared libraries
+# match).
+RUN echo "nogroup:x:65534:" >> /etc/group
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
-COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
+COPY --from=docker.io/library/postgres:13-bookworm /usr/lib /usr/lib
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
+# initdb expects /bin/sh to be available
+RUN ln -s /busybox/sh /bin/sh
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
#
# Default ENTRYPOINT for the docker image used for testing synapse with workers under complement
@@ -85,6 +85,14 @@ rc_invites:
per_user:
per_second: 1000
burst_count: 1000
+per_issuer:
+per_second: 1000
+burst_count: 1000
+
+rc_presence:
+per_user:
+per_second: 9999
+burst_count: 9999

federation_rr_transactions_per_room_per_second: 9999
@@ -1,6 +1,6 @@
#!/bin/sh
# This healthcheck script is designed to return OK when every
# host involved returns OK
{%- for healthcheck_url in healthcheck_urls %}
-curl -fSs {{ healthcheck_url }} || exit 1
+wget --quiet --tries=1 --spider {{ healthcheck_url }} || exit 1
{%- endfor %}
@@ -38,6 +38,9 @@ server {
{% if using_unix_sockets %}
proxy_pass http://unix:/run/main_public.sock;
{% else %}
+# note: do not add a path (even a single /) after the port in `proxy_pass`,
+# otherwise nginx will canonicalise the URI and cause signature verification
+# errors.
proxy_pass http://localhost:8080;
{% endif %}
proxy_set_header X-Forwarded-For $remote_addr;
@@ -1,5 +1,6 @@
{% if use_forking_launcher %}
[program:synapse_fork]
+environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
command=/usr/local/bin/python -m synapse.app.complement_fork_starter
{{ main_config_path }}
synapse.app.homeserver
@@ -20,6 +21,7 @@ exitcodes=0

{% else %}
[program:synapse_main]
+environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver
--config-path="{{ main_config_path }}"
--config-path=/conf/workers/shared.yaml
@@ -36,6 +38,7 @@ exitcodes=0

{% for worker in workers %}
[program:synapse_{{ worker.name }}]
+environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {{ worker.app }}
--config-path="{{ main_config_path }}"
--config-path=/conf/workers/shared.yaml
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/local/bin/python
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
@@ -1099,6 +1099,13 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
else:
log("Could not find %s, will not use" % (jemallocpath,))

+# Empty strings are falsy in Python so this default is fine. We just can't have these
+# be undefined because supervisord will complain about our
+# `%(ENV_SYNAPSE_HTTP_PROXY)s` usage.
+environ.setdefault("SYNAPSE_HTTP_PROXY", "")
+environ.setdefault("SYNAPSE_HTTPS_PROXY", "")
+environ.setdefault("SYNAPSE_NO_PROXY", "")
+
# Start supervisord, which will start Synapse, all of the configured worker
# processes, redis, nginx etc. according to the config we created above.
log("Starting supervisord")
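The new `environ.setdefault` calls exist because supervisord expands `%(ENV_SYNAPSE_HTTP_PROXY)s` from its own environment and fails at config-parse time if the variable is undefined, while an empty string is falsy and so is still treated as "no proxy configured". A standalone sketch of the idea — `ensure_proxy_env` is a hypothetical helper, not a function from the diff:

```python
import os


def ensure_proxy_env(environ: dict) -> None:
    # Empty strings are falsy, so the proxy still counts as unset downstream,
    # but supervisord's %(ENV_...)s interpolation now finds a defined value.
    for name in ("SYNAPSE_HTTP_PROXY", "SYNAPSE_HTTPS_PROXY", "SYNAPSE_NO_PROXY"):
        environ.setdefault(name, "")


env = dict(os.environ)
ensure_proxy_env(env)
assert all(k in env for k in ("SYNAPSE_HTTP_PROXY", "SYNAPSE_HTTPS_PROXY", "SYNAPSE_NO_PROXY"))
```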
+3 -3
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
#
# Prefixes all lines on stdout and stderr with the process name (as determined by
# the SUPERVISOR_PROCESS_NAME env var, which is automatically set by Supervisor).
@@ -10,6 +10,6 @@
# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
# confusion due to to interleaving of the different processes.
-exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1)
-exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2)
+exec 1> >(mawk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0; fflush() }' >&1)
+exec 2> >(mawk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0; fflush() }' >&2)
exec "$@"
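For readers unfamiliar with the mawk flags above: `-W interactive` plus the added `fflush()` make the prefixer emit each line as soon as it arrives instead of letting stdio batch output. A rough Python analogue of `prefix-log` — illustrative only, not part of the change:

```python
import os
import sys

# Prefix every stdin line with the supervisor program name, flushing per line
# so concurrently running workers' output does not interleave mid-line.
prefix = os.environ.get("SUPERVISOR_PROCESS_NAME", "unknown")
for line in sys.stdin:
    sys.stdout.write(f"{prefix} | {line}")
    sys.stdout.flush()  # same effect as mawk's fflush(): emit each line immediately
```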
@@ -60,10 +60,11 @@ paginate through.
anything other than the return value of `next_token` from a previous call. Defaults to `0`.
* `dir`: string - Direction of event report order. Whether to fetch the most recent
first (`b`) or the oldest first (`f`). Defaults to `b`.
-* `user_id`: string - Is optional and filters to only return users with user IDs that
-contain this value. This is the user who reported the event and wrote the reason.
-* `room_id`: string - Is optional and filters to only return rooms with room IDs that
-contain this value.
+* `user_id`: optional string - Filter by the user ID of the reporter. This is the user who reported the event
+and wrote the reason.
+* `room_id`: optional string - Filter by room id.
+* `event_sender_user_id`: optional string - Filter by the sender of the reported event. This is the user who
+the report was made against.

**Response**
@@ -385,6 +385,13 @@ The API is:
GET /_synapse/admin/v1/rooms/<room_id>/state
```

+**Parameters**
+
+The following query parameter is available:
+
+* `type` - The type of room state event to filter by, eg "m.room.create". If provided, only state events
+of this type will be returned (regardless of their `state_key` value).
+
A response body like the following is returned:

```json
@@ -40,6 +40,7 @@ It returns a JSON body like the following:
"erased": false,
"shadow_banned": 0,
"creation_ts": 1560432506,
+"last_seen_ts": 1732919539393,
"appservice_id": null,
"consent_server_notice_sent": null,
"consent_version": null,
@@ -477,9 +478,9 @@ with a body of:
}
```

-## List room memberships of a user
+## List joined rooms of a user

-Gets a list of all `room_id` that a specific `user_id` is member.
+Gets a list of all `room_id` that a specific `user_id` is joined to and is a member of (participating in).

The API is:

@@ -516,6 +517,73 @@ The following fields are returned in the JSON response body:
- `joined_rooms` - An array of `room_id`.
- `total` - Number of rooms.

+## Get the number of invites sent by the user
+
+Fetches the number of invites sent by the provided user ID across all rooms
+after the given timestamp.
+
+```
+GET /_synapse/admin/v1/users/$user_id/sent_invite_count
+```
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+* `user_id`: fully qualified: for example, `@user:server.com`
+
+The following should be set as query parameters in the URL:
+
+* `from_ts`: int, required. A timestamp in ms from the unix epoch. Only
+invites sent at or after the provided timestamp will be returned.
+This works by comparing the provided timestamp to the `received_ts`
+column in the `events` table.
+Note: https://currentmillis.com/ is a useful tool for converting dates
+into timestamps and vice versa.
+
+A response body like the following is returned:
+
+```json
+{
+"invite_count": 30
+}
+```
+
+_Added in Synapse 1.122.0_
+
+## Get the cumulative number of rooms a user has joined after a given timestamp
+
+Fetches the number of rooms that the user joined after the given timestamp, even
+if they have subsequently left/been banned from those rooms.
+
+```
+GET /_synapse/admin/v1/users/$user_id/cumulative_joined_room_count
+```
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+* `user_id`: fully qualified: for example, `@user:server.com`
+
+The following should be set as query parameters in the URL:
+
+* `from_ts`: int, required. A timestamp in ms from the unix epoch. Only
+invites sent at or after the provided timestamp will be returned.
+This works by comparing the provided timestamp to the `received_ts`
+column in the `events` table.
+Note: https://currentmillis.com/ is a useful tool for converting dates
+into timestamps and vice versa.
+
+A response body like the following is returned:
+
+```json
+{
+"cumulative_joined_room_count": 30
+}
+```
+
+_Added in Synapse 1.122.0_
+
## Account Data
Gets information about account data for a specific `user_id`.

@@ -1444,4 +1512,6 @@ The following fields are returned in the JSON response body:
- `failed_redactions` - dictionary - the keys of the dict are event ids the process was unable to redact, if any, and the values are
the corresponding error that caused the redaction to fail

_Added in Synapse 1.116.0._
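The two new endpoints documented above follow the usual Synapse admin API convention of Bearer authentication with an admin access token. As an illustration only — the homeserver URL, token and timestamp below are placeholders — a call might look like:

```python
import requests

HOMESERVER = "https://synapse.example.com"   # placeholder
ADMIN_TOKEN = "<admin access token>"         # placeholder
headers = {"Authorization": f"Bearer {ADMIN_TOKEN}"}

# Count invites sent by a user at/after a given ms-since-epoch timestamp.
resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/users/@user:server.com/sent_invite_count",
    params={"from_ts": 1672531200000},
    headers=headers,
)
resp.raise_for_status()
print(resp.json()["invite_count"])
```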
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -245,7 +245,7 @@ this callback.
|
||||
_First introduced in Synapse v1.37.0_
|
||||
|
||||
```python
|
||||
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile) -> bool
|
||||
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile, requester_id: str) -> bool
|
||||
```
|
||||
|
||||
Called when computing search results in the user directory. The module must return a
|
||||
@@ -264,6 +264,8 @@ The profile is represented as a dictionary with the following keys:
|
||||
The module is given a copy of the original dictionary, so modifying it from within the
|
||||
module cannot modify a user's profile when included in user directory search results.
|
||||
|
||||
The requester_id parameter is the ID of the user that called the user directory API.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `False`, Synapse falls through to the next one. The value of the first
callback that does not return `False` will be used. If this happens, Synapse will not call
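
To make the new two-argument form concrete, here is a minimal sketch of a module using this callback. The blocked-word list and the exempt admin ID are illustrative assumptions, not part of the diff; only `register_spam_checker_callbacks` and the callback signature come from the module API shown above.

```python
import synapse.module_api

BLOCKED_WORDS = {"spam", "scam"}  # illustrative placeholder list


class UsernameSpamChecker:
    def __init__(self, config: dict, api: synapse.module_api.ModuleApi):
        self._api = api
        api.register_spam_checker_callbacks(
            check_username_for_spam=self.check_username_for_spam,
        )

    async def check_username_for_spam(
        self, user_profile: synapse.module_api.UserProfile, requester_id: str
    ) -> bool:
        # Returning True hides this profile from the requester's search results.
        if requester_id == "@admin:example.com":  # hypothetical exempt requester
            return False
        display_name = (user_profile.get("display_name") or "").lower()
        return any(word in display_name for word in BLOCKED_WORDS)
```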

@@ -74,7 +74,7 @@ server {

proxy_pass http://localhost:8008;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host;
proxy_set_header Host $host:$server_port;

# Nginx by default only allows file uploads up to 1M in size
# Increase client_max_body_size to match max_upload_size defined in homeserver.yaml

@@ -157,7 +157,7 @@ sudo pip install py-bcrypt

#### Alpine Linux

6543 maintains [Synapse packages for Alpine Linux](https://pkgs.alpinelinux.org/packages?name=synapse&branch=edge) in the community repository. Install with:
Jahway603 maintains [Synapse packages for Alpine Linux](https://pkgs.alpinelinux.org/packages?name=synapse&branch=edge) in the community repository. Install with:

```sh
sudo apk add synapse

@@ -310,29 +310,18 @@ sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \

sudo dnf group install "Development Tools"
```

##### Red Hat Enterprise Linux / Rocky Linux
##### Red Hat Enterprise Linux / Rocky Linux / Oracle Linux

*Note: The term "RHEL" below refers to both Red Hat Enterprise Linux and Rocky Linux. The distributions are 1:1 binary compatible.*
*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux. The distributions are 1:1 binary compatible.*

It's recommended to use the latest Python versions.

RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ship with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.
RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ships with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.

Python 3.11 and 3.12 are available for both RHEL 8 and 9.

These commands should be run as root user.

RHEL 8
```bash
# Enable PowerTools repository
dnf config-manager --set-enabled powertools
```
RHEL 9
```bash
# Enable CodeReady Linux Builder repository
crb enable
```

Install new version of Python. You only need one of these:
```bash
# Python 3.11

@@ -72,8 +72,8 @@ class ExampleSpamChecker:

async def user_may_publish_room(self, userid, room_id):
return True  # allow publishing of all rooms

async def check_username_for_spam(self, user_profile):
return False  # allow all usernames
async def check_username_for_spam(self, user_profile, requester_id):
return False  # allow all usernames regardless of requester

async def check_registration_for_spam(
self,

@@ -117,6 +117,14 @@ each upgrade are complete before moving on to the next upgrade, to avoid

stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

# Upgrading to v1.122.0

## Dropping support for PostgreSQL 11 and 12

In line with our [deprecation policy](deprecation_policy.md), we've dropped
support for PostgreSQL 11 and 12, as they are no longer supported upstream.
This release of Synapse requires PostgreSQL 13+.
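
Before upgrading, admins may want to confirm what the database actually reports. A small sketch using `psycopg2` (the driver Synapse uses for PostgreSQL); the connection string is a placeholder:

```python
import psycopg2

# Placeholder DSN; adjust to match database_config in homeserver.yaml.
conn = psycopg2.connect("dbname=synapse user=synapse_user host=localhost")
with conn, conn.cursor() as cur:
    cur.execute("SHOW server_version;")
    version = cur.fetchone()[0]
    major = int(version.split(".")[0])
    print(f"PostgreSQL {version}: {'OK' if major >= 13 else 'too old for Synapse 1.122.0+'}")
```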

# Upgrading to v1.120.0

## Removal of experimental MSC3886 feature

@@ -162,6 +162,53 @@ Example configuration:

pid_file: DATADIR/homeserver.pid
```
---
### `daemonize`

Specifies whether Synapse should be started as a daemon process. If Synapse is being
managed by [systemd](../../systemd-with-workers/), this option must be omitted or set to
`false`.

This can also be set by the `--daemonize` (`-D`) argument when starting Synapse.

See `worker_daemonize` for more information on daemonizing workers.

Example configuration:
```yaml
daemonize: true
```
---
### `print_pidfile`

Print the path to the pidfile just before daemonizing. Defaults to false.

This can also be set by the `--print-pidfile` argument when starting Synapse.

Example configuration:
```yaml
print_pidfile: true
```
---
### `user_agent_suffix`

A suffix that is appended to the Synapse user-agent (ex. `Synapse/v1.123.0`). Defaults
to None.

Example configuration:
```yaml
user_agent_suffix: " (I'm a teapot; Linux x86_64)"
```
---
### `use_frozen_dicts`

Determines whether we should freeze the internal dict object in `FrozenEvent`. Freezing
prevents bugs where we accidentally share e.g. signature dicts. However, freezing a
dict is expensive. Defaults to false.

Example configuration:
```yaml
use_frozen_dicts: true
```
---
### `web_client_location`

The absolute URL to the web client which `/` will redirect to. Defaults to none.

@@ -595,6 +642,17 @@ listeners:

- names: [client, federation]
```

---
### `manhole`

Turn on the Twisted telnet manhole service on the given port. Defaults to none.

This can also be set by the `--manhole` argument when starting Synapse.

Example configuration:
```yaml
manhole: 1234
```
---
### `manhole_settings`

@@ -673,8 +731,9 @@ This setting has the following sub-options:

TLS via STARTTLS *if the SMTP server supports it*. If this option is set,
Synapse will refuse to connect unless the server supports STARTTLS.
* `enable_tls`: By default, if the server supports TLS, it will be used, and the server
  must present a certificate that is valid for 'smtp_host'. If this option
  must present a certificate that is valid for `tlsname`. If this option
  is set to false, TLS will not be used.
* `tlsname`: The domain name the SMTP server's TLS certificate must be valid for, defaulting to `smtp_host`.
* `notif_from`: defines the "From" address to use when sending emails.
  It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name,
  which is normally set in `app_name`, but may be overridden by the
@@ -741,6 +800,7 @@ email:

  force_tls: true
  require_transport_security: true
  enable_tls: false
  tlsname: mail.server.example.com
  notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
  app_name: my_branded_matrix_server
  enable_notifs: true

@@ -1866,6 +1926,27 @@ rc_federation:

  concurrent: 5
```
---
### `rc_presence`

This option sets ratelimiting for presence.

The `rc_presence.per_user` option sets rate limits on how often a specific
user's presence updates are evaluated. Ratelimited presence updates sent via sync are
ignored, and no error is returned to the client.
This option also sets the rate limit for the
[`PUT /_matrix/client/v3/presence/{userId}/status`](https://spec.matrix.org/latest/client-server-api/#put_matrixclientv3presenceuseridstatus)
endpoint.

`per_user` defaults to `per_second: 0.1`, `burst_count: 1`.

Example configuration:
```yaml
rc_presence:
  per_user:
    per_second: 0.05
    burst_count: 1
```
---
### `federation_rr_transactions_per_room_per_second`

Sets outgoing federation transaction frequency for sending read-receipts,

@@ -2511,6 +2592,14 @@ This is primarily intended for use with the `register_new_matrix_user` script

(see [Registering a user](../../setup/installation.md#registering-a-user));
however, the interface is [documented](../../admin_api/register_api.html).

Replacing an existing `registration_shared_secret` with a new one requires users
of the [Shared-Secret Registration API](../../admin_api/register_api.html) to
start using the new secret for requesting any further one-time nonces.

> ⚠️ **Warning** – The additional consequences of replacing
> [`macaroon_secret_key`](#macaroon_secret_key) will apply in case it delegates
> to `registration_shared_secret`.

See also [`registration_shared_secret_path`](#registration_shared_secret_path).

Example configuration:

@@ -3087,10 +3176,31 @@ A secret which is used to sign

If none is specified, the `registration_shared_secret` is used, if one is given;
otherwise, a secret key is derived from the signing key.

> ⚠️ **Warning** – Replacing an existing `macaroon_secret_key` with a new one
> will lead to invalidation of access tokens for all guest users. It will also
> break unsubscribe links in emails sent before the change. An unlucky user
> might encounter a broken SSO login flow and would have to start again.

Example configuration:
```yaml
macaroon_secret_key: <PRIVATE STRING>
```
---
### `macaroon_secret_key_path`

An alternative to [`macaroon_secret_key`](#macaroon_secret_key):
allows the secret key to be specified in an external file.

The file should be a plain text file, containing only the secret key.
Synapse reads the secret key from the given file once at startup.

Example configuration:
```yaml
macaroon_secret_key_path: /path/to/secrets/file
```

_Added in Synapse 1.121.0._

---
### `form_secret`

@@ -3098,6 +3208,9 @@ A secret which is used to calculate HMACs for form values, to stop

falsification of values. Must be specified for the User Consent
forms to work.

Replacing an existing `form_secret` with a new one might break the user consent
page for an unlucky user and require them to reopen the page from a new link.

Example configuration:
```yaml
form_secret: <PRIVATE STRING>

@@ -3298,8 +3411,9 @@ This setting has the following sub-options:

The default is 'uid'.
* `attribute_requirements`: It is possible to configure Synapse to only allow logins if SAML attributes
  match particular values. The requirements can be listed under
  `attribute_requirements` as shown in the example. All of the listed attributes must
  match for the login to be permitted.
  `attribute_requirements` as shown in the example. All of the listed attributes must
  match for the login to be permitted. Values can be specified in a `one_of` list to allow
  multiple values for an attribute.
* `idp_entityid`: If the metadata XML contains multiple IdP entities then the `idp_entityid`
  option must be set to the entity to redirect users to.
  Most deployments only have a single IdP entity and so should omit this option.
@@ -3380,7 +3494,9 @@ saml2_config:

    - attribute: userGroup
      value: "staff"
    - attribute: department
      value: "sales"
      one_of:
        - "sales"
        - "admins"

  idp_entityid: 'https://our_idp/entityid'
```

@@ -4343,6 +4459,9 @@ HTTP requests from workers.

By default, this value is omitted (equivalently `null`), which means that
traffic between the workers and the main process is not authenticated.

Replacing an existing `worker_replication_secret` with a new one will break
communication with all workers that have not yet updated their secret.

Example configuration:
```yaml
worker_replication_secret: "secret_secret"

@@ -4447,6 +4566,10 @@ instance_map:

  worker1:
    host: localhost
    port: 8034
  other:
    host: localhost
    port: 8035
    tls: true
```
Example configuration (#2, for UNIX sockets):
```yaml

@@ -273,17 +273,6 @@ information.

^/_matrix/client/(api/v1|r0|v3|unstable)/knock/
^/_matrix/client/(api/v1|r0|v3|unstable)/profile/

# Account data requests
^/_matrix/client/(r0|v3|unstable)/.*/tags
^/_matrix/client/(r0|v3|unstable)/.*/account_data

# Receipts requests
^/_matrix/client/(r0|v3|unstable)/rooms/.*/receipt
^/_matrix/client/(r0|v3|unstable)/rooms/.*/read_markers

# Presence requests
^/_matrix/client/(api/v1|r0|v3|unstable)/presence/

# User directory search requests
^/_matrix/client/(r0|v3|unstable)/user_directory/search$

@@ -292,6 +281,13 @@ Additionally, the following REST endpoints can be handled for GET requests:

^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events

# Account data requests
^/_matrix/client/(r0|v3|unstable)/.*/tags
^/_matrix/client/(r0|v3|unstable)/.*/account_data

# Presence requests
^/_matrix/client/(api/v1|r0|v3|unstable)/presence/

Pagination requests can also be handled, but all requests for a given
room must be routed to the same instance. Additionally, care must be taken to
ensure that the purge history admin API is not used while pagination requests

File diff suppressed because it is too large
@@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
version = "1.121.0rc1"
version = "1.125.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"

@@ -34,7 +34,7 @@ pyo3 = { version = "0.23.2", features = [

"macros",
"anyhow",
"abi3",
"abi3-py38",
"abi3-py39",
] }
pyo3-log = "0.12.0"
pythonize = "0.23.0"

@@ -42,12 +42,12 @@ from typing import (

Set,
Tuple,
Type,
TypedDict,
TypeVar,
cast,
)

import yaml
from typing_extensions import TypedDict

from twisted.internet import defer, reactor as reactor_

@@ -18,9 +18,7 @@

# [This file includes modifications made by New Vector Limited]
#
#
from typing import TYPE_CHECKING, Optional, Tuple

from typing_extensions import Protocol
from typing import TYPE_CHECKING, Optional, Protocol, Tuple

from twisted.web.server import Request

@@ -19,7 +19,7 @@

#
#
import logging
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
from urllib.parse import urlencode

from authlib.oauth2 import ClientAuth

@@ -119,7 +119,7 @@ class MSC3861DelegatedAuth(BaseAuth):

self._clock = hs.get_clock()
self._http_client = hs.get_proxied_http_client()
self._hostname = hs.hostname
self._admin_token = self._config.admin_token
self._admin_token: Callable[[], Optional[str]] = self._config.admin_token

self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata](
self._load_metadata

@@ -133,9 +133,10 @@ class MSC3861DelegatedAuth(BaseAuth):

)
else:
# Else use the client secret
assert self._config.client_secret, "No client_secret provided"
client_secret = self._config.client_secret()
assert client_secret, "No client_secret provided"
self._client_auth = ClientAuth(
self._config.client_id, self._config.client_secret, auth_method
self._config.client_id, client_secret, auth_method
)

async def _load_metadata(self) -> OpenIDProviderMetadata:

@@ -174,6 +175,12 @@ class MSC3861DelegatedAuth(BaseAuth):

logger.warning("Failed to load metadata:", exc_info=True)
return None

async def auth_metadata(self) -> Dict[str, Any]:
"""
Returns the auth metadata dict
"""
return await self._issuer_metadata.get()

async def _introspection_endpoint(self) -> str:
"""
Returns the introspection endpoint of the issuer

@@ -277,7 +284,7 @@ class MSC3861DelegatedAuth(BaseAuth):

requester = await self.get_user_by_access_token(access_token, allow_expired)

# Do not record requests from MAS using the virtual `__oidc_admin` user.
if access_token != self._admin_token:
if access_token != self._admin_token():
await self._record_request(request, requester)

if not allow_guest and requester.is_guest:

@@ -318,7 +325,8 @@ class MSC3861DelegatedAuth(BaseAuth):

token: str,
allow_expired: bool = False,
) -> Requester:
if self._admin_token is not None and token == self._admin_token:
admin_token = self._admin_token()
if admin_token is not None and token == admin_token:
# XXX: This is a temporary solution so that the admin API can be called by
# the OIDC provider. This will be removed once we have OIDC client
# credentials grant support in matrix-authentication-service.

@@ -320,3 +320,8 @@ class ApprovalNoticeMedium:

class Direction(enum.Enum):
BACKWARDS = "b"
FORWARDS = "f"


class ProfileFields:
DISPLAYNAME: Final = "displayname"
AVATAR_URL: Final = "avatar_url"

@@ -132,6 +132,10 @@ class Codes(str, Enum):

# connection.
UNKNOWN_POS = "M_UNKNOWN_POS"

# Part of MSC4133
PROFILE_TOO_LARGE = "M_PROFILE_TOO_LARGE"
KEY_TOO_LARGE = "M_KEY_TOO_LARGE"


class CodeMessageException(RuntimeError):
"""An exception with integer code, a message string attributes and optional headers.

@@ -275,6 +275,7 @@ class Ratelimiter:

update: bool = True,
n_actions: int = 1,
_time_now_s: Optional[float] = None,
pause: Optional[float] = 0.5,
) -> None:
"""Checks if an action can be performed. If not, raises a LimitExceededError

@@ -298,6 +299,8 @@ class Ratelimiter:

at all.
_time_now_s: The current time. Optional, defaults to the current time according
to self.clock. Only used by tests.
pause: Time in seconds to pause when an action is being limited. Defaults to 0.5
to stop clients from "tight-looping" on retrying their request.

Raises:
LimitExceededError: If an action could not be performed, along with the time in

@@ -316,9 +319,8 @@ class Ratelimiter:

)

if not allowed:
# We pause for a bit here to stop clients from "tight-looping" on
# retrying their request.
await self.clock.sleep(0.5)
if pause:
await self.clock.sleep(pause)

raise LimitExceededError(
limiter_name=self._limiter_name,
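
The hunk is truncated here, but the behavioural change is clear: the hard-coded 0.5 s sleep before raising becomes the configurable `pause` argument. A hedged sketch of a call site that opts out of the sleep entirely, assuming the surrounding method is `Ratelimiter.ratelimit` from Synapse's `synapse.api.ratelimiting` module:

```python
from synapse.api.errors import LimitExceededError
from synapse.api.ratelimiting import Ratelimiter
from synapse.types import Requester


async def is_within_quota(limiter: Ratelimiter, requester: Requester) -> bool:
    """Returns whether the action is allowed, failing fast when limited.

    pause=None (or 0) skips the anti-tight-loop sleep, which previously
    was an unconditional 0.5 second delay.
    """
    try:
        await limiter.ratelimit(requester, pause=None)
    except LimitExceededError:
        return False
    return True
```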

@@ -221,9 +221,13 @@ class Config:

The number of milliseconds in the duration.

Raises:
TypeError, if given something other than an integer or a string
TypeError: if given something other than an integer or a string, or the
duration is using an incorrect suffix.
ValueError: if given a string not of the form described above.
"""
# For integers, we prefer to use `type(value) is int` instead of
# `isinstance(value, int)` because we want to exclude subclasses of int, such as
# bool.
if type(value) is int:  # noqa: E721
return value
elif isinstance(value, str):

@@ -246,9 +250,20 @@ class Config:

if suffix in sizes:
value = value[:-1]
size = sizes[suffix]
elif suffix.isdigit():
# No suffix is treated as milliseconds.
value = value
size = 1
else:
raise TypeError(
f"Bad duration suffix {value} (expected no suffix or one of these suffixes: {sizes.keys()})"
)

return int(value) * size
else:
raise TypeError(f"Bad duration {value!r}")
raise TypeError(
f"Bad duration type {value!r} (expected int or string duration)"
)
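
Read together, the branches above give string durations three behaviours: a known suffix scales the value, a trailing digit means the whole string is already in milliseconds, and anything else now raises a `TypeError` instead of misparsing. A sketch under the assumption that the method is the static `Config.parse_duration` and that the (unshown) `sizes` table maps the usual Synapse suffixes (`s`, `m`, `h`, `d`, `w`, `y`):

```python
from synapse.config._base import Config

assert Config.parse_duration(500) == 500  # bare int: already milliseconds
assert Config.parse_duration("500") == 500  # digit-only string: milliseconds
assert Config.parse_duration("5m") == 5 * 60 * 1000  # suffixed string is scaled

try:
    Config.parse_duration("5x")  # unknown suffix now fails loudly
except TypeError as e:
    print(e)

try:
    Config.parse_duration(5.0)  # non-int, non-str was and remains a TypeError
except TypeError as e:
    print(e)
```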

@staticmethod
def abspath(file_path: str) -> str:

@@ -110,6 +110,7 @@ class EmailConfig(Config):

raise ConfigError(
"email.require_transport_security requires email.enable_tls to be true"
)
self.email_tlsname = email_config.get("tlsname", None)

if "app_name" in email_config:
self.email_app_name = email_config["app_name"]

@@ -20,14 +20,15 @@

#

import enum
from typing import TYPE_CHECKING, Any, Optional
from functools import cache
from typing import TYPE_CHECKING, Any, Iterable, Optional

import attr
import attr.validators

from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
from synapse.config import ConfigError
from synapse.config._base import Config, RootConfig
from synapse.config._base import Config, RootConfig, read_file
from synapse.types import JsonDict

# Determine whether authlib is installed.

@@ -43,6 +44,12 @@ if TYPE_CHECKING:

from authlib.jose.rfc7517 import JsonWebKey


@cache
def read_secret_from_file_once(file_path: Any, config_path: Iterable[str]) -> str:
"""Returns the memoized secret read from file."""
return read_file(file_path, config_path).strip()


class ClientAuthMethod(enum.Enum):
"""List of supported client auth methods."""

@@ -63,6 +70,40 @@ def _parse_jwks(jwks: Optional[JsonDict]) -> Optional["JsonWebKey"]:

return JsonWebKey.import_key(jwks)


def _check_client_secret(
instance: "MSC3861", _attribute: attr.Attribute, _value: Optional[str]
) -> None:
if instance._client_secret and instance._client_secret_path:
raise ConfigError(
(
"You have configured both "
"`experimental_features.msc3861.client_secret` and "
"`experimental_features.msc3861.client_secret_path`. "
"These are mutually incompatible."
),
("experimental", "msc3861", "client_secret"),
)
# Check client secret can be retrieved
instance.client_secret()


def _check_admin_token(
instance: "MSC3861", _attribute: attr.Attribute, _value: Optional[str]
) -> None:
if instance._admin_token and instance._admin_token_path:
raise ConfigError(
(
"You have configured both "
"`experimental_features.msc3861.admin_token` and "
"`experimental_features.msc3861.admin_token_path`. "
"These are mutually incompatible."
),
("experimental", "msc3861", "admin_token"),
)
# Check admin token can be retrieved
instance.admin_token()


@attr.s(slots=True, frozen=True)
class MSC3861:
"""Configuration for MSC3861: Matrix architecture change to delegate authentication via OIDC"""

@@ -97,15 +138,30 @@ class MSC3861:

)
"""The auth method used when calling the introspection endpoint."""

client_secret: Optional[str] = attr.ib(
_client_secret: Optional[str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(str)),
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
_check_client_secret,
],
)
"""
The client secret to use when calling the introspection endpoint,
when using any of the client_secret_* client auth methods.
"""

_client_secret_path: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
_check_client_secret,
],
)
"""
Alternative to `client_secret`: allows the secret to be specified in an
external file.
"""

jwk: Optional["JsonWebKey"] = attr.ib(default=None, converter=_parse_jwks)
"""
The JWKS to use when calling the introspection endpoint,

@@ -133,7 +189,7 @@ class MSC3861:

ClientAuthMethod.CLIENT_SECRET_BASIC,
ClientAuthMethod.CLIENT_SECRET_JWT,
)
and self.client_secret is None
and self.client_secret() is None
):
raise ConfigError(
f"A client secret must be provided when using the {value} client auth method",

@@ -152,15 +208,48 @@ class MSC3861:

)
"""The URL of the My Account page on the OIDC Provider as per MSC2965."""

admin_token: Optional[str] = attr.ib(
_admin_token: Optional[str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(str)),
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
_check_admin_token,
],
)
"""
A token that should be considered as an admin token.
This is used by the OIDC provider, to make admin calls to Synapse.
"""

_admin_token_path: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
_check_admin_token,
],
)
"""
Alternative to `admin_token`: allows the secret to be specified in an
external file.
"""

def client_secret(self) -> Optional[str]:
"""Returns the secret given via `client_secret` or `client_secret_path`."""
if self._client_secret_path:
return read_secret_from_file_once(
self._client_secret_path,
("experimental_features", "msc3861", "client_secret_path"),
)
return self._client_secret

def admin_token(self) -> Optional[str]:
"""Returns the admin token given via `admin_token` or `admin_token_path`."""
if self._admin_token_path:
return read_secret_from_file_once(
self._admin_token_path,
("experimental_features", "msc3861", "admin_token_path"),
)
return self._admin_token

def check_config_conflicts(self, root: RootConfig) -> None:
"""Checks for any configuration conflicts with other parts of Synapse.

@@ -436,8 +525,8 @@ class ExperimentalConfig(Config):

("experimental", "msc4108_delegation_endpoint"),
)

# MSC4151: Report room API (Client-Server API)
self.msc4151_enabled: bool = experimental.get("msc4151_enabled", False)
# MSC4133: Custom profile fields
self.msc4133_enabled: bool = experimental.get("msc4133_enabled", False)

# MSC4210: Remove legacy mentions
self.msc4210_enabled: bool = experimental.get("msc4210_enabled", False)

@@ -43,7 +43,7 @@ from unpaddedbase64 import decode_base64

from synapse.types import JsonDict
from synapse.util.stringutils import random_string, random_string_with_symbols

from ._base import Config, ConfigError
from ._base import Config, ConfigError, read_file

if TYPE_CHECKING:
from signedjson.key import VerifyKeyWithExpiry

@@ -91,6 +91,11 @@ To suppress this warning and continue using 'matrix.org', admins should set

'suppress_key_server_warning' to 'true' in homeserver.yaml.
--------------------------------------------------------------------------------"""

CONFLICTING_MACAROON_SECRET_KEY_OPTS_ERROR = """\
Conflicting options 'macaroon_secret_key' and 'macaroon_secret_key_path' are
both defined in config file.
"""

logger = logging.getLogger(__name__)

@@ -166,10 +171,16 @@ class KeyConfig(Config):

)
)

macaroon_secret_key: Optional[str] = config.get(
"macaroon_secret_key", self.root.registration.registration_shared_secret
)

macaroon_secret_key = config.get("macaroon_secret_key")
macaroon_secret_key_path = config.get("macaroon_secret_key_path")
if macaroon_secret_key_path:
if macaroon_secret_key:
raise ConfigError(CONFLICTING_MACAROON_SECRET_KEY_OPTS_ERROR)
macaroon_secret_key = read_file(
macaroon_secret_key_path, "macaroon_secret_key_path"
).strip()
if not macaroon_secret_key:
macaroon_secret_key = self.root.registration.registration_shared_secret
if not macaroon_secret_key:
# Unfortunately, there are people out there that don't have this
# set. Lets just be "nice" and derive one from their secret key.

@@ -228,3 +228,9 @@ class RatelimitConfig(Config):

config.get("remote_media_download_burst_count", "500M")
),
)

self.rc_presence_per_user = RatelimitSettings.parse(
config,
"rc_presence.per_user",
defaults={"per_second": 0.1, "burst_count": 1},
)

@@ -22,7 +22,7 @@

import logging
import os
from typing import Any, Dict, List, Tuple
from urllib.request import getproxies_environment  # type: ignore
from urllib.request import getproxies_environment

import attr

@@ -19,7 +19,7 @@

#
#
import logging
from typing import Any, Dict, Optional
from typing import Any, Dict, List, Optional

import attr

@@ -43,13 +43,23 @@ class SsoAttributeRequirement:

"""Object describing a single requirement for SSO attributes."""

attribute: str
# If a value is not given, than the attribute must simply exist.
value: Optional[str]
# If neither value nor one_of is given, the attribute must simply exist. This is
# only true for CAS configs which use a different JSON schema than the one below.
value: Optional[str] = None
one_of: Optional[List[str]] = None

JSON_SCHEMA = {
"type": "object",
"properties": {"attribute": {"type": "string"}, "value": {"type": "string"}},
"required": ["attribute", "value"],
"properties": {
"attribute": {"type": "string"},
"value": {"type": "string"},
"one_of": {"type": "array", "items": {"type": "string"}},
},
"required": ["attribute"],
"oneOf": [
{"required": ["value"]},
{"required": ["one_of"]},
],
}
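
The relaxed schema can be exercised directly with the `jsonschema` library (which Synapse itself depends on). The snippet below restates `JSON_SCHEMA` rather than importing it, since its module path is not shown in the hunk:

```python
import jsonschema

JSON_SCHEMA = {
    "type": "object",
    "properties": {
        "attribute": {"type": "string"},
        "value": {"type": "string"},
        "one_of": {"type": "array", "items": {"type": "string"}},
    },
    "required": ["attribute"],
    "oneOf": [{"required": ["value"]}, {"required": ["one_of"]}],
}

# A single expected value, or a list of acceptable values, both validate.
jsonschema.validate({"attribute": "userGroup", "value": "staff"}, JSON_SCHEMA)
jsonschema.validate({"attribute": "department", "one_of": ["sales", "admins"]}, JSON_SCHEMA)

try:
    # Supplying both `value` and `one_of` matches both branches and fails `oneOf`.
    jsonschema.validate({"attribute": "dept", "value": "a", "one_of": ["b"]}, JSON_SCHEMA)
except jsonschema.ValidationError:
    print("both value and one_of given: rejected")
```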

@@ -32,6 +32,7 @@ from typing import (

Mapping,
MutableMapping,
Optional,
Protocol,
Set,
Tuple,
Union,

@@ -41,7 +42,6 @@ from typing import (

from canonicaljson import encode_canonical_json
from signedjson.key import decode_verify_key_bytes
from signedjson.sign import SignatureVerifyException, verify_signed_json
from typing_extensions import Protocol
from unpaddedbase64 import decode_base64

from synapse.api.constants import (

@@ -566,6 +566,7 @@ def _is_membership_change_allowed(

logger.debug(
"_is_membership_change_allowed: %s",
{
"caller_membership": caller.membership if caller else None,
"caller_in_room": caller_in_room,
"caller_invited": caller_invited,
"caller_knocked": caller_knocked,

@@ -677,7 +678,8 @@ def _is_membership_change_allowed(

and join_rule == JoinRules.KNOCK_RESTRICTED
)
):
if not caller_in_room and not caller_invited:
# You can only join the room if you are invited or are already in the room.
if not (caller_in_room or caller_invited):
raise AuthError(403, "You are not invited to this room.")
else:
# TODO (erikj): may_join list

@@ -30,6 +30,7 @@ from typing import (

Generic,
Iterable,
List,
Literal,
Optional,
Tuple,
Type,

@@ -39,10 +40,9 @@ from typing import (

)

import attr
from typing_extensions import Literal
from unpaddedbase64 import encode_base64

from synapse.api.constants import RelationTypes
from synapse.api.constants import EventTypes, RelationTypes
from synapse.api.room_versions import EventFormatVersions, RoomVersion, RoomVersions
from synapse.synapse_rust.events import EventInternalMetadata
from synapse.types import JsonDict, StrCollection

@@ -325,12 +325,17 @@ class EventBase(metaclass=abc.ABCMeta):

def __repr__(self) -> str:
rejection = f"REJECTED={self.rejected_reason}, " if self.rejected_reason else ""

conditional_membership_string = ""
if self.get("type") == EventTypes.Member:
conditional_membership_string = f"membership={self.membership}, "

return (
f"<{self.__class__.__name__} "
f"{rejection}"
f"event_id={self.event_id}, "
f"type={self.get('type')}, "
f"state_key={self.get('state_key')}, "
f"{conditional_membership_string}"
f"outlier={self.internal_metadata.is_outlier()}"
">"
)

@@ -66,50 +66,67 @@ class InviteAutoAccepter:

event: The incoming event.
"""
# Check if the event is an invite for a local user.
is_invite_for_local_user = (
event.type == EventTypes.Member
and event.is_state()
and event.membership == Membership.INVITE
and self._api.is_mine(event.state_key)
)
if (
event.type != EventTypes.Member
or event.is_state() is False
or event.membership != Membership.INVITE
or self._api.is_mine(event.state_key) is False
):
return

# Only accept invites for direct messages if the configuration mandates it.
is_direct_message = event.content.get("is_direct", False)
is_allowed_by_direct_message_rules = (
not self._config.accept_invites_only_for_direct_messages
or is_direct_message is True
)
if (
self._config.accept_invites_only_for_direct_messages
and is_direct_message is False
):
return

# Only accept invites from remote users if the configuration mandates it.
is_from_local_user = self._api.is_mine(event.sender)
is_allowed_by_local_user_rules = (
not self._config.accept_invites_only_from_local_users
or is_from_local_user is True
if (
self._config.accept_invites_only_from_local_users
and is_from_local_user is False
):
return

# Check the user is activated.
recipient = await self._api.get_userinfo_by_id(event.state_key)

# Ignore if the user doesn't exist.
if recipient is None:
return

# Never accept invites for deactivated users.
if recipient.is_deactivated:
return

# Never accept invites for suspended users.
if recipient.suspended:
return

# Never accept invites for locked users.
if recipient.locked:
return

# Make the user join the room. We run this as a background process to circumvent a race condition
# that occurs when responding to invites over federation (see https://github.com/matrix-org/synapse-auto-accept-invite/issues/12)
run_as_background_process(
"retry_make_join",
self._retry_make_join,
event.state_key,
event.state_key,
event.room_id,
"join",
bg_start_span=False,
)

if (
is_invite_for_local_user
and is_allowed_by_direct_message_rules
and is_allowed_by_local_user_rules
):
# Make the user join the room. We run this as a background process to circumvent a race condition
# that occurs when responding to invites over federation (see https://github.com/matrix-org/synapse-auto-accept-invite/issues/12)
run_as_background_process(
"retry_make_join",
self._retry_make_join,
event.state_key,
event.state_key,
event.room_id,
"join",
bg_start_span=False,
if is_direct_message:
# Mark this room as a direct message!
await self._mark_room_as_direct_message(
event.state_key, event.sender, event.room_id
)

if is_direct_message:
# Mark this room as a direct message!
await self._mark_room_as_direct_message(
event.state_key, event.sender, event.room_id
)

async def _mark_room_as_direct_message(
self, user_id: str, dm_user_id: str, room_id: str
) -> None:

@@ -24,7 +24,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union

import attr
from signedjson.types import SigningKey

from synapse.api.constants import MAX_DEPTH
from synapse.api.constants import MAX_DEPTH, EventTypes
from synapse.api.room_versions import (
KNOWN_EVENT_FORMAT_VERSIONS,
EventFormatVersions,

@@ -109,6 +109,19 @@ class EventBuilder:

def is_state(self) -> bool:
return self._state_key is not None

def is_mine_id(self, user_id: str) -> bool:
"""Determines whether a user ID or room alias originates from this homeserver.

Returns:
`True` if the hostname part of the user ID or room alias matches this
homeserver.
`False` otherwise, or if the user ID or room alias is malformed.
"""
localpart_hostname = user_id.split(":", 1)
if len(localpart_hostname) < 2:
return False
return localpart_hostname[1] == self._hostname

async def build(
self,
prev_event_ids: List[str],

@@ -142,6 +155,46 @@ class EventBuilder:

self, state_ids
)

# Check for out-of-band membership that may have been exposed on `/sync` but
# the events have not been de-outliered yet so they won't be part of the
# room state yet.
#
# This helps in situations where a remote homeserver invites a local user to
# a room that we're already participating in; and we've persisted the invite
# as an out-of-band membership (outlier), but it hasn't been pushed to us as
# part of a `/send` transaction yet and de-outliered. This also helps for
# any of the other out-of-band membership transitions.
#
# As an optimization, we could check if the room state already includes a
# non-`leave` membership event, then we can assume the membership event has
# been de-outliered and we don't need to check for an out-of-band
# membership. But we don't have the necessary information from a
# `StateMap[str]` and we'll just have to take the hit of this extra lookup
# for any membership event for now.
if self.type == EventTypes.Member and self.is_mine_id(self.state_key):
(
_membership,
member_event_id,
) = await self._store.get_local_current_membership_for_user_in_room(
user_id=self.state_key,
room_id=self.room_id,
)
# There is no need to check if the membership is actually an
# out-of-band membership (`outlier`) as we would end up with the
# same result either way (adding the member event to the
# `auth_event_ids`).
if (
member_event_id is not None
# We only need to be careful about duplicating the event in the
# `auth_event_ids` list (duplicate `type`/`state_key` is part of the
# authorization rules)
and member_event_id not in auth_event_ids
):
auth_event_ids.append(member_event_id)
# Also make sure to point to the previous membership event that will
# allow this one to happen so the computed state works out.
prev_event_ids.append(member_event_id)

format_version = self.room_version.event_format
# The types of auth/prev events changes between event versions.
prev_events: Union[StrCollection, List[Tuple[str, Dict[str, str]]]]

@@ -248,7 +248,7 @@ class EventContext(UnpersistedEventContextBase):

@tag_args
async def get_current_state_ids(
self, state_filter: Optional["StateFilter"] = None
) -> Optional[StateMap[str]]:
) -> StateMap[str]:
"""
Gets the room state map, including this event - ie, the state in ``state_group``

@@ -256,13 +256,12 @@ class EventContext(UnpersistedEventContextBase):

not make it into the room state. This method will raise an exception if
``rejected`` is set.

It is also an error to access this for an outlier event.

Arg:
state_filter: specifies the type of state event to fetch from DB, example: EventTypes.JoinRules

Returns:
Returns None if state_group is None, which happens when the associated
event is an outlier.

Maps a (type, state_key) to the event ID of the state event matching
this tuple.
"""

@@ -300,7 +299,8 @@ class EventContext(UnpersistedEventContextBase):

this tuple.
"""

assert self.state_group_before_event is not None
if self.state_group_before_event is None:
return {}
return await self._storage.state.get_state_ids_for_group(
self.state_group_before_event, state_filter
)

@@ -139,13 +139,13 @@ from typing import (

Hashable,
Iterable,
List,
Literal,
Optional,
Tuple,
)

import attr
from prometheus_client import Counter
from typing_extensions import Literal

from twisted.internet import defer

@@ -20,9 +20,7 @@

#
#
import logging
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple, Type

from typing_extensions import Literal
from typing import TYPE_CHECKING, Dict, Iterable, List, Literal, Optional, Tuple, Type

from synapse.api.errors import FederationDeniedError, SynapseError
from synapse.federation.transport.server._base import (

@@ -24,6 +24,7 @@ from typing import (

TYPE_CHECKING,
Dict,
List,
Literal,
Mapping,
Optional,
Sequence,

@@ -32,8 +33,6 @@ from typing import (

Union,
)

from typing_extensions import Literal

from synapse.api.constants import Direction, EduTypes
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import RoomVersions

@@ -473,7 +473,7 @@ class AdminHandler:

"type": EventTypes.Redaction,
"content": {"reason": reason} if reason else {},
"room_id": room,
"sender": user_id,
"sender": requester.user.to_string(),
}
if room_version.updated_redaction_rules:
event_dict["content"]["redacts"] = event.event_id

@@ -896,10 +896,10 @@ class ApplicationServicesHandler:

results = await make_deferred_yieldable(
defer.DeferredList(
[
run_in_background(
run_in_background(  # type: ignore[call-overload]
self.appservice_api.claim_client_keys,
# We know this must be an app service.
self.store.get_app_service_by_id(service_id),  # type: ignore[arg-type]
self.store.get_app_service_by_id(service_id),
service_query,
)
for service_id, service_query in query_by_appservice.items()

@@ -952,10 +952,10 @@ class ApplicationServicesHandler:

results = await make_deferred_yieldable(
defer.DeferredList(
[
run_in_background(
run_in_background(  # type: ignore[call-overload]
self.appservice_api.query_keys,
# We know this must be an app service.
self.store.get_app_service_by_id(service_id),  # type: ignore[arg-type]
self.store.get_app_service_by_id(service_id),
service_query,
)
for service_id, service_query in query_by_appservice.items()

@@ -1579,7 +1579,10 @@ class AuthHandler:

# for the presence of an email address during password reset was
# case sensitive).
if medium == "email":
address = canonicalise_email(address)
try:
address = canonicalise_email(address)
except ValueError as e:
raise SynapseError(400, str(e))

await self.store.user_add_threepid(
user_id, medium, address, validated_at, self.hs.get_clock().time_msec()

@@ -1610,7 +1613,10 @@ class AuthHandler:

"""
# 'Canonicalise' email addresses as per above
if medium == "email":
address = canonicalise_email(address)
try:
address = canonicalise_email(address)
except ValueError as e:
raise SynapseError(400, str(e))

await self.store.user_delete_threepid(user_id, medium, address)

@@ -21,9 +21,7 @@

import logging
import string
from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence

from typing_extensions import Literal
from typing import TYPE_CHECKING, Iterable, List, Literal, Optional, Sequence

from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes
from synapse.api.errors import (

@@ -20,9 +20,7 @@

#

import logging
from typing import TYPE_CHECKING, Dict, Optional, cast

from typing_extensions import Literal
from typing import TYPE_CHECKING, Dict, Literal, Optional, cast

from synapse.api.errors import (
Codes,

@@ -151,6 +151,8 @@ class FederationEventHandler:

def __init__(self, hs: "HomeServer"):
self._clock = hs.get_clock()
self._store = hs.get_datastores().main
self._state_store = hs.get_datastores().state
self._state_deletion_store = hs.get_datastores().state_deletion
self._storage_controllers = hs.get_storage_controllers()
self._state_storage_controller = self._storage_controllers.state

@@ -580,7 +582,9 @@ class FederationEventHandler:

room_version.identifier,
state_maps_to_resolve,
event_map=None,
state_res_store=StateResolutionStore(self._store),
state_res_store=StateResolutionStore(
self._store, self._state_deletion_store
),
)
)
else:

@@ -1179,7 +1183,9 @@ class FederationEventHandler:

room_version,
state_maps,
event_map={event_id: event},
state_res_store=StateResolutionStore(self._store),
state_res_store=StateResolutionStore(
self._store, self._state_deletion_store
),
)

except Exception as e:

@@ -1874,7 +1880,9 @@ class FederationEventHandler:

room_version,
[local_state_id_map, claimed_auth_events_id_map],
event_map=None,
state_res_store=StateResolutionStore(self._store),
state_res_store=StateResolutionStore(
self._store, self._state_deletion_store
),
)
)
else:

@@ -2014,7 +2022,9 @@ class FederationEventHandler:

room_version,
state_sets,
event_map=None,
state_res_store=StateResolutionStore(self._store),
state_res_store=StateResolutionStore(
self._store, self._state_deletion_store
),
)
)
else:

@@ -2272,8 +2282,9 @@ class FederationEventHandler:

event_and_contexts, backfilled=backfilled
)

# After persistence we always need to notify replication there may
# be new data.
# After persistence, we never notify clients (wake up `/sync` streams) about
# backfilled events but it's important to let all the workers know about any
# new event (backfilled or not) because TODO
self._notifier.notify_replication()

if self._ephemeral_messages_enabled:

@@ -31,6 +31,7 @@ from typing import (

List,
Optional,
Type,
TypedDict,
TypeVar,
Union,
)

@@ -52,7 +53,6 @@ from pymacaroons.exceptions import (

MacaroonInitException,
MacaroonInvalidSignatureException,
)
from typing_extensions import TypedDict

from twisted.web.client import readBody
from twisted.web.http_headers import Headers

@@ -1002,7 +1002,21 @@ class OidcProvider:

"""

state = generate_token()
nonce = generate_token()

# Generate a nonce 32 characters long. When encoded with base64url later on,
# the nonce will be 43 characters when sent to the identity provider.
#
# While RFC7636 does not specify a minimum length for the `nonce`
# parameter, the TI-Messenger IDP_FD spec v1.7.3 does require it to be
# between 43 and 128 characters. This spec concerns using Matrix for
# communication in German healthcare.
#
# As increasing the length only strengthens security, we use this length
# to allow TI-Messenger deployments using Synapse to satisfy this
# external spec.
#
# See https://github.com/element-hq/synapse/pull/18109 for more context.
nonce = generate_token(length=32)
code_verifier = ""
if not client_redirect_url:
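
The length arithmetic in the comment above can be sanity-checked independently: 32 random bytes encode to exactly 43 characters of unpadded base64url, the lower bound of the required 43-128 character window. A standalone sketch (Synapse's own `generate_token` helper is not reproduced here):

```python
import base64
import math
import os

raw = os.urandom(32)  # 32 bytes of entropy, as generate_token(length=32) implies
token = base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")
assert len(token) == math.ceil(32 * 4 / 3) == 43
print(token)
```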
|
||||
|
||||
+142
-10
@@ -22,6 +22,7 @@ import logging
|
||||
import random
|
||||
from typing import TYPE_CHECKING, List, Optional, Union
|
||||
|
||||
from synapse.api.constants import ProfileFields
|
||||
from synapse.api.errors import (
|
||||
AuthError,
|
||||
Codes,
|
||||
@@ -31,7 +32,7 @@ from synapse.api.errors import (
|
||||
SynapseError,
|
||||
)
|
||||
from synapse.storage.databases.main.media_repository import LocalMedia, RemoteMedia
|
||||
from synapse.types import JsonDict, Requester, UserID, create_requester
|
||||
from synapse.types import JsonDict, JsonValue, Requester, UserID, create_requester
|
||||
from synapse.util.caches.descriptors import cached
|
||||
from synapse.util.stringutils import parse_and_validate_mxc_uri
|
||||
|
||||
@@ -42,6 +43,8 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
MAX_DISPLAYNAME_LEN = 256
|
||||
MAX_AVATAR_URL_LEN = 1000
|
||||
# Field name length is specced at 255 bytes.
|
||||
MAX_CUSTOM_FIELD_LEN = 255
|
||||
|
||||
|
||||
class ProfileHandler:
|
||||
@@ -83,19 +86,33 @@ class ProfileHandler:
|
||||
|
||||
Returns:
|
||||
A JSON dictionary. For local queries this will include the displayname and avatar_url
|
||||
fields. For remote queries it may contain arbitrary information.
|
||||
fields, if set. For remote queries it may contain arbitrary information.
|
||||
"""
|
||||
target_user = UserID.from_string(user_id)
|
||||
|
||||
if self.hs.is_mine(target_user):
|
||||
profileinfo = await self.store.get_profileinfo(target_user)
|
||||
if profileinfo.display_name is None and profileinfo.avatar_url is None:
|
||||
extra_fields = {}
|
||||
if self.hs.config.experimental.msc4133_enabled:
|
||||
extra_fields = await self.store.get_profile_fields(target_user)
|
||||
|
||||
if (
|
||||
profileinfo.display_name is None
|
||||
and profileinfo.avatar_url is None
|
||||
and not extra_fields
|
||||
):
|
||||
raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)
|
||||
|
||||
return {
|
||||
"displayname": profileinfo.display_name,
|
||||
"avatar_url": profileinfo.avatar_url,
|
||||
}
|
||||
            # Do not include display name or avatar if unset.
            ret = {}
            if profileinfo.display_name is not None:
                ret[ProfileFields.DISPLAYNAME] = profileinfo.display_name
            if profileinfo.avatar_url is not None:
                ret[ProfileFields.AVATAR_URL] = profileinfo.avatar_url
            if extra_fields:
                ret.update(extra_fields)

            return ret
        else:
            try:
                result = await self.federation.make_query(
@@ -399,6 +416,110 @@ class ProfileHandler:

        return True

    async def get_profile_field(
        self, target_user: UserID, field_name: str
    ) -> JsonValue:
        """
        Fetch a user's profile from the database for local users and over federation
        for remote users.

        Args:
            target_user: The user ID to fetch the profile for.
            field_name: The field to fetch the profile for.

        Returns:
            The value for the profile field or None if the field does not exist.
        """
        if self.hs.is_mine(target_user):
            try:
                field_value = await self.store.get_profile_field(
                    target_user, field_name
                )
            except StoreError as e:
                if e.code == 404:
                    raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)
                raise

            return field_value
        else:
            try:
                result = await self.federation.make_query(
                    destination=target_user.domain,
                    query_type="profile",
                    args={"user_id": target_user.to_string(), "field": field_name},
                    ignore_backoff=True,
                )
            except RequestSendFailed as e:
                raise SynapseError(502, "Failed to fetch profile") from e
            except HttpResponseException as e:
                raise e.to_synapse_error()

            return result.get(field_name)

    async def set_profile_field(
        self,
        target_user: UserID,
        requester: Requester,
        field_name: str,
        new_value: JsonValue,
        by_admin: bool = False,
        deactivation: bool = False,
    ) -> None:
        """Set a new profile field for a user.

        Args:
            target_user: the user whose profile is to be changed.
            requester: The user attempting to make this change.
            field_name: The name of the profile field to update.
            new_value: The new field value for this user.
            by_admin: Whether this change was made by an administrator.
            deactivation: Whether this change was made while deactivating the user.
        """
        if not self.hs.is_mine(target_user):
            raise SynapseError(400, "User is not hosted on this homeserver")

        if not by_admin and target_user != requester.user:
            raise AuthError(403, "Cannot set another user's profile")

        await self.store.set_profile_field(target_user, field_name, new_value)

        # Custom fields do not propagate into the user directory *or* rooms.
        profile = await self.store.get_profileinfo(target_user)
        await self._third_party_rules.on_profile_update(
            target_user.to_string(), profile, by_admin, deactivation
        )

    async def delete_profile_field(
        self,
        target_user: UserID,
        requester: Requester,
        field_name: str,
        by_admin: bool = False,
        deactivation: bool = False,
    ) -> None:
        """Delete a field from a user's profile.

        Args:
            target_user: the user whose profile is to be changed.
            requester: The user attempting to make this change.
            field_name: The name of the profile field to remove.
            by_admin: Whether this change was made by an administrator.
            deactivation: Whether this change was made while deactivating the user.
        """
        if not self.hs.is_mine(target_user):
            raise SynapseError(400, "User is not hosted on this homeserver")

        if not by_admin and target_user != requester.user:
            raise AuthError(403, "Cannot delete another user's profile")

        await self.store.delete_profile_field(target_user, field_name)

        # Custom fields do not propagate into the user directory *or* rooms.
        profile = await self.store.get_profileinfo(target_user)
        await self._third_party_rules.on_profile_update(
            target_user.to_string(), profile, by_admin, deactivation
        )

    async def on_profile_query(self, args: JsonDict) -> JsonDict:
        """Handles federation profile query requests."""

@@ -415,13 +536,24 @@ class ProfileHandler:

        just_field = args.get("field", None)

        response = {}
        response: JsonDict = {}
        try:
            if just_field is None or just_field == "displayname":
            if just_field is None or just_field == ProfileFields.DISPLAYNAME:
                response["displayname"] = await self.store.get_profile_displayname(user)

            if just_field is None or just_field == "avatar_url":
            if just_field is None or just_field == ProfileFields.AVATAR_URL:
                response["avatar_url"] = await self.store.get_profile_avatar_url(user)

            if self.hs.config.experimental.msc4133_enabled:
                if just_field is None:
                    response.update(await self.store.get_profile_fields(user))
                elif just_field not in (
                    ProfileFields.DISPLAYNAME,
                    ProfileFields.AVATAR_URL,
                ):
                    response[just_field] = await self.store.get_profile_field(
                        user, just_field
                    )
        except StoreError as e:
            if e.code == 404:
                raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)

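Note: taken together, the three new handlers above are the server-side surface for MSC4133 custom profile fields. The following is a minimal sketch of the intended call flow for a local user; `handler`, `alice`, and `requester` are assumed fixtures, and only the method names and error semantics come from the hunk above.

# Minimal sketch (not Synapse test code): exercising the new field handlers.
async def demo(handler, alice, requester):
    # Store an arbitrary JSON value under a custom, namespaced key.
    await handler.set_profile_field(alice, requester, "u.timezone", "Europe/London")

    # Reading it back returns the stored JSON value; a missing field or
    # profile surfaces as a 404 SynapseError (see get_profile_field above).
    assert await handler.get_profile_field(alice, "u.timezone") == "Europe/London"

    # Removing the field again; both mutations run the third-party rules
    # `on_profile_update` callback but do not touch the user directory.
    await handler.delete_profile_field(alice, requester, "u.timezone")
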
@@ -23,10 +23,9 @@
"""Contains functions for registering clients."""

import logging
from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple
from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple, TypedDict

from prometheus_client import Counter
from typing_extensions import TypedDict

from synapse import types
from synapse.api.constants import (

@@ -47,15 +47,45 @@ logger = logging.getLogger(__name__)
_is_old_twisted = parse_version(twisted.__version__) < parse_version("21")


class _NoTLSESMTPSender(ESMTPSender):
    """Extend ESMTPSender to disable TLS
class _BackportESMTPSender(ESMTPSender):
    """Extend old versions of ESMTPSender to configure TLS.

    Unfortunately, before Twisted 21.2, ESMTPSender doesn't give an easy way to disable
    TLS, so we override its internal method which it uses to generate a context factory.
    Unfortunately, before Twisted 21.2, ESMTPSender doesn't give an easy way to
    disable TLS, or to configure the hostname used for TLS certificate validation.
    This backports the `hostname` parameter for that functionality.
    """

    __hostname: Optional[str]

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        self.__hostname = kwargs.pop("hostname", None)
        super().__init__(*args, **kwargs)

    def _getContextFactory(self) -> Optional[IOpenSSLContextFactory]:
        return None
        if self.context is not None:
            return self.context
        elif self.__hostname is None:
            return None  # disable TLS if hostname is None
        return optionsForClientTLS(self.__hostname)


class _BackportESMTPSenderFactory(ESMTPSenderFactory):
    """An ESMTPSenderFactory for _BackportESMTPSender.

    This backports the `hostname` parameter, to disable or configure TLS.
    """

    __hostname: Optional[str]

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        self.__hostname = kwargs.pop("hostname", None)
        super().__init__(*args, **kwargs)

    def protocol(self, *args: Any, **kwargs: Any) -> ESMTPSender:  # type: ignore
        # this overrides ESMTPSenderFactory's `protocol` attribute, with a Callable
        # instantiating our _BackportESMTPSender, providing the hostname parameter
        return _BackportESMTPSender(*args, **kwargs, hostname=self.__hostname)


async def _sendmail(
@@ -71,6 +101,7 @@ async def _sendmail(
    require_tls: bool = False,
    enable_tls: bool = True,
    force_tls: bool = False,
    tlsname: Optional[str] = None,
) -> None:
    """A simple wrapper around ESMTPSenderFactory, to allow substitution in tests

@@ -88,39 +119,33 @@ async def _sendmail(
        enable_tls: True to enable STARTTLS. If this is False and require_tls is True,
            the request will fail.
        force_tls: True to enable Implicit TLS.
        tlsname: the domain name expected as the TLS certificate's commonname,
            defaults to smtphost.
    """
    msg = BytesIO(msg_bytes)
    d: "Deferred[object]" = Deferred()
    if not enable_tls:
        tlsname = None
    elif tlsname is None:
        tlsname = smtphost

    def build_sender_factory(**kwargs: Any) -> ESMTPSenderFactory:
        return ESMTPSenderFactory(
            username,
            password,
            from_addr,
            to_addr,
            msg,
            d,
            heloFallback=True,
            requireAuthentication=require_auth,
            requireTransportSecurity=require_tls,
            **kwargs,
        )

    factory: IProtocolFactory
    if _is_old_twisted:
        # before twisted 21.2, we have to override the ESMTPSender protocol to disable
        # TLS
        factory = build_sender_factory()

        if not enable_tls:
            factory.protocol = _NoTLSESMTPSender
    else:
        # for twisted 21.2 and later, there is a 'hostname' parameter which we should
        # set to enable TLS.
        factory = build_sender_factory(hostname=smtphost if enable_tls else None)
    factory: IProtocolFactory = (
        _BackportESMTPSenderFactory if _is_old_twisted else ESMTPSenderFactory
    )(
        username,
        password,
        from_addr,
        to_addr,
        msg,
        d,
        heloFallback=True,
        requireAuthentication=require_auth,
        requireTransportSecurity=require_tls,
        hostname=tlsname,
    )

    if force_tls:
        factory = TLSMemoryBIOFactory(optionsForClientTLS(smtphost), True, factory)
        factory = TLSMemoryBIOFactory(optionsForClientTLS(tlsname), True, factory)

    endpoint = HostnameEndpoint(
        reactor, smtphost, smtpport, timeout=30, bindAddress=None
@@ -148,6 +173,7 @@ class SendEmailHandler:
        self._require_transport_security = hs.config.email.require_transport_security
        self._enable_tls = hs.config.email.enable_smtp_tls
        self._force_tls = hs.config.email.force_tls
        self._tlsname = hs.config.email.email_tlsname

        self._sendmail = _sendmail

@@ -227,4 +253,5 @@ class SendEmailHandler:
            require_tls=self._require_transport_security,
            enable_tls=self._enable_tls,
            force_tls=self._force_tls,
            tlsname=self._tlsname,
        )

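Note: with this change the factory choice collapses into a single call: on Twisted < 21.2 the `_BackportESMTPSender(Factory)` consumes the `hostname` keyword itself, while newer Twisted accepts it natively. The `tlsname` defaulting at the top of `_sendmail` reduces to the small helper below (a sketch for illustration only; the helper name is not part of the codebase).

# Sketch of the tlsname defaulting implemented inline in _sendmail above.
from typing import Optional

def effective_tlsname(
    enable_tls: bool, tlsname: Optional[str], smtphost: str
) -> Optional[str]:
    if not enable_tls:
        return None  # TLS disabled: no certificate name to validate
    return tlsname if tlsname is not None else smtphost  # default to the SMTP host

assert effective_tlsname(False, "mail.example.com", "smtp.example.com") is None
assert effective_tlsname(True, None, "smtp.example.com") == "smtp.example.com"
assert effective_tlsname(True, "mail.example.com", "smtp.example.com") == "mail.example.com"
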
@@ -33,17 +33,17 @@ from typing import (
    Mapping,
    NoReturn,
    Optional,
    Protocol,
    Set,
)
from urllib.parse import urlencode

import attr
from typing_extensions import Protocol

from twisted.web.iweb import IRequest
from twisted.web.server import Request

from synapse.api.constants import LoginType
from synapse.api.constants import LoginType, ProfileFields
from synapse.api.errors import Codes, NotFoundError, RedirectException, SynapseError
from synapse.config.sso import SsoAttributeRequirement
from synapse.handlers.device import DeviceHandler
@@ -813,9 +813,10 @@ class SsoHandler:

        # bail if user already has the same avatar
        profile = await self._profile_handler.get_profile(user_id)
        if profile["avatar_url"] is not None:
            server_name = profile["avatar_url"].split("/")[-2]
            media_id = profile["avatar_url"].split("/")[-1]
        if ProfileFields.AVATAR_URL in profile:
            avatar_url_parts = profile[ProfileFields.AVATAR_URL].split("/")
            server_name = avatar_url_parts[-2]
            media_id = avatar_url_parts[-1]
            if self._is_mine_server_name(server_name):
                media = await self._media_repo.store.get_local_media(media_id)  # type: ignore[has-type]
                if media is not None and upload_name == media.upload_name:
@@ -1276,12 +1277,16 @@ def _check_attribute_requirement(
        return False

    # If the requirement is None, the attribute existing is enough.
    if req.value is None:
    if req.value is None and req.one_of is None:
        return True

    values = attributes[req.attribute]
    if req.value in values:
        return True
    if req.one_of:
        for value in req.one_of:
            if value in values:
                return True

    logger.info(
        "SSO attribute %s did not match required value '%s' (was '%s')",

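Note: `_check_attribute_requirement` now matches in three ways: the attribute merely exists (when both `value` and `one_of` are unset), the single `value` is present, or any entry of `one_of` is present. A standalone sketch of that rule, with a stand-in `Req` class for `SsoAttributeRequirement`:

from dataclasses import dataclass
from typing import Dict, List, Optional

@dataclass
class Req:  # stand-in for synapse.config.sso.SsoAttributeRequirement
    attribute: str
    value: Optional[str] = None
    one_of: Optional[List[str]] = None

def matches(req: Req, attributes: Dict[str, List[str]]) -> bool:
    if req.attribute not in attributes:
        return False
    if req.value is None and req.one_of is None:
        return True  # existence of the attribute is enough
    values = attributes[req.attribute]
    if req.value in values:
        return True
    return bool(req.one_of) and any(v in values for v in req.one_of)

assert matches(Req("group", one_of=["staff", "admin"]), {"group": ["staff"]})
assert not matches(Req("group", value="admin"), {"group": ["staff"]})
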
@@ -26,7 +26,13 @@ from typing import TYPE_CHECKING, List, Optional, Set, Tuple
from twisted.internet.interfaces import IDelayedCall

import synapse.metrics
from synapse.api.constants import EventTypes, HistoryVisibility, JoinRules, Membership
from synapse.api.constants import (
    EventTypes,
    HistoryVisibility,
    JoinRules,
    Membership,
    ProfileFields,
)
from synapse.api.errors import Codes, SynapseError
from synapse.handlers.state_deltas import MatchChange, StateDeltasHandler
from synapse.metrics.background_process_metrics import run_as_background_process
@@ -161,7 +167,7 @@ class UserDirectoryHandler(StateDeltasHandler):
        non_spammy_users = []
        for user in results["results"]:
            if not await self._spam_checker_module_callbacks.check_username_for_spam(
                user
                user, user_id
            ):
                non_spammy_users.append(user)
        results["results"] = non_spammy_users
@@ -756,6 +762,10 @@ class UserDirectoryHandler(StateDeltasHandler):

        await self.store.update_profile_in_user_dir(
            user_id,
            display_name=non_null_str_or_none(profile.get("displayname")),
            avatar_url=non_null_str_or_none(profile.get("avatar_url")),
            display_name=non_null_str_or_none(
                profile.get(ProfileFields.DISPLAYNAME)
            ),
            avatar_url=non_null_str_or_none(
                profile.get(ProfileFields.AVATAR_URL)
            ),
        )

@@ -19,6 +19,7 @@
#
#

import logging
import random
from types import TracebackType
from typing import (
@@ -269,6 +270,10 @@ class WaitingLock:
    def _get_next_retry_interval(self) -> float:
        next = self._retry_interval
        self._retry_interval = max(5, next * 2)
        if self._retry_interval > 5 * 2**7:  # ~10 minutes
            logging.warning(
                f"Lock timeout is getting excessive: {self._retry_interval}s. There may be a deadlock."
            )
        return next * random.uniform(0.9, 1.1)


@@ -344,4 +349,8 @@ class WaitingMultiLock:
    def _get_next_retry_interval(self) -> float:
        next = self._retry_interval
        self._retry_interval = max(5, next * 2)
        if self._retry_interval > 5 * 2**7:  # ~10 minutes
            logging.warning(
                f"Lock timeout is getting excessive: {self._retry_interval}s. There may be a deadlock."
            )
        return next * random.uniform(0.9, 1.1)

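Note: `^` is bitwise XOR in Python, so the threshold written as `5 * 2 ^ 7` would evaluate to `(5 * 2) ^ 7 == 13`; the value intended by the "~10 minutes" comment is `5 * 2**7 == 640` seconds, as shown above. A worked sketch of the doubling schedule (the random jitter is omitted):

# The interval doubles with a floor of 5s: 5, 10, 20, ..., so it crosses
# the 640s (~10 minute) warning threshold after roughly eight doublings.
interval = 0.1
history = []
for _ in range(10):
    interval = max(5, interval * 2)
    history.append(interval)
print(history)  # [5, 10, 20, 40, 80, 160, 320, 640, 1280, 2560]
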
@@ -31,6 +31,7 @@ from typing import (
    List,
    Mapping,
    Optional,
    Protocol,
    Tuple,
    Union,
)
@@ -40,8 +41,7 @@ import treq
from canonicaljson import encode_canonical_json
from netaddr import AddrFormatError, IPAddress, IPSet
from prometheus_client import Counter
from typing_extensions import Protocol
from zope.interface import implementer, provider
from zope.interface import implementer

from OpenSSL import SSL
from OpenSSL.SSL import VERIFY_NONE
@@ -225,7 +225,7 @@ class _IPBlockingResolver:
                    recv.addressResolved(address)
                recv.resolutionComplete()

        @provider(IResolutionReceiver)
        @implementer(IResolutionReceiver)
        class EndpointReceiver:
            @staticmethod
            def resolutionBegan(resolutionInProgress: IHostResolution) -> None:
@@ -239,8 +239,9 @@ class _IPBlockingResolver:
            def resolutionComplete() -> None:
                _callback()

        endpoint_receiver_wrapper = EndpointReceiver()
        self._reactor.nameResolver.resolveHostName(
            EndpointReceiver, hostname, portNumber=portNumber
            endpoint_receiver_wrapper, hostname, portNumber=portNumber
        )

        return recv

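Note: the resolver fix swaps `@provider` for `@implementer` and passes an instance instead of the class object: `@provider` declares that the class *object* provides the interface, while `@implementer` declares that *instances* do, which is what `resolveHostName` expects to receive. A minimal demonstration of the distinction:

from zope.interface import Interface, implementer

class IGreeter(Interface):
    """A marker interface for the demonstration."""

@implementer(IGreeter)
class Greeter:
    pass

assert IGreeter.providedBy(Greeter())    # instances provide the interface
assert not IGreeter.providedBy(Greeter)  # the class object itself does not
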
@@ -34,6 +34,7 @@ from typing import (
    Dict,
    Generic,
    List,
    Literal,
    Optional,
    TextIO,
    Tuple,
@@ -48,7 +49,6 @@ import treq
from canonicaljson import encode_canonical_json
from prometheus_client import Counter
from signedjson.sign import sign_json
from typing_extensions import Literal

from twisted.internet import defer
from twisted.internet.error import DNSLookupError

@@ -21,7 +21,7 @@
import logging
import random
import re
from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple
from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple, Union
from urllib.parse import urlparse
from urllib.request import (  # type: ignore[attr-defined]
    getproxies_environment,
@@ -150,6 +150,12 @@ class ProxyAgent(_AgentBase):
        http_proxy = proxies["http"].encode() if "http" in proxies else None
        https_proxy = proxies["https"].encode() if "https" in proxies else None
        no_proxy = proxies["no"] if "no" in proxies else None
        logger.debug(
            "Using proxy settings: http_proxy=%s, https_proxy=%s, no_proxy=%s",
            http_proxy,
            https_proxy,
            no_proxy,
        )

        self.http_proxy_endpoint, self.http_proxy_creds = http_proxy_endpoint(
            http_proxy, self.proxy_reactor, contextFactory, **self._endpoint_kwargs
@@ -351,7 +357,9 @@ def http_proxy_endpoint(
    proxy: Optional[bytes],
    reactor: IReactorCore,
    tls_options_factory: Optional[IPolicyForHTTPS],
    **kwargs: object,
    timeout: float = 30,
    bindAddress: Optional[Union[bytes, str, tuple[Union[bytes, str], int]]] = None,
    attemptDelay: Optional[float] = None,
) -> Tuple[Optional[IStreamClientEndpoint], Optional[ProxyCredentials]]:
    """Parses an http proxy setting and returns an endpoint for the proxy

@@ -382,12 +390,15 @@ def http_proxy_endpoint(
    # 3.9+) on scheme-less proxies, e.g. host:port.
    scheme, host, port, credentials = parse_proxy(proxy)

    proxy_endpoint = HostnameEndpoint(reactor, host, port, **kwargs)
    proxy_endpoint = HostnameEndpoint(
        reactor, host, port, timeout, bindAddress, attemptDelay
    )

    if scheme == b"https":
        if tls_options_factory:
            tls_options = tls_options_factory.creatorForNetloc(host, port)
            proxy_endpoint = wrapClientTLS(tls_options, proxy_endpoint)
            wrapped_proxy_endpoint = wrapClientTLS(tls_options, proxy_endpoint)
            return wrapped_proxy_endpoint, credentials
        else:
            raise RuntimeError(
                f"No TLS options for a https connection via proxy {proxy!s}"

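Note: replacing `**kwargs: object` with explicit `timeout`, `bindAddress`, and `attemptDelay` parameters mirrors `HostnameEndpoint`'s own signature, so call sites are type-checked instead of forwarding an opaque kwargs bag. A hypothetical call site (the proxy URL is made up):

from twisted.internet import reactor

from synapse.http.proxyagent import http_proxy_endpoint

# The keyword arguments below are now declared parameters rather than
# entries in an untyped **kwargs dict.
endpoint, credentials = http_proxy_endpoint(
    b"http://proxy.example.com:8888",
    reactor,
    tls_options_factory=None,
    timeout=15,
)
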
@@ -89,7 +89,7 @@ class ReplicationEndpointFactory:
                location_config.port,
            )
            if scheme == "https":
                endpoint = wrapClientTLS(
                wrapped_endpoint = wrapClientTLS(
                    # The 'port' argument below isn't actually used by the function
                    self.context_factory.creatorForNetloc(
                        location_config.host.encode("utf-8"),
@@ -97,6 +97,8 @@ class ReplicationEndpointFactory:
                    ),
                    endpoint,
                )
                return wrapped_endpoint

            return endpoint
        elif isinstance(location_config, InstanceUnixLocationConfig):
            return UNIXClientEndpoint(self.reactor, location_config.path)

@@ -39,6 +39,7 @@ from typing import (
    List,
    Optional,
    Pattern,
    Protocol,
    Tuple,
    Union,
)
@@ -46,7 +47,6 @@ from typing import (
import attr
import jinja2
from canonicaljson import encode_canonical_json
from typing_extensions import Protocol
from zope.interface import implementer

from twisted.internet import defer, interfaces

@@ -28,6 +28,7 @@ from http import HTTPStatus
from typing import (
    TYPE_CHECKING,
    List,
    Literal,
    Mapping,
    Optional,
    Sequence,
@@ -37,8 +38,6 @@ from typing import (
    overload,
)

from typing_extensions import Literal

from twisted.web.server import Request

from synapse._pydantic_compat import (

@@ -40,6 +40,7 @@ from typing import (
    Any,
    Awaitable,
    Callable,
    Literal,
    Optional,
    Tuple,
    Type,
@@ -49,7 +50,7 @@ from typing import (
)

import attr
from typing_extensions import Literal, ParamSpec
from typing_extensions import ParamSpec

from twisted.internet import defer, threads
from twisted.python.threadpool import ThreadPool

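Note: the repeated import churn in these hunks follows stdlib history: `Literal`, `Protocol`, and `TypedDict` joined the standard `typing` module in Python 3.8, so the `typing_extensions` re-exports can go, whereas `ParamSpec` only landed in `typing` with 3.10 and is therefore kept on `typing_extensions` above. A version-gated sketch of the same idea:

import sys

from typing import Literal, Protocol, TypedDict  # stdlib since Python 3.8

if sys.version_info >= (3, 10):
    from typing import ParamSpec  # stdlib only from Python 3.10
else:
    from typing_extensions import ParamSpec
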
@@ -19,8 +19,7 @@
#
#
import logging

from typing_extensions import Literal
from typing import Literal


class MetadataFilter(logging.Filter):

@@ -20,13 +20,10 @@
#

import logging
from types import TracebackType
from typing import Optional, Type
from typing import Optional

from opentracing import Scope, ScopeManager, Span

import twisted

from synapse.logging.context import (
    LoggingContext,
    current_context,
@@ -112,9 +109,6 @@ class _LogContextScope(Scope):
    """
    A custom opentracing scope, associated with a LogContext

    * filters out _DefGen_Return exceptions which arise from calling
      `defer.returnValue` in Twisted code

    * When the scope is closed, the logcontext's active scope is reset to None.
      and - if enter_logcontext was set - the logcontext is finished too.
    """
@@ -146,17 +140,6 @@ class _LogContextScope(Scope):
        self._finish_on_close = finish_on_close
        self._enter_logcontext = enter_logcontext

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        if exc_type == twisted.internet.defer._DefGen_Return:
            # filter out defer.returnValue() calls
            exc_type = value = traceback = None
        super().__exit__(exc_type, value, traceback)

    def __str__(self) -> str:
        return f"Scope<{self.span}>"


@@ -23,11 +23,10 @@ import ctypes
import logging
import os
import re
from typing import Iterable, Optional, overload
from typing import Iterable, Literal, Optional, overload

import attr
from prometheus_client import REGISTRY, Metric
from typing_extensions import Literal

from synapse.metrics import GaugeMetricFamily
from synapse.metrics._types import Collector

@@ -45,6 +45,7 @@ from twisted.internet.interfaces import IDelayedCall
from twisted.web.resource import Resource

from synapse.api import errors
from synapse.api.constants import ProfileFields
from synapse.api.errors import SynapseError
from synapse.api.presence import UserPresenceState
from synapse.config import ConfigError
@@ -1086,7 +1087,10 @@ class ModuleApi:
            content = {}

        # Set the profile if not already done by the module.
        if "avatar_url" not in content or "displayname" not in content:
        if (
            ProfileFields.AVATAR_URL not in content
            or ProfileFields.DISPLAYNAME not in content
        ):
            try:
                # Try to fetch the user's profile.
                profile = await self._hs.get_profile_handler().get_profile(
@@ -1095,8 +1099,8 @@ class ModuleApi:
            except SynapseError as e:
                # If the profile couldn't be found, use default values.
                profile = {
                    "displayname": target_user_id.localpart,
                    "avatar_url": None,
                    ProfileFields.DISPLAYNAME: target_user_id.localpart,
                    ProfileFields.AVATAR_URL: None,
                }

                if e.code != 404:
@@ -1109,11 +1113,9 @@ class ModuleApi:
                )

        # Set the profile where it needs to be set.
        if "avatar_url" not in content:
            content["avatar_url"] = profile["avatar_url"]

        if "displayname" not in content:
            content["displayname"] = profile["displayname"]
        for field_name in [ProfileFields.AVATAR_URL, ProfileFields.DISPLAYNAME]:
            if field_name not in content and field_name in profile:
                content[field_name] = profile[field_name]

        event_id, _ = await self._hs.get_room_member_handler().update_membership(
            requester=requester,
@@ -1844,6 +1846,10 @@ class ModuleApi:
            deactivation=deactivation,
        )

    def get_current_time_msec(self) -> int:
        """Returns the current server time in milliseconds."""
        return self._clock.time_msec()


class PublicRoomListManager:
    """Contains methods for adding to, removing from and querying whether a room

@@ -19,6 +19,7 @@
#
#

import functools
import inspect
import logging
from typing import (
@@ -28,14 +29,13 @@ from typing import (
    Callable,
    Collection,
    List,
    Literal,
    Optional,
    Tuple,
    Union,
    cast,
)

# `Literal` appears with Python 3.8.
from typing_extensions import Literal

import synapse
from synapse.api.errors import Codes
from synapse.logging.opentracing import trace
@@ -168,7 +168,10 @@ USER_MAY_PUBLISH_ROOM_CALLBACK = Callable[
        ]
    ],
]
CHECK_USERNAME_FOR_SPAM_CALLBACK = Callable[[UserProfile], Awaitable[bool]]
CHECK_USERNAME_FOR_SPAM_CALLBACK = Union[
    Callable[[UserProfile], Awaitable[bool]],
    Callable[[UserProfile, str], Awaitable[bool]],
]
LEGACY_CHECK_REGISTRATION_FOR_SPAM_CALLBACK = Callable[
    [
        Optional[dict],
@@ -293,6 +296,7 @@ def load_legacy_spam_checkers(hs: "synapse.server.HomeServer") -> None:
                "Bad signature for callback check_registration_for_spam",
            )

        @functools.wraps(wrapped_func)
        def run(*args: Any, **kwargs: Any) -> Awaitable:
            # Assertion required because mypy can't prove we won't change `f`
            # back to `None`. See
@@ -716,7 +720,9 @@ class SpamCheckerModuleApiCallbacks:

        return self.NOT_SPAM

    async def check_username_for_spam(self, user_profile: UserProfile) -> bool:
    async def check_username_for_spam(
        self, user_profile: UserProfile, requester_id: str
    ) -> bool:
        """Checks if a user ID or display name are considered "spammy" by this server.

        If the server considers a username spammy, then it will not be included in
@@ -727,15 +733,33 @@ class SpamCheckerModuleApiCallbacks:
                * user_id
                * display_name
                * avatar_url
            requester_id: The user ID of the user making the user directory search request.

        Returns:
            True if the user is spammy.
        """
        for callback in self._check_username_for_spam_callbacks:
            with Measure(self.clock, f"{callback.__module__}.{callback.__qualname__}"):
                checker_args = inspect.signature(callback)
                # Make a copy of the user profile object to ensure the spam checker cannot
                # modify it.
                res = await delay_cancellation(callback(user_profile.copy()))
                # Also ensure backwards compatibility with spam checker callbacks
                # that don't expect the requester_id argument.
                if len(checker_args.parameters) == 2:
                    callback_with_requester_id = cast(
                        Callable[[UserProfile, str], Awaitable[bool]], callback
                    )
                    res = await delay_cancellation(
                        callback_with_requester_id(user_profile.copy(), requester_id)
                    )
                else:
                    callback_without_requester_id = cast(
                        Callable[[UserProfile], Awaitable[bool]], callback
                    )
                    res = await delay_cancellation(
                        callback_without_requester_id(user_profile.copy())
                    )

            if res:
                return True


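Note: the dispatcher above counts declared parameters with `inspect.signature` so that modules written against the old one-argument callback keep working. A sketch of the two accepted shapes (the checker bodies are made up):

import inspect

async def legacy_checker(user_profile) -> bool:
    # Old shape: only sees the candidate directory entry.
    return "spam" in (user_profile.get("display_name") or "")

async def new_checker(user_profile, requester_id: str) -> bool:
    # New shape: also sees who is running the directory search.
    return requester_id.endswith(":blocked.example.com")

# This is the same test the dispatcher applies before choosing a call shape.
assert len(inspect.signature(legacy_checker).parameters) == 1
assert len(inspect.signature(new_checker).parameters) == 2
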
@@ -371,7 +371,7 @@ class BulkPushRuleEvaluator:
            "Deferred[Tuple[int, Tuple[dict, Optional[int]], Dict[str, Dict[str, JsonValue]], Mapping[str, ProfileInfo]]]",
            gather_results(
                (
                    run_in_background(  # type: ignore[call-arg]
                    run_in_background(  # type: ignore[call-overload]
                        self.store.get_number_joined_users_in_room,
                        event.room_id,  # type: ignore[arg-type]
                    ),
@@ -382,10 +382,10 @@ class BulkPushRuleEvaluator:
                        event_id_to_event,
                    ),
                    run_in_background(self._related_events, event),
                    run_in_background(  # type: ignore[call-arg]
                    run_in_background(  # type: ignore[call-overload]
                        self.store.get_subset_users_in_room_with_profiles,
                        event.room_id,  # type: ignore[arg-type]
                        rules_by_user.keys(),  # type: ignore[arg-type]
                        event.room_id,
                        rules_by_user.keys(),
                    ),
                ),
                consumeErrors=True,

@@ -18,9 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import List, Optional

from typing_extensions import TypedDict
from typing import List, Optional, TypedDict


class EmailReason(TypedDict, total=False):

@@ -29,7 +29,7 @@ from synapse.rest.client import (
    account_validity,
    appservice_ping,
    auth,
    auth_issuer,
    auth_metadata,
    capabilities,
    delayed_events,
    devices,
@@ -121,7 +121,7 @@ CLIENT_SERVLET_FUNCTIONS: Tuple[RegisterServletsFunc, ...] = (
    mutual_rooms.register_servlets,
    login_token_request.register_servlets,
    rendezvous.register_servlets,
    auth_issuer.register_servlets,
    auth_metadata.register_servlets,
)

SERVLET_GROUPS: Dict[str, Iterable[RegisterServletsFunc]] = {
@@ -187,7 +187,7 @@ class ClientRestResource(JsonResource):
            mutual_rooms.register_servlets,
            login_token_request.register_servlets,
            rendezvous.register_servlets,
            auth_issuer.register_servlets,
            auth_metadata.register_servlets,
        ]:
            continue


@@ -107,6 +107,8 @@ from synapse.rest.admin.users import (
    UserAdminServlet,
    UserByExternalId,
    UserByThreePid,
    UserInvitesCount,
    UserJoinedRoomCount,
    UserMembershipRestServlet,
    UserRegisterServlet,
    UserReplaceMasterCrossSigningKeyRestServlet,
@@ -323,6 +325,8 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
    UserByThreePid(hs).register(http_server)
    RedactUser(hs).register(http_server)
    RedactUserStatus(hs).register(http_server)
    UserInvitesCount(hs).register(http_server)
    UserJoinedRoomCount(hs).register(http_server)

    DeviceRestServlet(hs).register(http_server)
    DevicesRestServlet(hs).register(http_server)

@@ -50,8 +50,10 @@ class EventReportsRestServlet(RestServlet):
    The parameters `from` and `limit` are required only for pagination.
    By default, a `limit` of 100 is used.
    The parameter `dir` can be used to define the order of results.
    The parameter `user_id` can be used to filter by user id.
    The parameter `room_id` can be used to filter by room id.
    The `user_id` query parameter filters by the user ID of the reporter of the event.
    The `room_id` query parameter filters by room id.
    The `event_sender_user_id` query parameter can be used to filter by the user id
    of the sender of the reported event.
    Returns:
        A list of reported events and an integer representing the total number of
        reported events that exist given this query
@@ -71,6 +73,7 @@ class EventReportsRestServlet(RestServlet):
        direction = parse_enum(request, "dir", Direction, Direction.BACKWARDS)
        user_id = parse_string(request, "user_id")
        room_id = parse_string(request, "room_id")
        event_sender_user_id = parse_string(request, "event_sender_user_id")

        if start < 0:
            raise SynapseError(
@@ -87,7 +90,7 @@ class EventReportsRestServlet(RestServlet):
            )

        event_reports, total = await self._store.get_event_reports_paginate(
            start, limit, direction, user_id, room_id
            start, limit, direction, user_id, room_id, event_sender_user_id
        )
        ret = {"event_reports": event_reports, "total": total}
        if (start + limit) < total:

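Note: with the new filter, an admin query that distinguishes the reporter from the sender of the reported event might look like this (the user ID and base URL are made up):

from urllib.parse import urlencode

query = urlencode(
    {
        "from": 0,
        "limit": 10,
        "dir": "b",
        # Filters on the *sender* of the reported event; `user_id`
        # still filters on the *reporter*.
        "event_sender_user_id": "@spammer:example.com",
    }
)
url = f"https://synapse.example.com/_synapse/admin/v1/event_reports?{query}"
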
@@ -23,6 +23,7 @@ from http import HTTPStatus
from typing import TYPE_CHECKING, List, Optional, Tuple, cast

import attr
from immutabledict import immutabledict

from synapse.api.constants import Direction, EventTypes, JoinRules, Membership
from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError
@@ -463,7 +464,18 @@ class RoomStateRestServlet(RestServlet):
        if not room:
            raise NotFoundError("Room not found")

        event_ids = await self._storage_controllers.state.get_current_state_ids(room_id)
        state_filter = None
        type = parse_string(request, "type")

        if type:
            state_filter = StateFilter(
                types=immutabledict({type: None}),
                include_others=False,
            )

        event_ids = await self._storage_controllers.state.get_current_state_ids(
            room_id, state_filter
        )
        events = await self.store.get_events(event_ids.values())
        now = self.clock.time_msec()
        room_state = await self._event_serializer.serialize_events(events.values(), now)

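Note: a `types` mapping of `{event_type: None}` means "every state_key under that event type", and `include_others=False` drops all other event types before any events are loaded, so the narrowing happens at the storage layer rather than after serialization. Constructed standalone (the import path follows current Synapse; `m.room.member` is an arbitrary example):

from immutabledict import immutabledict

from synapse.types.state import StateFilter

# Matches every piece of m.room.member state and nothing else.
state_filter = StateFilter(
    types=immutabledict({"m.room.member": None}),
    include_others=False,
)
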
Some files were not shown because too many files have changed in this diff.