Compare commits

264 commits (`anoa/modul…` → `shay/exper…`)
| SHA1 |
|---|
| 86ec83456e |
| b99dab3e3f |
| a767f1c8a9 |
| 2cb41822cc |
| 9155b82c64 |
| 7568c726d3 |
| e156b84c3f |
| aea7cbd48c |
| 7e6ad62c49 |
| 86d541f37c |
| d3bd03559b |
| ab4535b608 |
| 266d287165 |
| 64a11fb61f |
| 4b4e0dc3ce |
| 2bfe3f0b81 |
| 6b7da31221 |
| 58a07f0c3f |
| 058c6269f3 |
| 0a18aa236d |
| db093df5eb |
| 245d34bdcc |
| 9a87895b59 |
| 28bceef84e |
| 36df9c5e36 |
| a0f53afd62 |
| ad141efb47 |
| 7c95b65873 |
| e46d5f3586 |
| 83e7fa5eee |
| 2e59e97ebd |
| ded8f3d349 |
| cc872eaf16 |
| 5f8822854d |
| 1d6140ec8a |
| 7be05df0b1 |
| 8aee823393 |
| 28ac1a1a91 |
| fc3a878220 |
| 3b837d856c |
| 9890f23469 |
| a7b3e9ce65 |
| 04e79e6a18 |
| 0e8aa2a1b2 |
| e8c571b1ca |
| e53a8a5baf |
| 4de271a7fc |
| 1c0e98717b |
| ca6bda2f57 |
| 60d59af300 |
| 7b41966be9 |
| 6aca4e7cb8 |
| 07b1c70d6b |
| 15dd3727e0 |
| e5f33c58cc |
| 842eb40e45 |
| f9e7a0a3a4 |
| 51769a9b70 |
| ca3e15bdd4 |
| d3cc11dbdf |
| 4291c660da |
| 4aea2dee87 |
| 1739ce698a |
| 0d61d3d3bd |
| fea933ff1e |
| 3b853b18b5 |
| 0da7cceae9 |
| fcc943d552 |
| 229fe1d197 |
| 1046184f35 |
| 89f6fb0d5a |
| eb6f8dc215 |
| 57aeeb308b |
| 6efa674004 |
| a346b43837 |
| 6b2f2bd276 |
| 486c059479 |
| 3e95c19911 |
| 301b4156d5 |
| 247e6a8a78 |
| e2e9b545ff |
| 9900f7c231 |
| 710502c6d8 |
| 8e9739449d |
| b39b02c26e |
| c34791ef5b |
| c55293c230 |
| 8b3a502996 |
| ea5c3ede4f |
| 19141b9432 |
| 625ebbf92d |
| 62e27ceb89 |
| 8b4fb64f1e |
| c306fdeb38 |
| 6e32ecf62a |
| 197fbb123b |
| 5e024a0645 |
| ae69d69525 |
| cb8e274c07 |
| 2f144dcdee |
| ce00710303 |
| aec639e3e3 |
| 929797d939 |
| e12d788bb7 |
| d935b806a5 |
| 838de27666 |
| 745704ca69 |
| 3a82433ccf |
| efab118251 |
| 49482222ca |
| fce59ca5a1 |
| 0475cae3ac |
| c9326140dc |
| 8a47d6e3a6 |
| 24b61f32ff |
| e4a25d022c |
| b5192355f6 |
| dabbb94faf |
| de4390cd40 |
| 4af0aec54d |
| d751f65e71 |
| edae20f926 |
| 38272be037 |
| 2503126d52 |
| c9723a1c1f |
| be36600327 |
| 253e86a72e |
| 66ad1b8984 |
| c1b7da69cc |
| c94307adfe |
| 0bc1061119 |
| ce40330de1 |
| 8e0a3428d7 |
| 70781d3691 |
| 3dd72b924e |
| d07d255830 |
| 3ad221ea40 |
| d5cc911167 |
| 61251275fe |
| b5355dfde8 |
| d62076003d |
| e708a33cd9 |
| 485b9fdefb |
| 72b43bec8b |
| edf046ece7 |
| ec6430bad8 |
| 83649b891d |
| 6eb3edec47 |
| 6b23d74ad1 |
| 6d103373e2 |
| 735e4d1f9d |
| 79d2e2e79c |
| 89a71e7390 |
| c0772b4461 |
| 8aa121c2be |
| cf2f2934ad |
| 56efa9b167 |
| 9b2ab506c5 |
| 84b06fc893 |
| 675ff0d5d0 |
| 157092d97a |
| 6204c3663e |
| 72d2ceaa9a |
| 2a234b788e |
| 6f68e32bfb |
| 91c3f32673 |
| ae4acda1bb |
| d9f694932c |
| a3bad89d57 |
| 9228ae633f |
| 9d641d88b7 |
| f0d8f66eaa |
| 5350b5d04d |
| 78cdb72cd6 |
| d0541e36c0 |
| 753d1d9cde |
| 5282ba1e2b |
| 57481ca694 |
| 8a47bf13ef |
| 2e936afd5f |
| 4c8ada3904 |
| 9f7d6c6bc1 |
| bd4d958aaf |
| 96f163d932 |
| 4fc85e5a92 |
| 7a892ce793 |
| 7d3ea4886c |
| 316044d6fa |
| fae4a2c066 |
| 1ad142782a |
| 43411a0fd8 |
| ce00e57a2a |
| d5324ee111 |
| 5f7c908280 |
| 5b70f240cf |
| 68a6717312 |
| e6af49fbea |
| 98fd558382 |
| 3b0083c92a |
| cabe4a3005 |
| 7f02fafa28 |
| 7655bc0542 |
| 4b8c9c340c |
| 1bc9985eb7 |
| 72f3f23c4d |
| b32014578a |
| 8f2a3cbb70 |
| a9216edbaa |
| 882911a863 |
| 9b1f99ba6b |
| 1bc4feb6c9 |
| 527512b811 |
| 9f5d7d5ba2 |
| 1e1c220084 |
| 72832a6158 |
| 96bcc5d902 |
| ec9224bf9a |
| b6aef59334 |
| f11fe931f5 |
| 827f198177 |
| a5fb382a29 |
| 5ab7146e19 |
| 63e25010d6 |
| 25006acc17 |
| f75a041f59 |
| eee26138fe |
| 099b69fb1c |
| 1870b44d23 |
| 2cfa6a3001 |
| 14d8d41658 |
| 3d70cc393f |
| 66fc166b96 |
| afb216c202 |
| b0a0fb5c97 |
| 1f5473465d |
| 4953cd71df |
| f54f877f27 |
| 3bf973edc7 |
| 121fce7500 |
| 63d87c08c8 |
| d0fe417f5c |
| de92fb6a28 |
| 003a25ae5c |
| 8b1af08c6e |
| e7b559d2ca |
| a1c9869394 |
| 5e21e15f96 |
| edcf938173 |
| c071cd5a0e |
| d4eba4409f |
| 408f60540f |
| 023f215c68 |
| f167b35de9 |
| 6326d744c9 |
| ff155f7891 |
| 4bb26c95a9 |
| e157c63f68 |
| ce54477f6f |
| caf43c3d7c |
| 3d060eae6c |
| e7c3832ba6 |
| be4ea209e8 |
| 88efc75bab |
| 9418344db4 |
@@ -35,9 +35,9 @@ sed -i \
 # compatible (as far the package metadata declares, anyway); pip's package resolver
 # is more lax.
 #
-# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
-# toml file. This means we don't have to ensure compatibility between old deps and
-# dev tools.
+# Rather than `poetry install --no-dev`, we drop all dev dependencies and the dev-docs
+# group from the toml file. This means we don't have to ensure compatibility between
+# old deps and dev tools.
 
 pip install toml wheel
 
@@ -47,6 +47,7 @@ with open('pyproject.toml', 'r') as f:
     data = toml.loads(f.read())
 
 del data['tool']['poetry']['dev-dependencies']
+del data['tool']['poetry']['group']['dev-docs']
 
 with open('pyproject.toml', 'w') as f:
     toml.dump(data, f)

@@ -9,16 +9,6 @@ set -eu
 alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
 alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'
 
-block Set Go Version
-# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
-# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
-
-# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
-echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
-# Add the Go path to the PATH: We need this so we can call gotestfmt
-echo "~/go/bin" >> $GITHUB_PATH
-endblock
-
 block Install Complement Dependencies
 sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
 go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
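For orientation: the first two hunks above edit an inline Python program that rewrites `pyproject.toml` before an old-deps `pip install`, and the last hunk drops the script's manual Go 1.17 setup (the workflow diffs further down add `- uses: actions/setup-go@v4` steps instead). A minimal self-contained sketch of what the rewritten inline program now does, using the `toml` package installed by the `pip install toml wheel` line above:

```python
# Sketch: strip Poetry's dev dependencies (and, after this change, the
# dev-docs group) from pyproject.toml, so that a plain `pip install` only
# has to resolve the runtime dependencies with pip's laxer resolver.
import toml

with open("pyproject.toml", "r") as f:
    data = toml.loads(f.read())

# Drop everything that is only needed for development.
del data["tool"]["poetry"]["dev-dependencies"]
del data["tool"]["poetry"]["group"]["dev-docs"]

with open("pyproject.toml", "w") as f:
    toml.dump(data, f)
```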
.github/ISSUE_TEMPLATE/BUG_REPORT.yml (vendored, 2 lines changed)

@@ -129,7 +129,7 @@ body:
     attributes:
       label: Relevant log output
       description: |
-        Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
+        Please copy and paste any relevant log output as text (not images), ideally at INFO or DEBUG log level.
         This will be automatically formatted into code, so there is no need for backticks (`\``).
 
         Please be careful to remove any personal or private data.
.github/workflows/docker.yml (vendored, 12 lines changed)

@@ -10,6 +10,7 @@ on:
 
 permissions:
   contents: read
+  packages: write
 
 jobs:
   build:
@@ -34,11 +35,20 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
+      - name: Log in to GHCR
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
       - name: Calculate docker image tag
         id: set-tag
         uses: docker/metadata-action@master
         with:
-          images: matrixdotorg/synapse
+          images: |
+            docker.io/matrixdotorg/synapse
+            ghcr.io/matrix-org/synapse
           flavor: |
             latest=false
           tags: |
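This is the change described in CHANGES.md below as mirroring images to the GitHub Container Registry. Once a tagged build has been published, either registry should serve the same image; a sketch (the tag shown is illustrative):

```sh
# The same image is now pushed to both registries (illustrative tag):
docker pull docker.io/matrixdotorg/synapse:v1.83.0
docker pull ghcr.io/matrix-org/synapse:v1.83.0
```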
.github/workflows/docs-pr-netlify.yaml (vendored, 2 lines changed)

@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@5e780fc7bbd0cac69fc73271ed86edf5dcb72d67 # v2.26.0
+        uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0
        with:
          workflow: docs-pr.yaml
          run_id: ${{ github.event.workflow_run.id }}
.github/workflows/docs.yaml (vendored, 79 lines changed)

@@ -13,25 +13,10 @@ on:
   workflow_dispatch:
 
 jobs:
-  pages:
-    name: GitHub Pages
+  pre:
+    name: Calculate variables for GitHub Pages deployment
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
 
-      - name: Setup mdbook
-        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
-        with:
-          mdbook-version: '0.4.17'
-
-      - name: Build the documentation
-        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
-        # However, we're using docs/README.md for other purposes and need to pick a new page
-        # as the default. Let's opt for the welcome page instead.
-        run: |
-          mdbook build
-          cp book/welcome_and_overview.html book/index.html
-
       # Figure out the target directory.
       #
       # The target directory depends on the name of the branch
@@ -55,11 +40,65 @@ jobs:
 
           # finally, set the 'branch-version' var.
           echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
+
+    outputs:
+      branch-version: ${{ steps.vars.outputs.branch-version }}
+
+  ################################################################################
+  pages-docs:
+    name: GitHub Pages
+    runs-on: ubuntu-latest
+    needs:
+      - pre
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup mdbook
+        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
+        with:
+          mdbook-version: '0.4.17'
+
+      - name: Build the documentation
+        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
+        # However, we're using docs/README.md for other purposes and need to pick a new page
+        # as the default. Let's opt for the welcome page instead.
+        run: |
+          mdbook build
+          cp book/welcome_and_overview.html book/index.html
 
       # Deploy to the target directory.
       - name: Deploy to gh pages
-        uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2
+        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
-          destination_dir: ./${{ steps.vars.outputs.branch-version }}
+          destination_dir: ./${{ needs.pre.outputs.branch-version }}
+
+  ################################################################################
+  pages-devdocs:
+    name: GitHub Pages (developer docs)
+    runs-on: ubuntu-latest
+    needs:
+      - pre
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: "Set up Sphinx"
+        uses: matrix-org/setup-python-poetry@v1
+        with:
+          python-version: "3.x"
+          poetry-version: "1.3.2"
+          groups: "dev-docs"
+          extras: ""
+
+      - name: Build the documentation
+        run: |
+          cd dev-docs
+          poetry run make html
+
+      # Deploy to the target directory.
+      - name: Deploy to gh pages
+        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          publish_dir: ./dev-docs/_build/html
+          destination_dir: ./dev-docs/${{ needs.pre.outputs.branch-version }}
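The restructuring above splits one `pages` job into a shared `pre` job plus two deploy jobs; the branch-version value now crosses a job boundary, so it moves from a step output (`steps.vars.outputs.*`) to a job output (`needs.pre.outputs.*`). A minimal sketch of that GitHub Actions pattern, with illustrative job and value names rather than the workflow's own:

```yaml
# Sketch of the job-output pattern the split relies on: a step writes to
# $GITHUB_OUTPUT, the job re-exports it under `outputs:`, and any job that
# declares `needs: [pre]` can read it via needs.pre.outputs.<name>.
jobs:
  pre:
    runs-on: ubuntu-latest
    steps:
      - id: vars
        run: echo "branch-version=v1.83" >> "$GITHUB_OUTPUT"  # illustrative value
    outputs:
      branch-version: ${{ steps.vars.outputs.branch-version }}
  deploy:
    runs-on: ubuntu-latest
    needs:
      - pre
    steps:
      - run: echo "deploying to ./${{ needs.pre.outputs.branch-version }}"
```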
.github/workflows/latest_deps.yml (vendored, 14 lines changed)

@@ -27,9 +27,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
@@ -61,9 +59,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
 
       - run: sudo apt-get -qq install xmlsec1
@@ -134,9 +130,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
 
       - name: Ensure sytest runs `pip install`
@@ -184,6 +178,8 @@ jobs:
         with:
           path: synapse
 
+      - uses: actions/setup-go@v4
+
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
.github/workflows/release-artifacts.yml (vendored, 4 lines changed)

@@ -4,13 +4,15 @@ name: Build release artifacts
 
 on:
   # we build on PRs and develop to (hopefully) get early warning
-  # of things breaking (but only build one set of debs)
+  # of things breaking (but only build one set of debs). PRs skip
+  # building wheels on macOS & ARM.
   pull_request:
   push:
     branches: ["develop", "release-*"]
 
     # we do the full build on tags.
     tags: ["v*"]
+  merge_group:
   workflow_dispatch:
 
 concurrency:
.github/workflows/tests.yml (vendored, 122 lines changed)

@@ -4,6 +4,7 @@ on:
   push:
     branches: ["develop", "release-*"]
   pull_request:
+  merge_group:
   workflow_dispatch:
 
 concurrency:
@@ -33,6 +34,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@1.58.1
+      - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
@@ -61,9 +65,59 @@ jobs:
       - run: .ci/scripts/check_lockfile.py
 
   lint:
-    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v2"
-    with:
-      typechecking-extras: "all"
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Setup Poetry
+        uses: matrix-org/setup-python-poetry@v1
+        with:
+          install-project: "false"
+
+      - name: Import order (isort)
+        run: poetry run isort --check --diff .
+
+      - name: Code style (black)
+        run: poetry run black --check --diff .
+
+      - name: Semantic checks (ruff)
+        # --quiet suppresses the update check.
+        run: poetry run ruff --quiet .
+
+  lint-mypy:
+    runs-on: ubuntu-latest
+    name: Typechecking
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Setup Poetry
+        uses: matrix-org/setup-python-poetry@v1
+        with:
+          # We want to make use of type hints in optional dependencies too.
+          extras: all
+          # We have seen odd mypy failures that were resolved when we started
+          # installing the project again:
+          # https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
+          # To make CI green, err towards caution and install the project.
+          install-project: "true"
+
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@1.58.1
+      - uses: Swatinem/rust-cache@v2
+
+      # NB: I have two concerns with this action:
+      # 1. We occasionally see odd mypy problems that aren't reproducible
+      #    locally with clean caches. I suspect some dodgy caching behaviour.
+      # 2. The action uses GHA machinery that's deprecated
+      #    (https://github.com/AustinScola/mypy-cache-github-action/issues/277)
+      # It may be simpler to use actions/cache ourselves to restore .mypy_cache.
+      - name: Restore/persist mypy's cache
+        uses: AustinScola/mypy-cache-github-action@df56268388422ee282636ee2c7a9cc55ec644a41
+
+      - name: Run mypy
+        run: poetry run mypy
 
   lint-crlf:
     runs-on: ubuntu-latest
@@ -94,6 +148,9 @@ jobs:
       - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@1.58.1
+      - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:
           poetry-version: "1.3.2"
@@ -109,12 +166,8 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@1.58.1
         with:
-          toolchain: 1.58.1
           components: clippy
       - uses: Swatinem/rust-cache@v2
 
@@ -131,10 +184,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@master
         with:
           toolchain: nightly-2022-12-01
           components: clippy
@@ -151,10 +201,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@master
         with:
           # We use nightly so that it correctly groups together imports
           toolchain: nightly-2022-12-01
@@ -168,6 +215,7 @@ jobs:
     if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
     needs:
       - lint
+      - lint-mypy
       - lint-crlf
       - lint-newsfile
      - lint-pydantic
@@ -219,12 +267,7 @@ jobs:
             postgres:${{ matrix.job.postgres-version }}
 
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
       - uses: matrix-org/setup-python-poetry@v1
@@ -264,12 +307,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
       # There aren't wheels for some of the older deps, so we need to install
@@ -385,12 +423,7 @@ jobs:
         run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
 
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
       - name: Run SyTest
@@ -530,14 +563,11 @@ jobs:
           path: synapse
 
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
+      - uses: actions/setup-go@v4
+
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
@@ -561,12 +591,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: 1.58.1
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
       - run: cargo test
@@ -584,10 +609,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        # There don't seem to be versioned releases of this action per se: for each rust
-        # version there is a branch which gets constantly rebased on top of master.
-        # We pin to a specific commit for paranoia's sake.
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
+        uses: dtolnay/rust-toolchain@master
         with:
           toolchain: nightly-2022-12-01
       - uses: Swatinem/rust-cache@v2
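The reusable backend-meta lint workflow is replaced above by explicit steps, which also makes the checks easy to reproduce outside CI. A sketch of the equivalent local invocation from a Poetry environment (assuming the dev tools from the lockfile are installed):

```sh
# Mirror the new lint and lint-mypy jobs locally.
poetry run isort --check --diff .   # import order
poetry run black --check --diff .   # code style
poetry run ruff --quiet .           # semantic checks; --quiet suppresses the update check
poetry run mypy                     # typechecking
```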
.github/workflows/twisted_trunk.yml (vendored, 23 lines changed)

@@ -5,6 +5,13 @@ on:
     - cron: 0 8 * * *
 
   workflow_dispatch:
+    inputs:
+      twisted_ref:
+        description: Commit, branch or tag to checkout from upstream Twisted.
+        required: false
+        default: 'trunk'
+        type: string
+
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -18,9 +25,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
 
       - uses: matrix-org/setup-python-poetry@v1
@@ -29,7 +34,7 @@ jobs:
           extras: "all"
       - run: |
           poetry remove twisted
-          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref }}
           poetry install --no-interaction --extras "all test"
       - name: Remove warn_unused_ignores from mypy config
         run: sed '/warn_unused_ignores = True/d' -i mypy.ini
@@ -43,9 +48,7 @@ jobs:
       - run: sudo apt-get -qq install xmlsec1
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
 
       - uses: matrix-org/setup-python-poetry@v1
@@ -82,9 +85,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
-        with:
-          toolchain: stable
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
 
       - name: Patch dependencies
@@ -140,6 +141,8 @@ jobs:
         with:
           path: synapse
 
+      - uses: actions/setup-go@v4
+
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
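The new `workflow_dispatch` inputs let this job be pointed at an arbitrary upstream Twisted ref instead of always `trunk`. One way to trigger that, assuming the GitHub CLI is available (the ref value here is illustrative):

```sh
# Manually dispatch the workflow against a specific Twisted ref.
gh workflow run twisted_trunk.yml -f twisted_ref=twisted-22.10.0
```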
.gitignore (vendored, 9 lines changed)

@@ -15,9 +15,10 @@ _trial_temp*/
 .DS_Store
 __pycache__/
 
-# We do want the poetry and cargo lockfile.
+# We do want poetry, cargo and flake lockfiles.
 !poetry.lock
 !Cargo.lock
+!flake.lock
 
 # stuff that is likely to exist when you run a server locally
 /*.db
@@ -38,6 +39,9 @@ __pycache__/
 /.envrc
 .direnv/
 
+# For nix/devenv users
+.devenv/
+
 # IDEs
 /.idea/
 /.ropeproject/
@@ -53,6 +57,7 @@ __pycache__/
 /coverage.*
 /dist/
 /docs/build/
+/dev-docs/_build/
 /htmlcov
 /pip-wheel-metadata/
 
@@ -61,7 +66,7 @@ book/
 
 # complement
 /complement-*
-/master.tar.gz
+/main.tar.gz
 
 # rust
 /target/
CHANGES.md (347 lines changed)

@@ -1,3 +1,346 @@
+Synapse 1.83.0 (2023-05-09)
+===========================
+
+No significant changes since 1.83.0rc1.
+
+
+Synapse 1.83.0rc1 (2023-05-02)
+==============================
+
+Features
+--------
+
+- Experimental support to recursively provide relations per [MSC3981](https://github.com/matrix-org/matrix-spec-proposals/pull/3981). ([\#15315](https://github.com/matrix-org/synapse/issues/15315))
+- Experimental support for [MSC3970](https://github.com/matrix-org/matrix-spec-proposals/pull/3970): Scope transaction IDs to devices. ([\#15318](https://github.com/matrix-org/synapse/issues/15318))
+- Add an [admin API endpoint](https://matrix-org.github.io/synapse/v1.83/admin_api/experimental_features.html) to support per-user feature flags. ([\#15344](https://github.com/matrix-org/synapse/issues/15344))
+- Add a module API to send an HTTP push notification. ([\#15387](https://github.com/matrix-org/synapse/issues/15387))
+- Add an [admin API endpoint](https://matrix-org.github.io/synapse/v1.83/admin_api/statistics.html#get-largest-rooms-by-size-in-database) to query the largest rooms by disk space used in the database. ([\#15482](https://github.com/matrix-org/synapse/issues/15482))
+
+
+Bugfixes
+--------
+
+- Disable push rule evaluation for rooms excluded from sync. ([\#15361](https://github.com/matrix-org/synapse/issues/15361))
+- Fix a long-standing bug where cached server key results which were directly fetched would not be properly re-used. ([\#15417](https://github.com/matrix-org/synapse/issues/15417))
+- Fix a bug introduced in Synapse 1.73.0 where some experimental push rules were returned by default. ([\#15494](https://github.com/matrix-org/synapse/issues/15494))
+
+
+Improved Documentation
+----------------------
+
+- Add Nginx loadbalancing example with sticky mxid for workers. ([\#15411](https://github.com/matrix-org/synapse/issues/15411))
+- Update outdated development docs that mention restrictions in versions of SQLite that we no longer support. ([\#15498](https://github.com/matrix-org/synapse/issues/15498))
+
+
+Internal Changes
+----------------
+
+- Speedup tests by caching HomeServerConfig instances. ([\#15284](https://github.com/matrix-org/synapse/issues/15284))
+- Add denormalised event stream ordering column to membership state tables for future use. Contributed by Nick @ Beeper (@fizzadar). ([\#15356](https://github.com/matrix-org/synapse/issues/15356))
+- Always use multi-user device resync replication endpoints. ([\#15418](https://github.com/matrix-org/synapse/issues/15418))
+- Add column `full_user_id` to tables `profiles` and `user_filters`. ([\#15458](https://github.com/matrix-org/synapse/issues/15458))
+- Update support for [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) to allow always returning fallback-keys in a `/keys/claim` request. ([\#15462](https://github.com/matrix-org/synapse/issues/15462))
+- Improve type hints. ([\#15465](https://github.com/matrix-org/synapse/issues/15465), [\#15496](https://github.com/matrix-org/synapse/issues/15496), [\#15497](https://github.com/matrix-org/synapse/issues/15497))
+- Support claiming more than one OTK at a time. ([\#15468](https://github.com/matrix-org/synapse/issues/15468))
+- Bump types-pyyaml from 6.0.12.8 to 6.0.12.9. ([\#15471](https://github.com/matrix-org/synapse/issues/15471))
+- Bump pyasn1-modules from 0.2.8 to 0.3.0. ([\#15473](https://github.com/matrix-org/synapse/issues/15473))
+- Bump cryptography from 40.0.1 to 40.0.2. ([\#15474](https://github.com/matrix-org/synapse/issues/15474))
+- Bump types-netaddr from 0.8.0.7 to 0.8.0.8. ([\#15475](https://github.com/matrix-org/synapse/issues/15475))
+- Bump types-jsonschema from 4.17.0.6 to 4.17.0.7. ([\#15476](https://github.com/matrix-org/synapse/issues/15476))
+- Ask bug reporters to provide logs as text. ([\#15479](https://github.com/matrix-org/synapse/issues/15479))
+- Add a Nix flake for use as a development environment. ([\#15495](https://github.com/matrix-org/synapse/issues/15495))
+- Bump anyhow from 1.0.70 to 1.0.71. ([\#15507](https://github.com/matrix-org/synapse/issues/15507))
+- Bump types-pillow from 9.4.0.19 to 9.5.0.2. ([\#15508](https://github.com/matrix-org/synapse/issues/15508))
+- Bump packaging from 23.0 to 23.1. ([\#15510](https://github.com/matrix-org/synapse/issues/15510))
+- Bump types-requests from 2.28.11.16 to 2.29.0.0. ([\#15511](https://github.com/matrix-org/synapse/issues/15511))
+- Bump setuptools-rust from 1.5.2 to 1.6.0. ([\#15512](https://github.com/matrix-org/synapse/issues/15512))
+- Update the check_schema_delta script to account for when the schema version has been bumped locally. ([\#15466](https://github.com/matrix-org/synapse/issues/15466))
+
+
+Synapse 1.82.0 (2023-04-25)
+===========================
+
+No significant changes since 1.82.0rc1.
+
+
+Synapse 1.82.0rc1 (2023-04-18)
+==============================
+
+Features
+--------
+
+- Allow loading the `/directory/room/{roomAlias}` endpoint on workers. ([\#15333](https://github.com/matrix-org/synapse/issues/15333))
+- Add some validation to `instance_map` configuration loading. ([\#15431](https://github.com/matrix-org/synapse/issues/15431))
+- Allow loading the `/capabilities` endpoint on workers. ([\#15436](https://github.com/matrix-org/synapse/issues/15436))
+
+
+Bugfixes
+--------
+
+- Delete server-side backup keys when deactivating an account. ([\#15181](https://github.com/matrix-org/synapse/issues/15181))
+- Fix and document untold assumption that `on_logged_out` module hooks will be called before the deletion of pushers. ([\#15410](https://github.com/matrix-org/synapse/issues/15410))
+- Improve robustness when handling a perspective key response by deduplicating received server keys. ([\#15423](https://github.com/matrix-org/synapse/issues/15423))
+- Synapse now correctly fails to start if the config option `app_service_config_files` is not a list. ([\#15425](https://github.com/matrix-org/synapse/issues/15425))
+- Disable loading `RefreshTokenServlet` (`/_matrix/client/(r0|v3|unstable)/refresh`) on workers. ([\#15428](https://github.com/matrix-org/synapse/issues/15428))
+
+
+Improved Documentation
+----------------------
+
+- Note that the `delete_stale_devices_after` background job always runs on the main process. ([\#15452](https://github.com/matrix-org/synapse/issues/15452))
+
+
+Deprecations and Removals
+-------------------------
+
+- Remove the broken, unspecced registration fallback. Note that the *login* fallback is unaffected by this change. ([\#15405](https://github.com/matrix-org/synapse/issues/15405))
+
+
+Internal Changes
+----------------
+
+- Bump black from 23.1.0 to 23.3.0. ([\#15372](https://github.com/matrix-org/synapse/issues/15372))
+- Bump pyopenssl from 23.1.0 to 23.1.1. ([\#15373](https://github.com/matrix-org/synapse/issues/15373))
+- Bump types-psycopg2 from 2.9.21.8 to 2.9.21.9. ([\#15374](https://github.com/matrix-org/synapse/issues/15374))
+- Bump types-netaddr from 0.8.0.6 to 0.8.0.7. ([\#15375](https://github.com/matrix-org/synapse/issues/15375))
+- Bump types-opentracing from 2.4.10.3 to 2.4.10.4. ([\#15376](https://github.com/matrix-org/synapse/issues/15376))
+- Bump dawidd6/action-download-artifact from 2.26.0 to 2.26.1. ([\#15404](https://github.com/matrix-org/synapse/issues/15404))
+- Bump parameterized from 0.8.1 to 0.9.0. ([\#15412](https://github.com/matrix-org/synapse/issues/15412))
+- Bump types-pillow from 9.4.0.17 to 9.4.0.19. ([\#15413](https://github.com/matrix-org/synapse/issues/15413))
+- Bump sentry-sdk from 1.17.0 to 1.19.1. ([\#15414](https://github.com/matrix-org/synapse/issues/15414))
+- Bump immutabledict from 2.2.3 to 2.2.4. ([\#15415](https://github.com/matrix-org/synapse/issues/15415))
+- Bump dawidd6/action-download-artifact from 2.26.1 to 2.27.0. ([\#15441](https://github.com/matrix-org/synapse/issues/15441))
+- Bump serde_json from 1.0.95 to 1.0.96. ([\#15442](https://github.com/matrix-org/synapse/issues/15442))
+- Bump serde from 1.0.159 to 1.0.160. ([\#15443](https://github.com/matrix-org/synapse/issues/15443))
+- Bump pillow from 9.4.0 to 9.5.0. ([\#15444](https://github.com/matrix-org/synapse/issues/15444))
+- Bump furo from 2023.3.23 to 2023.3.27. ([\#15445](https://github.com/matrix-org/synapse/issues/15445))
+- Bump types-pyopenssl from 23.1.0.0 to 23.1.0.2. ([\#15446](https://github.com/matrix-org/synapse/issues/15446))
+- Bump mypy from 1.0.0 to 1.0.1. ([\#15447](https://github.com/matrix-org/synapse/issues/15447))
+- Bump psycopg2 from 2.9.5 to 2.9.6. ([\#15448](https://github.com/matrix-org/synapse/issues/15448))
+- Improve DB performance of clearing out old data from `stream_ordering_to_exterm`. ([\#15382](https://github.com/matrix-org/synapse/issues/15382), [\#15429](https://github.com/matrix-org/synapse/issues/15429))
+- Implement [MSC3989](https://github.com/matrix-org/matrix-spec-proposals/pull/3989) redaction algorithm. ([\#15393](https://github.com/matrix-org/synapse/issues/15393))
+- Implement [MSC2175](https://github.com/matrix-org/matrix-doc/pull/2175) to stop adding `creator` to create events. ([\#15394](https://github.com/matrix-org/synapse/issues/15394))
+- Implement [MSC2174](https://github.com/matrix-org/matrix-spec-proposals/pull/2174) to move the `redacts` key to a `content` property. ([\#15395](https://github.com/matrix-org/synapse/issues/15395))
+- Trust dtonlay/rust-toolchain in CI. ([\#15406](https://github.com/matrix-org/synapse/issues/15406))
+- Explicitly install Synapse during typechecking in CI. ([\#15409](https://github.com/matrix-org/synapse/issues/15409))
+- Only load the SSO redirect servlet if SSO is enabled. ([\#15421](https://github.com/matrix-org/synapse/issues/15421))
+- Refactor `SimpleHttpClient` to pull out a base class. ([\#15427](https://github.com/matrix-org/synapse/issues/15427))
+- Improve type hints. ([\#15432](https://github.com/matrix-org/synapse/issues/15432))
+- Convert async to normal tests in `TestSSOHandler`. ([\#15433](https://github.com/matrix-org/synapse/issues/15433))
+- Speed up the user directory background update. ([\#15435](https://github.com/matrix-org/synapse/issues/15435))
+- Disable directory listing for static resources in `/_matrix/static/`. ([\#15438](https://github.com/matrix-org/synapse/issues/15438))
+- Move various module API callback registration methods to a dedicated class. ([\#15453](https://github.com/matrix-org/synapse/issues/15453))
+
+
+Synapse 1.81.0 (2023-04-11)
+===========================
+
+Synapse now attempts the versioned appservice paths before falling back to the
+[legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
+Usage of the legacy routes should be considered deprecated.
+
+Additionally, Synapse has supported sending the application service access token
+via [the `Authorization` header](https://spec.matrix.org/v1.6/application-service-api/#authorization)
+since v1.70.0. For backwards compatibility it is *also* sent as the `access_token`
+query parameter. This is insecure and should be considered deprecated.
+
+A future version of Synapse (v1.88.0 or later) will remove support for legacy
+application service routes and query parameter authorization.
+
+
+No significant changes since 1.81.0rc2.
+
+
+Synapse 1.81.0rc2 (2023-04-06)
+==============================
+
+Bugfixes
+--------
+
+- Fix the `set_device_id_for_pushers_txn` background update crash. ([\#15391](https://github.com/matrix-org/synapse/issues/15391))
+
+
+Internal Changes
+----------------
+
+- Update CI to run complement under the latest stable go version. ([\#15403](https://github.com/matrix-org/synapse/issues/15403))
+
+
+Synapse 1.81.0rc1 (2023-04-04)
+==============================
+
+Features
+--------
+
+- Add the ability to enable/disable registrations when in the OIDC flow. ([\#14978](https://github.com/matrix-org/synapse/issues/14978))
+- Add a primitive helper script for listing worker endpoints. ([\#15243](https://github.com/matrix-org/synapse/issues/15243))
+- Experimental support for passing One Time Key and device key requests to application services ([MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983) and [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984)). ([\#15314](https://github.com/matrix-org/synapse/issues/15314), [\#15321](https://github.com/matrix-org/synapse/issues/15321))
+- Allow loading `/password_policy` endpoint on workers. ([\#15331](https://github.com/matrix-org/synapse/issues/15331))
+- Add experimental support for Unix sockets. Contributed by Jason Little. ([\#15353](https://github.com/matrix-org/synapse/issues/15353))
+- Build Debian packages for Ubuntu 23.04 (Lunar Lobster). ([\#15381](https://github.com/matrix-org/synapse/issues/15381))
+
+
+Bugfixes
+--------
+
+- Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled. ([\#15295](https://github.com/matrix-org/synapse/issues/15295))
+- Fix a bug introduced in Synapse v1.55.0 which could delay remote homeservers being able to decrypt encrypted messages sent by local users. ([\#15297](https://github.com/matrix-org/synapse/issues/15297))
+- Add a check to [SQLite port_db script](https://matrix-org.github.io/synapse/latest/postgres.html#porting-from-sqlite)
+  to ensure that the sqlite database passed to the script exists before trying to port from it. ([\#15306](https://github.com/matrix-org/synapse/issues/15306))
+- Fix a bug introduced in Synapse 1.76.0 where responses from worker deployments could include an internal `_INT_STREAM_POS` key. ([\#15309](https://github.com/matrix-org/synapse/issues/15309))
+- Fix a long-standing bug that Synpase only used the [legacy appservice routes](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes). ([\#15317](https://github.com/matrix-org/synapse/issues/15317))
+- Fix a long-standing bug preventing users from rejoining rooms after being banned and unbanned over federation. Contributed by Nico. ([\#15323](https://github.com/matrix-org/synapse/issues/15323))
+- Fix bug in worker mode where on a rolling restart of workers the "typing" worker would consume 100% CPU until it got restarted. ([\#15332](https://github.com/matrix-org/synapse/issues/15332))
+- Fix a long-standing bug where some to_device messages could be dropped when using workers. ([\#15349](https://github.com/matrix-org/synapse/issues/15349))
+- Fix a bug introduced in Synapse 1.70.0 where the background sync from a faster join could spin for hours when one of the events involved had been marked for backoff. ([\#15351](https://github.com/matrix-org/synapse/issues/15351))
+- Fix missing app variable in mail subject for password resets. Contributed by Cyberes. ([\#15352](https://github.com/matrix-org/synapse/issues/15352))
+- Fix a rare bug introduced in Synapse 1.66.0 where initial syncs would fail when the user had been kicked from a faster joined room that had not finished syncing. ([\#15383](https://github.com/matrix-org/synapse/issues/15383))
+
+
+Improved Documentation
+----------------------
+
+- Fix a typo in login requests ratelimit defaults. ([\#15341](https://github.com/matrix-org/synapse/issues/15341))
+- Add some clarification to the doc/comments regarding TCP replication. ([\#15354](https://github.com/matrix-org/synapse/issues/15354))
+- Note that Synapse 1.74 queued a rebuild of the user directory tables. ([\#15386](https://github.com/matrix-org/synapse/issues/15386))
+
+
+Internal Changes
+----------------
+
+- Use `immutabledict` instead of `frozendict`. ([\#15113](https://github.com/matrix-org/synapse/issues/15113))
+- Add developer documentation for the Federation Sender and add a documentation mechanism using Sphinx. ([\#15265](https://github.com/matrix-org/synapse/issues/15265), [\#15336](https://github.com/matrix-org/synapse/issues/15336))
+- Make the pushers rely on the `device_id` instead of the `access_token_id` for various operations. ([\#15280](https://github.com/matrix-org/synapse/issues/15280))
+- Bump sentry-sdk from 1.15.0 to 1.17.0. ([\#15285](https://github.com/matrix-org/synapse/issues/15285))
+- Allow running the Twisted trunk job against other branches. ([\#15302](https://github.com/matrix-org/synapse/issues/15302))
+- Remind the releaser to ask for changelog feedback in [#synapse-dev](https://matrix.to/#/#synapse-dev:matrix.org). ([\#15303](https://github.com/matrix-org/synapse/issues/15303))
+- Bump dtolnay/rust-toolchain from e12eda571dc9a5ee5d58eecf4738ec291c66f295 to fc3253060d0c959bea12a59f10f8391454a0b02d. ([\#15304](https://github.com/matrix-org/synapse/issues/15304))
+- Reject events with an invalid "mentions" property per [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952). ([\#15311](https://github.com/matrix-org/synapse/issues/15311))
+- As an optimisation, use `TRUNCATE` on Postgres when clearing the user directory tables. ([\#15316](https://github.com/matrix-org/synapse/issues/15316))
+- Fix `.gitignore` rule for the Complement source tarball downloaded automatically by `complement.sh`. ([\#15319](https://github.com/matrix-org/synapse/issues/15319))
+- Bump serde from 1.0.157 to 1.0.158. ([\#15324](https://github.com/matrix-org/synapse/issues/15324))
+- Bump regex from 1.7.1 to 1.7.3. ([\#15325](https://github.com/matrix-org/synapse/issues/15325))
+- Bump types-pyopenssl from 23.0.0.4 to 23.1.0.0. ([\#15326](https://github.com/matrix-org/synapse/issues/15326))
+- Bump furo from 2022.12.7 to 2023.3.23. ([\#15327](https://github.com/matrix-org/synapse/issues/15327))
+- Bump ruff from 0.0.252 to 0.0.259. ([\#15328](https://github.com/matrix-org/synapse/issues/15328))
+- Bump cryptography from 40.0.0 to 40.0.1. ([\#15329](https://github.com/matrix-org/synapse/issues/15329))
+- Bump mypy-zope from 0.9.0 to 0.9.1. ([\#15330](https://github.com/matrix-org/synapse/issues/15330))
+- Speed up unit tests when using SQLite3. ([\#15334](https://github.com/matrix-org/synapse/issues/15334))
+- Speed up pydantic CI job. ([\#15339](https://github.com/matrix-org/synapse/issues/15339))
+- Speed up sample config CI job. ([\#15340](https://github.com/matrix-org/synapse/issues/15340))
+- Fix copyright year in SSO footer template. ([\#15358](https://github.com/matrix-org/synapse/issues/15358))
+- Bump peaceiris/actions-gh-pages from 3.9.2 to 3.9.3. ([\#15369](https://github.com/matrix-org/synapse/issues/15369))
+- Bump serde from 1.0.158 to 1.0.159. ([\#15370](https://github.com/matrix-org/synapse/issues/15370))
+- Bump serde_json from 1.0.94 to 1.0.95. ([\#15371](https://github.com/matrix-org/synapse/issues/15371))
+- Speed up membership queries for users with forgotten rooms. ([\#15385](https://github.com/matrix-org/synapse/issues/15385))
+
+
+Synapse 1.80.0 (2023-03-28)
+===========================
+
+No significant changes since 1.80.0rc2.
+
+
+Synapse 1.80.0rc2 (2023-03-22)
+==============================
+
+Bugfixes
+--------
+
+- Fix a bug in which the [`POST /_matrix/client/v3/rooms/{roomId}/report/{eventId}`](https://spec.matrix.org/v1.6/client-server-api/#post_matrixclientv3roomsroomidreporteventid) endpoint would return the wrong error if the user did not have permission to view the event. This aligns Synapse's implementation with [MSC2249](https://github.com/matrix-org/matrix-spec-proposals/pull/2249). ([\#15298](https://github.com/matrix-org/synapse/issues/15298), [\#15300](https://github.com/matrix-org/synapse/issues/15300))
+- Fix a bug introduced in Synapse 1.75.0rc1 where the [SQLite port_db script](https://matrix-org.github.io/synapse/latest/postgres.html#porting-from-sqlite)
+  would fail to open the SQLite database. ([\#15301](https://github.com/matrix-org/synapse/issues/15301))
+
+
+Synapse 1.80.0rc1 (2023-03-21)
+==============================
+
+Features
+--------
+
+- Stabilise support for [MSC3966](https://github.com/matrix-org/matrix-spec-proposals/pull/3966): `event_property_contains` push condition. ([\#15187](https://github.com/matrix-org/synapse/issues/15187))
+- Implement [MSC2659](https://github.com/matrix-org/matrix-spec-proposals/pull/2659): application service ping endpoint. Contributed by Tulir @ Beeper. ([\#15249](https://github.com/matrix-org/synapse/issues/15249))
+- Allow loading `/register/available` endpoint on workers. ([\#15268](https://github.com/matrix-org/synapse/issues/15268))
+- Improve performance of creating and authenticating events. ([\#15195](https://github.com/matrix-org/synapse/issues/15195))
+- Add topic and name events to group of events that are batch persisted when creating a room. ([\#15229](https://github.com/matrix-org/synapse/issues/15229))
+
+
+Bugfixes
+--------
+
+- Fix a long-standing bug in which the user directory would assume any remote membership state events represent a profile change. ([\#14755](https://github.com/matrix-org/synapse/issues/14755), [\#14756](https://github.com/matrix-org/synapse/issues/14756))
+- Implement [MSC3873](https://github.com/matrix-org/matrix-spec-proposals/pull/3873) to fix a long-standing bug where properties with dots were handled ambiguously in push rules. ([\#15190](https://github.com/matrix-org/synapse/issues/15190))
+- Faster joins: Fix a bug introduced in Synapse 1.66 where spurious "Failed to find memberships ..." errors would be logged. ([\#15232](https://github.com/matrix-org/synapse/issues/15232))
+- Fix a long-standing error when sending message into deleted room. ([\#15235](https://github.com/matrix-org/synapse/issues/15235))
+
+
+Updates to the Docker image
+---------------------------
+
+- Ensure the Dockerfile builds on platforms that don't have a `cryptography` wheel. ([\#15239](https://github.com/matrix-org/synapse/issues/15239))
+- Mirror images to the GitHub Container Registry (`ghcr.io/matrix-org/synapse`). ([\#15281](https://github.com/matrix-org/synapse/issues/15281), [\#15282](https://github.com/matrix-org/synapse/issues/15282))
+
+
+Improved Documentation
+----------------------
+
+- Add a missing endpoint to the workers documentation. ([\#15223](https://github.com/matrix-org/synapse/issues/15223))
+
+
+Internal Changes
+----------------
+
+- Add additional functionality to declaring worker types when starting Complement in worker mode. ([\#14921](https://github.com/matrix-org/synapse/issues/14921))
+- Add `Synapse-Trace-Id` to `access-control-expose-headers` header. ([\#14974](https://github.com/matrix-org/synapse/issues/14974))
+- Make the `HttpTransactionCache` use the `Requester` in addition of the just the `Request` to build the transaction key. ([\#15200](https://github.com/matrix-org/synapse/issues/15200))
+- Improve log lines when purging rooms. ([\#15222](https://github.com/matrix-org/synapse/issues/15222))
+- Improve type hints. ([\#15230](https://github.com/matrix-org/synapse/issues/15230), [\#15231](https://github.com/matrix-org/synapse/issues/15231), [\#15238](https://github.com/matrix-org/synapse/issues/15238))
+- Move various module API callback registration methods to a dedicated class. ([\#15237](https://github.com/matrix-org/synapse/issues/15237))
+- Configure GitHub Actions for merge queues. ([\#15244](https://github.com/matrix-org/synapse/issues/15244))
+- Add schema comments about the `destinations` and `destination_rooms` tables. ([\#15247](https://github.com/matrix-org/synapse/issues/15247))
+- Skip processing of auto-join room behaviour if there are no auto-join rooms configured. ([\#15262](https://github.com/matrix-org/synapse/issues/15262))
+- Remove unused store method `_set_destination_retry_timings_emulated`. ([\#15266](https://github.com/matrix-org/synapse/issues/15266))
+- Reorganize URL preview code. ([\#15269](https://github.com/matrix-org/synapse/issues/15269))
+- Clean-up direct TCP replication code. ([\#15272](https://github.com/matrix-org/synapse/issues/15272), [\#15274](https://github.com/matrix-org/synapse/issues/15274))
+- Make `configure_workers_and_start` script used in Complement tests compatible with older versions of Python. ([\#15275](https://github.com/matrix-org/synapse/issues/15275))
+- Add a `/versions` flag for [MSC3952](https://github.com/matrix-org/matrix-spec-proposals/pull/3952). ([\#15293](https://github.com/matrix-org/synapse/issues/15293))
+- Bump hiredis from 2.2.1 to 2.2.2. ([\#15252](https://github.com/matrix-org/synapse/issues/15252))
+- Bump serde from 1.0.152 to 1.0.155. ([\#15253](https://github.com/matrix-org/synapse/issues/15253))
+- Bump pysaml2 from 7.2.1 to 7.3.1. ([\#15254](https://github.com/matrix-org/synapse/issues/15254))
+- Bump msgpack from 1.0.4 to 1.0.5. ([\#15255](https://github.com/matrix-org/synapse/issues/15255))
+- Bump gitpython from 3.1.30 to 3.1.31. ([\#15256](https://github.com/matrix-org/synapse/issues/15256))
+- Bump cryptography from 39.0.1 to 39.0.2. ([\#15257](https://github.com/matrix-org/synapse/issues/15257))
+- Bump pydantic from 1.10.4 to 1.10.6. ([\#15286](https://github.com/matrix-org/synapse/issues/15286))
+- Bump serde from 1.0.155 to 1.0.157. ([\#15287](https://github.com/matrix-org/synapse/issues/15287))
+- Bump anyhow from 1.0.69 to 1.0.70. ([\#15288](https://github.com/matrix-org/synapse/issues/15288))
+- Bump txredisapi from 1.4.7 to 1.4.9. ([\#15289](https://github.com/matrix-org/synapse/issues/15289))
+- Bump pygithub from 1.57 to 1.58.1. ([\#15290](https://github.com/matrix-org/synapse/issues/15290))
+- Bump types-requests from 2.28.11.12 to 2.28.11.15. ([\#15291](https://github.com/matrix-org/synapse/issues/15291))
+
+
+Synapse 1.79.0 (2023-03-14)
+===========================
+
+No significant changes since 1.79.0rc2.
+
+
+Synapse 1.79.0rc2 (2023-03-13)
+==============================
+
+Bugfixes
+--------
+
+- Fix a bug introduced in Synapse 1.79.0rc1 where attempting to register a `on_remove_user_third_party_identifier` module API callback would be a no-op. ([\#15227](https://github.com/matrix-org/synapse/issues/15227))
+- Fix a rare bug introduced in Synapse 1.73 where events could remain unsent to other homeservers after a faster-join to a room. ([\#15248](https://github.com/matrix-org/synapse/issues/15248))
+
+
+Internal Changes
+----------------
+
+- Refactor `filter_events_for_server`. ([\#15240](https://github.com/matrix-org/synapse/issues/15240))
+
+
 Synapse 1.79.0rc1 (2023-03-07)
 ==============================
 
@@ -47,7 +390,7 @@ Improved Documentation
 Deprecations and Removals
 -------------------------
 
-- Deprecate the `on_threepid_bind` module callback, to be replaced by [`on_add_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_add_user_third_party_identifier). See [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.79/docs/upgrade.md#upgrading-to-v1790). ([\#15044]
+- Deprecate the `on_threepid_bind` module callback, to be replaced by [`on_add_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_add_user_third_party_identifier). See [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.79/docs/upgrade.md#upgrading-to-v1790). ([\#15044](https://github.com/matrix-org/synapse/issues/15044))
 - Remove the unspecced `room_alias` field from the [`/createRoom`](https://spec.matrix.org/v1.6/client-server-api/#post_matrixclientv3createroom) response. ([\#15093](https://github.com/matrix-org/synapse/issues/15093))
 - Remove the unspecced `PUT` on the `/knock/{roomIdOrAlias}` endpoint. ([\#15189](https://github.com/matrix-org/synapse/issues/15189))
 - Remove the undocumented and unspecced `type` parameter to the `/thumbnail` endpoint. ([\#15137](https://github.com/matrix-org/synapse/issues/15137))
@@ -288,7 +631,7 @@ Those who are `poetry install`ing from source using our lockfile should ensure t
 Notes on faster joins
 ---------------------
 
-The faster joins project sees the most benefit when joining a room with a large number of members (joined or historical). We expect it to be particularly useful for joining large public rooms like the [Matrix HQ](https://matrix.to/#/#matrix:matrix.org) or [Synapse Admins](https://matrix.to/#/#synapse:matrix.org) rooms.
+The faster joins project sees the most benefit when joining a room with a large number of members (joined or historical). We expect it to be particularly useful for joining large public rooms like the [Matrix HQ](https://matrix.to/#/#matrix:matrix.org) or [Synapse Admins](https://matrix.to/#/#synapse:matrix.org) rooms.
 
 After a faster join, Synapse considers that room "partially joined". In this state, you should be able to
Cargo.lock (49 lines changed, generated)
@@ -13,9 +13,9 @@ dependencies = [

[[package]]
name = "anyhow"
-version = "1.0.69"
+version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800"
+checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"

[[package]]
name = "arc-swap"

@@ -185,9 +185,9 @@ dependencies = [

[[package]]
name = "proc-macro2"
-version = "1.0.46"
+version = "1.0.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
+checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224"
dependencies = [
 "unicode-ident",
]

@@ -250,7 +250,7 @@ dependencies = [
 "proc-macro2",
 "pyo3-macros-backend",
 "quote",
- "syn",
+ "syn 1.0.104",
]

@@ -261,7 +261,7 @@ checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f"
dependencies = [
 "proc-macro2",
 "quote",
- "syn",
+ "syn 1.0.104",
]

@@ -276,9 +276,9 @@ dependencies = [

[[package]]
name = "quote"
-version = "1.0.21"
+version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
dependencies = [
 "proc-macro2",
]

@@ -294,9 +294,9 @@ dependencies = [

[[package]]
name = "regex"
-version = "1.7.1"
+version = "1.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
+checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
dependencies = [
 "aho-corasick",
 "memchr",

@@ -305,9 +305,9 @@ dependencies = [

[[package]]
name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"

[[package]]
name = "ryu"

@@ -323,29 +323,29 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"

[[package]]
name = "serde"
-version = "1.0.152"
+version = "1.0.162"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+checksum = "71b2f6e1ab5c2b98c05f0f35b236b22e8df7ead6ffbf51d7808da7f8817e7ab6"
dependencies = [
 "serde_derive",
]

[[package]]
name = "serde_derive"
-version = "1.0.152"
+version = "1.0.162"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+checksum = "a2a0814352fd64b58489904a44ea8d90cb1a91dcb6b4f5ebabc32c8318e93cb6"
dependencies = [
 "proc-macro2",
 "quote",
- "syn",
+ "syn 2.0.10",
]

[[package]]
name = "serde_json"
-version = "1.0.94"
+version = "1.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea"
+checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
dependencies = [
 "itoa",
 "ryu",

@@ -375,6 +375,17 @@ dependencies = [
 "unicode-ident",
]

+[[package]]
+name = "syn"
+version = "2.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aad1363ed6d37b84299588d62d3a7d95b5a5c2d9aad5c85609fda12afaa1f40"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]

[[package]]
name = "synapse"
version = "0.1.0"

changelog.d/15025.misc (1 line, new file)
@@ -0,0 +1 @@
+Use oEmbed to generate URL previews for YouTube Shorts.

(deleted changelog file)
@@ -1 +0,0 @@
-Stabilise support for [MSC3966](https://github.com/matrix-org/matrix-spec-proposals/pull/3966): `event_property_contains` push condition.

(deleted changelog file)
@@ -1 +0,0 @@
-Implement [MSC3873](https://github.com/matrix-org/matrix-spec-proposals/pull/3873) to fix a long-standing bug where properties with dots were handled ambiguously in push rules.

(deleted changelog file)
@@ -1 +0,0 @@
-Improve performance of creating and authenticating events.

changelog.d/15197.feature (1 line, new file)
@@ -0,0 +1 @@
+Add an option to prevent media downloads from configured domains.
(deleted changelog file)
@@ -1 +0,0 @@
-Make the `HttpTransactionCache` use the `Requester` in addition to just the `Request` to build the transaction key.
(deleted changelog file)
@@ -1 +0,0 @@
-Add a missing endpoint to the workers documentation.

changelog.d/15224.feature (1 line, new file)
@@ -0,0 +1 @@
+Add `forget_rooms_on_leave` config option to automatically forget rooms when users leave them or are removed from them.

changelog.d/15345.feature (1 line, new file)
@@ -0,0 +1 @@
+Follow-up to adding experimental feature flags per-user (#15344) which moves experimental features MSC3026 (busy presence), MSC3881 (remotely toggle push notifications for another client), and MSC3967 (Do not require UIA when first uploading cross signing keys) from the experimental config to per-user flags.

changelog.d/15437.misc (1 line, new file)
@@ -0,0 +1 @@
+Make the `thread_id` column on `event_push_actions`, `event_push_actions_staging`, and `event_push_summary` non-null.

changelog.d/15470.misc (1 line, new file)
@@ -0,0 +1 @@
+Create new `Client` for use with HTTP Replication between workers. Contributed by Jason Little.

changelog.d/15509.misc (1 line, new file)
@@ -0,0 +1 @@
+Bump pyicu from 2.10.2 to 2.11.

changelog.d/15516.feature (1 line, new file)
@@ -0,0 +1 @@
+Add a config option to delay push notifications by a random amount, to discourage time-based profiling.

changelog.d/15522.misc (1 line, new file)
@@ -0,0 +1 @@
+Remove references to supporting per-user flag for [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654) (#15522).

changelog.d/15523.bugfix (1 line, new file)
@@ -0,0 +1 @@
+Don't fail on federation over TOR where SRV queries are not supported. Contributed by Zdzichu.

changelog.d/15527.misc (1 line, new file)
@@ -0,0 +1 @@
+Don't use a trusted key server when running the demo scripts.

changelog.d/15528.feature (1 line, new file)
@@ -0,0 +1 @@
+Stabilize support for [MSC2659](https://github.com/matrix-org/matrix-spec-proposals/pull/2659): application service ping endpoint. Contributed by Tulir @ Beeper.

changelog.d/15529.misc (1 line, new file)
@@ -0,0 +1 @@
+Speed up rebuilding of the user directory for local users.

changelog.d/15531.misc (1 line, new file)
@@ -0,0 +1 @@
+Speed up deleting of old rows in `event_push_actions`.

changelog.d/15532.misc (1 line, new file)
@@ -0,0 +1 @@
+Install the `xmlsec` and `mdbook` packages and switch back to the upstream [cachix/devenv](https://github.com/cachix/devenv) repo in the nix development environment.

changelog.d/15533.misc (1 line, new file)
@@ -0,0 +1 @@
+Install the `xmlsec` and `mdbook` packages and switch back to the upstream [cachix/devenv](https://github.com/cachix/devenv) repo in the nix development environment.

changelog.d/15534.misc (1 line, new file)
@@ -0,0 +1 @@
+Implement [MSC3987](https://github.com/matrix-org/matrix-spec-proposals/pull/3987) by removing `"dont_notify"` from the list of actions in default push rules.

changelog.d/15535.misc (1 line, new file)
@@ -0,0 +1 @@
+Move various module API callback registration methods to a dedicated class.

changelog.d/15536.feature (1 line, new file)
@@ -0,0 +1 @@
+Implement [MSC4009](https://github.com/matrix-org/matrix-spec-proposals/pull/4009) to expand the supported characters in Matrix IDs.

changelog.d/15539.misc (1 line, new file)
@@ -0,0 +1 @@
+Proxy `/user/devices` federation queries to application services for [MSC3984](https://github.com/matrix-org/matrix-spec-proposals/pull/3984).

changelog.d/15542.misc (1 line, new file)
@@ -0,0 +1 @@
+Factor out an `is_mine_server_name` method.

changelog.d/15543.misc (1 line, new file)
@@ -0,0 +1 @@
+Allow running Complement tests using [podman](https://podman.io/) by adding a `PODMAN` environment variable to `scripts-dev/complement.sh`.

changelog.d/15544.doc (1 line, new file)
@@ -0,0 +1 @@
+Clarify documentation of the "Create or modify account" Admin API.

changelog.d/15545.misc (1 line, new file)
@@ -0,0 +1 @@
+Install the `xmlsec` and `mdbook` packages and switch back to the upstream [cachix/devenv](https://github.com/cachix/devenv) repo in the nix development environment.

changelog.d/15548.misc (1 line, new file)
@@ -0,0 +1 @@
+Bump serde from 1.0.160 to 1.0.162.

changelog.d/15549.misc (1 line, new file)
@@ -0,0 +1 @@
+Bump types-setuptools from 67.6.0.5 to 67.7.0.1.

changelog.d/15550.misc (1 line, new file)
@@ -0,0 +1 @@
+Bump sentry-sdk from 1.19.1 to 1.22.1.

changelog.d/15551.misc (1 line, new file)
@@ -0,0 +1 @@
+Bump ruff from 0.0.259 to 0.0.265.

changelog.d/15552.misc (1 line, new file)
@@ -0,0 +1 @@
+Bump hiredis from 2.2.2 to 2.2.3.

changelog.d/15553.misc (1 line, new file)
@@ -0,0 +1 @@
+Bump types-requests from 2.29.0.0 to 2.30.0.0.

changelog.d/15554.bugfix (1 line, new file)
@@ -0,0 +1 @@
+Experimental support for [MSC4010](https://github.com/matrix-org/matrix-spec-proposals/pull/4010) which rejects setting the `"m.push_rules"` via account data.

changelog.d/15555.bugfix (1 line, new file)
@@ -0,0 +1 @@
+Experimental support for [MSC4010](https://github.com/matrix-org/matrix-spec-proposals/pull/4010) which rejects setting the `"m.push_rules"` via account data.

changelog.d/15560.doc (1 line, new file)
@@ -0,0 +1 @@
+Fix path to the `statistics/database/rooms` admin API in documentation.

debian/changelog (72 lines added, vendored)
@@ -1,3 +1,75 @@
+matrix-synapse-py3 (1.83.0) stable; urgency=medium
+
+  * New Synapse release 1.83.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 09 May 2023 18:13:37 +0200
+
+matrix-synapse-py3 (1.83.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.83.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 02 May 2023 15:56:38 +0100
+
+matrix-synapse-py3 (1.82.0) stable; urgency=medium
+
+  * New Synapse release 1.82.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Apr 2023 11:56:06 +0100
+
+matrix-synapse-py3 (1.82.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.82.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Apr 2023 09:47:30 +0100
+
+matrix-synapse-py3 (1.81.0) stable; urgency=medium
+
+  * New Synapse release 1.81.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Apr 2023 14:18:35 +0100
+
+matrix-synapse-py3 (1.81.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.81.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 06 Apr 2023 16:07:54 +0100
+
+matrix-synapse-py3 (1.81.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.81.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Apr 2023 14:29:03 +0100
+
+matrix-synapse-py3 (1.80.0) stable; urgency=medium
+
+  * New Synapse release 1.80.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Mar 2023 11:10:33 +0100
+
+matrix-synapse-py3 (1.80.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.80.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 22 Mar 2023 08:30:16 -0700
+
+matrix-synapse-py3 (1.80.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.80.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Mar 2023 10:56:08 -0700
+
+matrix-synapse-py3 (1.79.0) stable; urgency=medium
+
+  * New Synapse release 1.79.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Mar 2023 16:14:50 +0100
+
+matrix-synapse-py3 (1.79.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.79.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Mon, 13 Mar 2023 12:54:21 +0000
+
matrix-synapse-py3 (1.79.0~rc1) stable; urgency=medium

  * New Synapse release 1.79.0rc1.

@@ -46,7 +46,7 @@ for port in 8080 8081 8082; do
	echo ''

	# Warning, this heredoc depends on the interaction of tabs and spaces.
-	# Please don't accidentaly bork me with your fancy settings.
+	# Please don't accidentally bork me with your fancy settings.
	listeners=$(cat <<-PORTLISTENERS
	# Configure server to listen on both $https_port and $port
	# This overides some of the default settings above

@@ -80,12 +80,8 @@ for port in 8080 8081 8082; do
	echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\""
	echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\""

-	# Ignore keys from the trusted keys server
-	echo '# Ignore keys from the trusted keys server'
-	echo 'trusted_key_servers:'
-	echo '  - server_name: "matrix.org"'
-	echo '    accept_keys_insecurely: true'
-	echo ''
+	# Request keys directly from servers contacted over federation
+	echo 'trusted_key_servers: []'

	# Allow the servers to communicate over localhost.
	allow_list=$(cat <<-ALLOW_LIST

dev-docs/Makefile (20 lines, new file)
@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = .
BUILDDIR      = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

dev-docs/conf.py (50 lines, new file)
@@ -0,0 +1,50 @@
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information

project = "Synapse development"
copyright = "2023, The Matrix.org Foundation C.I.C."
author = "The Synapse Maintainers and Community"

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

extensions = [
    "autodoc2",
    "myst_parser",
]

templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]


# -- Options for Autodoc2 ----------------------------------------------------

autodoc2_docstring_parser_regexes = [
    # this will render all docstrings as 'MyST' Markdown
    (r".*", "myst"),
]

autodoc2_packages = [
    {
        "path": "../synapse",
        # Don't render documentation for everything as a matter of course
        "auto_mode": False,
    },
]


# -- Options for MyST (Markdown) ---------------------------------------------

# myst_heading_anchors = 2


# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = "furo"
html_static_path = ["_static"]

dev-docs/index.rst (22 lines, new file)
@@ -0,0 +1,22 @@
.. Synapse Developer Documentation documentation master file, created by
   sphinx-quickstart on Mon Mar 13 08:59:51 2023.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to the Synapse Developer Documentation!
===========================================================

.. toctree::
   :maxdepth: 2
   :caption: Contents:

   modules/federation_sender


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

dev-docs/modules/federation_sender.md (5 lines, new file)
@@ -0,0 +1,5 @@
Federation Sender
=================

```{autodoc2-docstring} synapse.federation.sender
```

@@ -37,9 +37,24 @@ RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update -qq && apt-get install -yqq \
-      build-essential git libffi-dev libssl-dev \
+      build-essential curl git libffi-dev libssl-dev \
      && rm -rf /var/lib/apt/lists/*

+# Install rust and ensure its in the PATH.
+# (Rust may be needed to compile `cryptography`---which is one of poetry's
+# dependencies---on platforms that don't have a `cryptography` wheel.
+ENV RUSTUP_HOME=/rust
+ENV CARGO_HOME=/cargo
+ENV PATH=/cargo/bin:/rust/bin:$PATH
+RUN mkdir /rust /cargo
+
+RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
+
+# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
+# set to true, so we expose it as a build-arg.
+ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
+
# We install poetry in its own build stage to avoid its dependencies conflicting with
# synapse's dependencies.
RUN --mount=type=cache,target=/root/.cache/pip \

@@ -51,8 +51,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
  # -z True if the length of string is zero.
  if [[ -z "$SYNAPSE_WORKER_TYPES" ]]; then
    export SYNAPSE_WORKER_TYPES="\
-      event_persister, \
-      event_persister, \
+      event_persister:2, \
      background_worker, \
      frontend_proxy, \
      event_creator, \

@@ -64,7 +63,8 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
      synchrotron, \
      client_reader, \
      appservice, \
-      pusher"
+      pusher, \
+      stream_writers=account_data+presence+receipts+to_device+typing"

  fi
  log "Workers requested: $SYNAPSE_WORKER_TYPES"

@@ -19,8 +19,15 @@
# The environment variables it reads are:
#   * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
#   * SYNAPSE_REPORT_STATS: Whether to report stats.
-#   * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKER_CONFIG
-#         below. Leave empty for no workers.
+#   * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKERS_CONFIG
+#         below. Leave empty for no workers. Add a ':' and a number at the end to
+#         multiply that worker. Append multiple worker types with '+' to merge the
+#         worker types into a single worker. Add a name and a '=' to the front of a
+#         worker type to give this instance a name in logs and nginx.
+#         Examples:
+#         SYNAPSE_WORKER_TYPES='event_persister, federation_sender, client_reader'
+#         SYNAPSE_WORKER_TYPES='event_persister:2, federation_sender:2, client_reader'
+#         SYNAPSE_WORKER_TYPES='stream_writers=account_data+presence+typing'
#   * SYNAPSE_AS_REGISTRATION_DIR: If specified, a directory in which .yaml and .yml files
#         will be treated as Application Service registration files.
#   * SYNAPSE_TLS_CERT: Path to a TLS certificate in PEM format.
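To make the new syntax concrete, here is a small self-contained sketch of how such a string is expected to expand into named workers. This is not the script's actual parser (which appears further down in this diff); the input string and worker names are illustrative only:

```python
# A minimal, standalone sketch of how a SYNAPSE_WORKER_TYPES string expands,
# mirroring (not reproducing) the parsing logic added in this change.
from typing import Dict, Set


def sketch_parse(worker_types_env: str) -> Dict[str, Set[str]]:
    result: Dict[str, Set[str]] = {}
    counters: Dict[str, int] = {}
    for raw in (x.strip() for x in worker_types_env.split(",")):
        # Optional trailing ":<n>" multiplier.
        count = 1
        if ":" in raw:
            raw, _, num = raw.rpartition(":")
            count = int(num)
        # Optional "name=" prefix, then "+"-merged worker types.
        name, _, types = raw.rpartition("=")
        types_set = {t.strip() for t in types.split("+")}
        base = name or "+".join(sorted(types_set))
        for _ in range(count):
            counters[base] = counters.get(base, 0) + 1
            result[f"{base}{counters[base]}"] = types_set
    return result


print(sketch_parse("event_persister:2, writers=account_data+typing"))
# {'event_persister1': {'event_persister'},
#  'event_persister2': {'event_persister'},
#  'writers1': {'account_data', 'typing'}}
```

The real implementation additionally validates worker types against WORKERS_CONFIG and rejects unsafe names, as shown in the hunks below.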
@@ -40,16 +47,33 @@

import os
import platform
+import re
import subprocess
import sys
+from collections import defaultdict
+from itertools import chain
from pathlib import Path
-from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional, Set
+from typing import (
+    Any,
+    Dict,
+    List,
+    Mapping,
+    MutableMapping,
+    NoReturn,
+    Optional,
+    Set,
+    SupportsIndex,
+)

import yaml
from jinja2 import Environment, FileSystemLoader

MAIN_PROCESS_HTTP_LISTENER_PORT = 8080

+# A simple name used as a placeholder in the WORKERS_CONFIG below. This will be replaced
+# during processing with the name of the worker.
+WORKER_PLACEHOLDER_NAME = "placeholder_name"
+
# Workers with exposed endpoints needs either "client", "federation", or "media" listener_resources
#   Watching /_matrix/client needs a "client" listener
#   Watching /_matrix/federation needs a "federation" listener
@@ -70,11 +94,13 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "endpoint_patterns": [
            "^/_matrix/client/(api/v1|r0|v3|unstable)/user_directory/search$"
        ],
-        "shared_extra_conf": {"update_user_directory_from_worker": "user_dir1"},
+        "shared_extra_conf": {
+            "update_user_directory_from_worker": WORKER_PLACEHOLDER_NAME
+        },
        "worker_extra_conf": "",
    },
    "media_repository": {
-        "app": "synapse.app.media_repository",
+        "app": "synapse.app.generic_worker",
        "listener_resources": ["media"],
        "endpoint_patterns": [
            "^/_matrix/media/",

@@ -87,7 +113,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        # The first configured media worker will run the media background jobs
        "shared_extra_conf": {
            "enable_media_repo": False,
-            "media_instance_running_background_jobs": "media_repository1",
+            "media_instance_running_background_jobs": WORKER_PLACEHOLDER_NAME,
        },
        "worker_extra_conf": "enable_media_repo: true",
    },

@@ -95,7 +121,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "app": "synapse.app.generic_worker",
        "listener_resources": [],
        "endpoint_patterns": [],
-        "shared_extra_conf": {"notify_appservices_from_worker": "appservice1"},
+        "shared_extra_conf": {
+            "notify_appservices_from_worker": WORKER_PLACEHOLDER_NAME
+        },
        "worker_extra_conf": "",
    },
    "federation_sender": {

@@ -135,6 +163,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
            "^/_matrix/client/versions$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
            "^/_matrix/client/(r0|v3|unstable)/register$",
+            "^/_matrix/client/(r0|v3|unstable)/register/available$",
            "^/_matrix/client/(r0|v3|unstable)/auth/.*/fallback/web$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event",

@@ -143,6 +172,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
            "^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
            "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
+            "^/_matrix/client/(r0|v3|unstable)/password_policy$",
+            "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
+            "^/_matrix/client/(r0|v3|unstable)/capabilities$",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",

@@ -192,9 +224,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "app": "synapse.app.generic_worker",
        "listener_resources": [],
        "endpoint_patterns": [],
-        # This worker cannot be sharded. Therefore there should only ever be one background
-        # worker, and it should be named background_worker1
-        "shared_extra_conf": {"run_background_tasks_on": "background_worker1"},
+        # This worker cannot be sharded. Therefore, there should only ever be one
+        # background worker. This is enforced for the safety of your database.
+        "shared_extra_conf": {"run_background_tasks_on": WORKER_PLACEHOLDER_NAME},
        "worker_extra_conf": "",
    },
    "event_creator": {
@@ -275,7 +307,7 @@ NGINX_LOCATION_CONFIG_BLOCK = """
"""

NGINX_UPSTREAM_CONFIG_BLOCK = """
-upstream {upstream_worker_type} {{
+upstream {upstream_worker_base_name} {{
{body}
}}
"""
@@ -326,7 +358,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:

def add_worker_roles_to_shared_config(
    shared_config: dict,
-    worker_type: str,
+    worker_types_set: Set[str],
    worker_name: str,
    worker_port: int,
) -> None:

@@ -334,22 +366,36 @@ def add_worker_roles_to_shared_config(
    append appropriate worker information to it for the current worker_type instance.

    Args:
-        shared_config: The config dict that all worker instances share (after being converted to YAML)
-        worker_type: The type of worker (one of those defined in WORKERS_CONFIG).
+        shared_config: The config dict that all worker instances share (after being
+            converted to YAML)
+        worker_types_set: The type of worker (one of those defined in WORKERS_CONFIG).
+            This list can be a single worker type or multiple.
        worker_name: The name of the worker instance.
        worker_port: The HTTP replication port that the worker instance is listening on.
    """
-    # The instance_map config field marks the workers that write to various replication streams
+    # The instance_map config field marks the workers that write to various replication
+    # streams
    instance_map = shared_config.setdefault("instance_map", {})

-    # Worker-type specific sharding config
-    if worker_type == "pusher":
+    # This is a list of the stream_writers that there can be only one of. Events can be
+    # sharded, and therefore doesn't belong here.
+    singular_stream_writers = [
+        "account_data",
+        "presence",
+        "receipts",
+        "to_device",
+        "typing",
+    ]
+
+    # Worker-type specific sharding config. Now a single worker can fulfill multiple
+    # roles, check each.
+    if "pusher" in worker_types_set:
        shared_config.setdefault("pusher_instances", []).append(worker_name)

-    elif worker_type == "federation_sender":
+    if "federation_sender" in worker_types_set:
        shared_config.setdefault("federation_sender_instances", []).append(worker_name)

-    elif worker_type == "event_persister":
+    if "event_persister" in worker_types_set:
        # Event persisters write to the events stream, so we need to update
        # the list of event stream writers
        shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
@@ -362,19 +408,154 @@ def add_worker_roles_to_shared_config(
            "port": worker_port,
        }

-    elif worker_type in ["account_data", "presence", "receipts", "to_device", "typing"]:
-        # Update the list of stream writers
-        # It's convenient that the name of the worker type is the same as the stream to write
-        shared_config.setdefault("stream_writers", {}).setdefault(
-            worker_type, []
-        ).append(worker_name)
+    # Update the list of stream writers. It's convenient that the name of the worker
+    # type is the same as the stream to write. Iterate over the whole list in case there
+    # is more than one.
+    for worker in worker_types_set:
+        if worker in singular_stream_writers:
+            shared_config.setdefault("stream_writers", {}).setdefault(
+                worker, []
+            ).append(worker_name)

-        # Map of stream writer instance names to host/ports combos
-        # For now, all stream writers need http replication ports
-        instance_map[worker_name] = {
-            "host": "localhost",
-            "port": worker_port,
-        }
+            # Map of stream writer instance names to host/ports combos
+            # For now, all stream writers need http replication ports
+            instance_map[worker_name] = {
+                "host": "localhost",
+                "port": worker_port,
+            }
+
+
+def merge_worker_template_configs(
+    existing_dict: Optional[Dict[str, Any]],
+    to_be_merged_dict: Dict[str, Any],
+) -> Dict[str, Any]:
+    """When given an existing dict of worker template configuration consisting with both
+    dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
+    return new dict.
+
+    Args:
+        existing_dict: Either an existing worker template or a fresh blank one.
+        to_be_merged_dict: The template from WORKERS_CONFIGS to be merged into
+            existing_dict.
+    Returns: The newly merged together dict values.
+    """
+    new_dict: Dict[str, Any] = {}
+    if not existing_dict:
+        # It doesn't exist yet, just use the new dict(but take a copy not a reference)
+        new_dict = to_be_merged_dict.copy()
+    else:
+        for i in to_be_merged_dict.keys():
+            if (i == "endpoint_patterns") or (i == "listener_resources"):
+                # merge the two lists, remove duplicates
+                new_dict[i] = list(set(existing_dict[i] + to_be_merged_dict[i]))
+            elif i == "shared_extra_conf":
+                # merge dictionary's, the worker name will be replaced later
+                new_dict[i] = {**existing_dict[i], **to_be_merged_dict[i]}
+            elif i == "worker_extra_conf":
+                # There is only one worker type that has a 'worker_extra_conf' and it is
+                # the media_repo. Since duplicate worker types on the same worker don't
+                # work, this is fine.
+                new_dict[i] = existing_dict[i] + to_be_merged_dict[i]
+            else:
+                # Everything else should be identical, like "app", which only works
+                # because all apps are now generic_workers.
+                new_dict[i] = to_be_merged_dict[i]
+    return new_dict
+
+
+def insert_worker_name_for_worker_config(
+    existing_dict: Dict[str, Any], worker_name: str
+) -> Dict[str, Any]:
+    """Insert a given worker name into the worker's configuration dict.
+
+    Args:
+        existing_dict: The worker_config dict that is imported into shared_config.
+        worker_name: The name of the worker to insert.
+    Returns: Copy of the dict with newly inserted worker name
+    """
+    dict_to_edit = existing_dict.copy()
+    for k, v in dict_to_edit["shared_extra_conf"].items():
+        # Only proceed if it's the placeholder name string
+        if v == WORKER_PLACEHOLDER_NAME:
+            dict_to_edit["shared_extra_conf"][k] = worker_name
+    return dict_to_edit
+
+
+def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
+    """
+    Apply multiplier(if found) by returning a new expanded list with some basic error
+    checking.
+
+    Args:
+        worker_types: The unprocessed List of requested workers
+    Returns:
+        A new list with all requested workers expanded.
+    """
+    # Checking performed:
+    # 1. if worker:2 or more is declared, it will create additional workers up to number
+    # 2. if worker:1, it will create a single copy of this worker as if no number was
+    #    given
+    # 3. if worker:0 is declared, this worker will be ignored. This is to allow for
+    #    scripting and automated expansion and is intended behaviour.
+    # 4. if worker:NaN or is a negative number, it will error and log it.
+    new_worker_types = []
+    for worker_type in worker_types:
+        if ":" in worker_type:
+            worker_type_components = split_and_strip_string(worker_type, ":", 1)
+            worker_count = 0
+            # Should only be 2 components, a type of worker(s) and an integer as a
+            # string. Cast the number as an int then it can be used as a counter.
+            try:
+                worker_count = int(worker_type_components[1])
+            except ValueError:
+                error(
+                    f"Bad number in worker count for '{worker_type}': "
+                    f"'{worker_type_components[1]}' is not an integer"
+                )
+
+            # As long as there are more than 0, we add one to the list to make below.
+            for _ in range(worker_count):
+                new_worker_types.append(worker_type_components[0])
+
+        else:
+            # If it's not a real worker_type, it will error out later.
+            new_worker_types.append(worker_type)
+    return new_worker_types
+
+
+def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:
+    """Helper to check to make sure worker types that cannot have multiples do not.
+
+    Args:
+        worker_type: The type of worker to check against.
+    Returns: True if allowed, False if not
+    """
+    return worker_type not in [
+        "background_worker",
+        "account_data",
+        "presence",
+        "receipts",
+        "typing",
+        "to_device",
+    ]
+
+
+def split_and_strip_string(
+    given_string: str, split_char: str, max_split: SupportsIndex = -1
+) -> List[str]:
+    """
+    Helper to split a string on split_char and strip whitespace from each end of each
+    element.
+    Args:
+        given_string: The string to split
+        split_char: The character to split the string on
+        max_split: kwarg for split() to limit how many times the split() happens
+    Returns:
+        A List of strings
+    """
+    # Removes whitespace from ends of result strings before adding to list. Allow for
+    # overriding 'maxsplit' kwarg, default being -1 to signify no maximum.
+    return [x.strip() for x in given_string.split(split_char, maxsplit=max_split)]


def generate_base_homeserver_config() -> None:
@@ -389,29 +570,153 @@ def generate_base_homeserver_config() -> None:
    subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)


+def parse_worker_types(
+    requested_worker_types: List[str],
+) -> Dict[str, Set[str]]:
+    """Read the desired list of requested workers and prepare the data for use in
+    generating worker config files while also checking for potential gotchas.
+
+    Args:
+        requested_worker_types: The list formed from the split environment variable
+            containing the unprocessed requests for workers.
+
+    Returns: A dict of worker names to set of worker types. Format:
+        {'worker_name':
+            {'worker_type', 'worker_type2'}
+        }
+    """
+    # A counter of worker_base_name -> int. Used for determining the name for a given
+    # worker when generating its config file, as each worker's name is just
+    # worker_base_name followed by instance number
+    worker_base_name_counter: Dict[str, int] = defaultdict(int)
+
+    # Similar to above, but more finely grained. This is used to determine we don't have
+    # more than a single worker for cases where multiples would be bad(e.g. presence).
+    worker_type_shard_counter: Dict[str, int] = defaultdict(int)
+
+    # The final result of all this processing
+    dict_to_return: Dict[str, Set[str]] = {}
+
+    # Handle any multipliers requested for given workers.
+    multiple_processed_worker_types = apply_requested_multiplier_for_worker(
+        requested_worker_types
+    )
+
+    # Process each worker_type_string
+    # Examples of expected formats:
+    #   - requested_name=type1+type2+type3
+    #   - synchrotron
+    #   - event_creator+event_persister
+    for worker_type_string in multiple_processed_worker_types:
+        # First, if a name is requested, use that — otherwise generate one.
+        worker_base_name: str = ""
+        if "=" in worker_type_string:
+            # Split on "=", remove extra whitespace from ends then make list
+            worker_type_split = split_and_strip_string(worker_type_string, "=")
+            if len(worker_type_split) > 2:
+                error(
+                    "There should only be one '=' in the worker type string. "
+                    f"Please fix: {worker_type_string}"
+                )
+
+            # Assign the name
+            worker_base_name = worker_type_split[0]
+
+            if not re.match(r"^[a-zA-Z0-9_+-]*[a-zA-Z_+-]$", worker_base_name):
+                # Apply a fairly narrow regex to the worker names. Some characters
+                # aren't safe for use in file paths or nginx configurations.
+                # Don't allow to end with a number because we'll add a number
+                # ourselves in a moment.
+                error(
+                    "Invalid worker name; please choose a name consisting of "
+                    "alphanumeric letters, _ + -, but not ending with a digit: "
+                    f"{worker_base_name!r}"
+                )
+
+            # Continue processing the remainder of the worker_type string
+            # with the name override removed.
+            worker_type_string = worker_type_split[1]
+
+        # Split the worker_type_string on "+", remove whitespace from ends then make
+        # the list a set so it's deduplicated.
+        worker_types_set: Set[str] = set(
+            split_and_strip_string(worker_type_string, "+")
+        )
+
+        if not worker_base_name:
+            # No base name specified: generate one deterministically from set of
+            # types
+            worker_base_name = "+".join(sorted(worker_types_set))
+
+        # At this point, we have:
+        #   worker_base_name which is the name for the worker, without counter.
+        #   worker_types_set which is the set of worker types for this worker.
+
+        # Validate worker_type and make sure we don't allow sharding for a worker type
+        # that doesn't support it. Will error and stop if it is a problem,
+        # e.g. 'background_worker'.
+        for worker_type in worker_types_set:
+            # Verify this is a real defined worker type. If it's not, stop everything so
+            # it can be fixed.
+            if worker_type not in WORKERS_CONFIG:
+                error(
+                    f"{worker_type} is an unknown worker type! Was found in "
+                    f"'{worker_type_string}'. Please fix!"
+                )
+
+            if worker_type in worker_type_shard_counter:
+                if not is_sharding_allowed_for_worker_type(worker_type):
+                    error(
+                        f"There can be only a single worker with {worker_type} "
+                        "type. Please recount and remove."
+                    )
+            # Not in shard counter, must not have seen it yet, add it.
+            worker_type_shard_counter[worker_type] += 1
+
+        # Generate the number for the worker using incrementing counter
+        worker_base_name_counter[worker_base_name] += 1
+        worker_number = worker_base_name_counter[worker_base_name]
+        worker_name = f"{worker_base_name}{worker_number}"
+
+        if worker_number > 1:
+            # If this isn't the first worker, check that we don't have a confusing
+            # mixture of worker types with the same base name.
+            first_worker_with_base_name = dict_to_return[f"{worker_base_name}1"]
+            if first_worker_with_base_name != worker_types_set:
+                error(
+                    f"Can not use worker_name: '{worker_name}' for worker_type(s): "
+                    f"{worker_types_set!r}. It is already in use by "
+                    f"worker_type(s): {first_worker_with_base_name!r}"
+                )
+
+        dict_to_return[worker_name] = worker_types_set
+
+    return dict_to_return
+
+
def generate_worker_files(
-    environ: Mapping[str, str], config_path: str, data_dir: str
+    environ: Mapping[str, str],
+    config_path: str,
+    data_dir: str,
+    requested_worker_types: Dict[str, Set[str]],
) -> None:
-    """Read the desired list of workers from environment variables and generate
-    shared homeserver, nginx and supervisord configs.
+    """Read the desired workers(if any) that is passed in and generate shared
+    homeserver, nginx and supervisord configs.

    Args:
        environ: os.environ instance.
        config_path: The location of the generated Synapse main worker config file.
        data_dir: The location of the synapse data directory. Where log and
            user-facing config files live.
+        requested_worker_types: A Dict containing requested workers in the format of
+            {'worker_name1': {'worker_type', ...}}
    """
    # Note that yaml cares about indentation, so care should be taken to insert lines
    # into files at the correct indentation below.

    # shared_config is the contents of a Synapse config file that will be shared amongst
    # the main Synapse process as well as all workers.
    # It is intended mainly for disabling functionality when certain workers are spun up,
    # and adding a replication listener.

-    # First read the original config file and extract the listeners block. Then we'll add
-    # another listener for replication. Later we'll write out the result to the shared
-    # config file.
+    # First read the original config file and extract the listeners block. Then we'll
+    # add another listener for replication. Later we'll write out the result to the
+    # shared config file.
    listeners = [
        {
            "port": 9093,
@@ -427,9 +732,9 @@ def generate_worker_files(
    listeners += original_listeners

    # The shared homeserver config. The contents of which will be inserted into the
-    # base shared worker jinja2 template.
-    #
-    # This config file will be passed to all workers, included Synapse's main process.
+    # base shared worker jinja2 template. This config file will be passed to all
+    # workers, included Synapse's main process. It is intended mainly for disabling
+    # functionality when certain workers are spun up, and adding a replication listener.
    shared_config: Dict[str, Any] = {"listeners": listeners}

    # List of dicts that describe workers.
@@ -437,31 +742,20 @@ def generate_worker_files(
    # program blocks.
    worker_descriptors: List[Dict[str, Any]] = []

-    # Upstreams for load-balancing purposes. This dict takes the form of a worker type to the
-    # ports of each worker. For example:
+    # Upstreams for load-balancing purposes. This dict takes the form of the worker
+    # type to the ports of each worker. For example:
    # {
    #   worker_type: {1234, 1235, ...}}
    # }
    # and will be used to construct 'upstream' nginx directives.
    nginx_upstreams: Dict[str, Set[int]] = {}

-    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
-    # placed after the proxy_pass directive. The main benefit to representing this data as a
-    # dict over a str is that we can easily deduplicate endpoints across multiple instances
-    # of the same worker.
-    #
-    # An nginx site config that will be amended to depending on the workers that are
-    # spun up. To be placed in /etc/nginx/conf.d.
-    nginx_locations = {}
-
-    # Read the desired worker configuration from the environment
-    worker_types_env = environ.get("SYNAPSE_WORKER_TYPES", "").strip()
-    if not worker_types_env:
-        # No workers, just the main process
-        worker_types = []
-    else:
-        # Split type names by comma, ignoring whitespace.
-        worker_types = [x.strip() for x in worker_types_env.split(",")]
+    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
+    # will be placed after the proxy_pass directive. The main benefit to representing
+    # this data as a dict over a str is that we can easily deduplicate endpoints
+    # across multiple instances of the same worker. The final rendering will be combined
+    # with nginx_upstreams and placed in /etc/nginx/conf.d.
+    nginx_locations: Dict[str, str] = {}

    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
@@ -469,66 +763,57 @@ def generate_worker_files(
    # Start worker ports from this arbitrary port
    worker_port = 18009

-    # A counter of worker_type -> int. Used for determining the name for a given
-    # worker type when generating its config file, as each worker's name is just
-    # worker_type + instance #
-    worker_type_counter: Dict[str, int] = {}
-
    # A list of internal endpoints to healthcheck, starting with the main process
    # which exists even if no workers do.
    healthcheck_urls = ["http://localhost:8080/health"]

-    # For each worker type specified by the user, create config values
-    for worker_type in worker_types:
-        worker_config = WORKERS_CONFIG.get(worker_type)
-        if worker_config:
-            worker_config = worker_config.copy()
-        else:
-            error(worker_type + " is an unknown worker type! Please fix!")
+    # Get the set of all worker types that we have configured
+    all_worker_types_in_use = set(chain(*requested_worker_types.values()))
+    # Map locations to upstreams (corresponding to worker types) in Nginx
+    # but only if we use the appropriate worker type
+    for worker_type in all_worker_types_in_use:
+        for endpoint_pattern in WORKERS_CONFIG[worker_type]["endpoint_patterns"]:
+            nginx_locations[endpoint_pattern] = f"http://{worker_type}"

-        new_worker_count = worker_type_counter.setdefault(worker_type, 0) + 1
-        worker_type_counter[worker_type] = new_worker_count
+    # For each worker type specified by the user, create config values and write it's
+    # yaml config file
+    for worker_name, worker_types_set in requested_worker_types.items():
+        # The collected and processed data will live here.
+        worker_config: Dict[str, Any] = {}
+
+        # Merge all worker config templates for this worker into a single config
+        for worker_type in worker_types_set:
+            copy_of_template_config = WORKERS_CONFIG[worker_type].copy()
+
+            # Merge worker type template configuration data. It's a combination of lists
+            # and dicts, so use this helper.
+            worker_config = merge_worker_template_configs(
+                worker_config, copy_of_template_config
+            )
+
+        # Replace placeholder names in the config template with the actual worker name.
+        worker_config = insert_worker_name_for_worker_config(worker_config, worker_name)

-        # Name workers by their type concatenated with an incrementing number
-        # e.g. federation_reader1
-        worker_name = worker_type + str(new_worker_count)
        worker_config.update(
            {"name": worker_name, "port": str(worker_port), "config_path": config_path}
        )

-        # Update the shared config with any worker-type specific options
-        shared_config.update(worker_config["shared_extra_conf"])
+        # Update the shared config with any worker_type specific options. The first of a
+        # given worker_type needs to stay assigned and not be replaced.
+        worker_config["shared_extra_conf"].update(shared_config)
+        shared_config = worker_config["shared_extra_conf"]

        healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))

-        # Check if more than one instance of this worker type has been specified
-        worker_type_total_count = worker_types.count(worker_type)
-
        # Update the shared config with sharding-related options if necessary
        add_worker_roles_to_shared_config(
-            shared_config, worker_type, worker_name, worker_port
+            shared_config, worker_types_set, worker_name, worker_port
        )

        # Enable the worker in supervisord
        worker_descriptors.append(worker_config)

-        # Add nginx location blocks for this worker's endpoints (if any are defined)
-        for pattern in worker_config["endpoint_patterns"]:
-            # Determine whether we need to load-balance this worker
-            if worker_type_total_count > 1:
-                # Create or add to a load-balanced upstream for this worker
-                nginx_upstreams.setdefault(worker_type, set()).add(worker_port)
-
-                # Upstreams are named after the worker_type
-                upstream = "http://" + worker_type
-            else:
-                upstream = "http://localhost:%d" % (worker_port,)
-
-            # Note that this endpoint should proxy to this upstream
-            nginx_locations[pattern] = upstream
-
        # Write out the worker's logging config file
-
        log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)

        # Then a worker config file
@@ -539,6 +824,10 @@ def generate_worker_files(
            worker_log_config_filepath=log_config_filepath,
        )

+        # Save this worker's port number to the correct nginx upstreams
+        for worker_type in worker_types_set:
+            nginx_upstreams.setdefault(worker_type, set()).add(worker_port)
+
        worker_port += 1

    # Build the nginx location config blocks
@@ -551,15 +840,14 @@ def generate_worker_files(

    # Determine the load-balancing upstreams to configure
    nginx_upstream_config = ""
-
-    for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
+    for upstream_worker_base_name, upstream_worker_ports in nginx_upstreams.items():
        body = ""
        for port in upstream_worker_ports:
-            body += "    server localhost:%d;\n" % (port,)
+            body += f"    server localhost:{port};\n"

        # Add to the list of configured upstreams
        nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
-            upstream_worker_type=upstream_worker_type,
+            upstream_worker_base_name=upstream_worker_base_name,
            body=body,
        )
@@ -580,7 +868,7 @@ def generate_worker_files(
        if reg_path.suffix.lower() in (".yaml", ".yml")
    ]

-    workers_in_use = len(worker_types) > 0
+    workers_in_use = len(requested_worker_types) > 0

    # Shared homeserver config
    convert(
@@ -678,13 +966,26 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
        generate_base_homeserver_config()
    else:
        log("Base homeserver config exists—not regenerating")
-    # This script may be run multiple times (mostly by Complement, see note at top of file).
-    # Don't re-configure workers in this instance.
+    # This script may be run multiple times (mostly by Complement, see note at top of
+    # file). Don't re-configure workers in this instance.
    mark_filepath = "/conf/workers_have_been_configured"
    if not os.path.exists(mark_filepath):
+        # Collect and validate worker_type requests
+        # Read the desired worker configuration from the environment
+        worker_types_env = environ.get("SYNAPSE_WORKER_TYPES", "").strip()
+        # Only process worker_types if they exist
+        if not worker_types_env:
+            # No workers, just the main process
+            worker_types = []
+            requested_worker_types: Dict[str, Any] = {}
+        else:
+            # Split type names by comma, ignoring whitespace.
+            worker_types = split_and_strip_string(worker_types_env, ",")
+            requested_worker_types = parse_worker_types(worker_types)
+
        # Always regenerate all other config files
        log("Generating worker config files")
-        generate_worker_files(environ, config_path, data_dir)
+        generate_worker_files(environ, config_path, data_dir, requested_worker_types)

        # Mark workers as being configured
        with open(mark_filepath, "w") as f:

@@ -57,6 +57,7 @@
- [Account Validity](admin_api/account_validity.md)
- [Background Updates](usage/administration/admin_api/background_updates.md)
- [Event Reports](admin_api/event_reports.md)
+- [Experimental Features](admin_api/experimental_features.md)
- [Media](admin_api/media_admin_api.md)
- [Purge History](admin_api/purge_history_api.md)
- [Register Users](admin_api/register_api.md)

docs/admin_api/experimental_features.md (55 lines, new file)
@@ -0,0 +1,55 @@
# Experimental Features API

This API allows a server administrator to enable or disable some experimental features on a per-user
basis. The currently supported features are:
- [MSC3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy
presence state enabled
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
for another client
- [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require
UIA when first uploading cross-signing keys.


To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).

## Enabling/Disabling Features

This API allows a server administrator to enable experimental features for a given user. The request must
provide a body containing the user id and listing the features to enable/disable in the following format:
```json
{
    "features": {
        "msc3026": true,
        "msc3881": true
    }
}
```
where `true` is used to enable the feature, and `false` is used to disable the feature.


The API is:

```
PUT /_synapse/admin/v1/experimental_features/<user_id>
```
|
||||
|
||||
## Listing Enabled Features
|
||||
|
||||
To list which features are enabled/disabled for a given user send a request to the following API:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/experimental_features/<user_id>
|
||||
```
|
||||
|
||||
It will return a list of possible features and indicate whether they are enabled or disabled for the
|
||||
user like so:
|
||||
```json
|
||||
{
|
||||
"features": {
|
||||
"msc3026": true,
|
||||
"msc3881": false,
|
||||
"msc3967": false
|
||||
}
|
||||
}
|
||||
```
|
||||
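As a concrete sketch of the `PUT` endpoint above (the hostname, user ID, and access token are placeholders):

```sh
curl -X PUT \
  --header "Authorization: Bearer <admin_access_token>" \
  --data '{"features": {"msc3026": true, "msc3881": false}}' \
  "https://synapse.example.com/_synapse/admin/v1/experimental_features/@alice:example.com"
```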
@@ -81,3 +81,52 @@ The following fields are returned in the JSON response body:
  - `user_id` - string - Fully-qualified user ID (ex. `@user:server.com`).
* `next_token` - integer - Opaque value used for pagination. See above.
* `total` - integer - Total number of users after filtering.


# Get largest rooms by size in database

Returns the 10 largest rooms and an estimate of how much space in the database
they are taking.

This does not include the size of any media associated with the room.

Returns an error on SQLite.

*Note:* This uses the planner statistics from PostgreSQL to do the estimates,
which means that the returned information can vary widely from reality. However,
it should be enough to get a rough idea of where database disk space is going.


The API is:

```
GET /_synapse/admin/v1/statistics/database/rooms
```

A response body like the following is returned:

```json
{
  "rooms": [
    {
      "room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
      "estimated_size": 47325417353
    }
  ]
}
```


**Response**

The following fields are returned in the JSON response body:

* `rooms` - An array of objects, sorted by largest room first. Objects contain
  the following fields:
  - `room_id` - string - The room ID.
  - `estimated_size` - integer - Estimated disk space used in bytes by the room
    in the database.


*Added in Synapse 1.83.0*
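For example, the endpoint can be queried as follows (placeholder host and token); the response is the JSON body shown above:

```sh
curl --header "Authorization: Bearer <admin_access_token>" \
  "https://synapse.example.com/_synapse/admin/v1/statistics/database/rooms"
```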
@@ -62,7 +62,7 @@ URL parameters:

- `user_id`: fully-qualified user id: for example, `@user:server.com`.

## Create or modify Account
## Create or modify account

This API allows an administrator to create or modify a user account with a
specific `user_id`.
@@ -78,28 +78,29 @@ with a body of:
```json
{
    "password": "user_password",
    "displayname": "User",
    "logout_devices": false,
    "displayname": "Alice Marigold",
    "avatar_url": "mxc://example.com/abcde12345",
    "threepids": [
        {
            "medium": "email",
            "address": "<user_mail_1>"
            "address": "alice@example.com"
        },
        {
            "medium": "email",
            "address": "<user_mail_2>"
            "address": "alice@domain.org"
        }
    ],
    "external_ids": [
        {
            "auth_provider": "<provider1>",
            "external_id": "<user_id_provider_1>"
            "auth_provider": "example",
            "external_id": "12345"
        },
        {
            "auth_provider": "<provider2>",
            "external_id": "<user_id_provider_2>"
            "auth_provider": "example2",
            "external_id": "abc54321"
        }
    ],
    "avatar_url": "<avatar_url>",
    "admin": false,
    "deactivated": false,
    "user_type": null
@@ -112,41 +113,51 @@ Returns HTTP status code:

URL parameters:

- `user_id`: fully-qualified user id: for example, `@user:server.com`.
- `user_id` - A fully-qualified user id. For example, `@user:server.com`.

Body parameters:

- `password` - string, optional. If provided, the user's password is updated and all
- `password` - **string**, optional. If provided, the user's password is updated and all
  devices are logged out, unless `logout_devices` is set to `false`.
- `logout_devices` - bool, optional, defaults to `true`. If set to false, devices aren't
- `logout_devices` - **bool**, optional, defaults to `true`. If set to `false`, devices aren't
  logged out even when `password` is provided.
- `displayname` - string, optional, defaults to the value of `user_id`.
- `threepids` - array, optional, allows setting the third-party IDs (email, msisdn)
  - `medium` - string. Kind of third-party ID, either `email` or `msisdn`.
  - `address` - string. Value of third-party ID.
  belonging to a user.
- `external_ids` - array, optional. Allow setting the identifier of the external identity
  provider for SSO (Single sign-on). Details in the configuration manual under the
  sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers).
  - `auth_provider` - string. ID of the external identity provider. Value of `idp_id`
    in the homeserver configuration. Note that no error is raised if the provided
    value is not in the homeserver configuration.
  - `external_id` - string, user ID in the external identity provider.
- `avatar_url` - string, optional, must be a
- `displayname` - **string**, optional. If set to an empty string (`""`), the user's display name
  will be removed.
- `avatar_url` - **string**, optional. Must be a
  [MXC URI](https://matrix.org/docs/spec/client_server/r0.6.0#matrix-content-mxc-uris).
- `admin` - bool, optional, defaults to `false`.
- `deactivated` - bool, optional. If unspecified, deactivation state will be left
  unchanged on existing accounts and set to `false` for new accounts.
  A user cannot be erased by deactivating with this API. For details on
  deactivating users see [Deactivate Account](#deactivate-account).
- `user_type` - string or null, optional. If provided, the user type will be
  adjusted. If `null` given, the user type will be cleared. Other
  allowed options are: `bot` and `support`.
  If set to an empty string (`""`), the user's avatar is removed.
- `threepids` - **array**, optional. If provided, the user's third-party IDs (email, msisdn) are
  entirely replaced with the given list. Each item in the array is an object with the following
  fields:
  - `medium` - **string**, required. The type of third-party ID, either `email` or `msisdn` (phone number).
  - `address` - **string**, required. The third-party ID itself, e.g. `alice@example.com` for `email` or
    `447470274584` (for a phone number with country code "44") and `19254857364` (for a phone number
    with country code "1") for `msisdn`.
  Note: If a threepid is removed from a user via this option, Synapse will also attempt to remove
  that threepid from any identity servers it is aware has a binding for it.
- `external_ids` - **array**, optional. Allow setting the identifier of the external identity
  provider for SSO (Single sign-on). More details are in the configuration manual under the
  sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers).
  - `auth_provider` - **string**, required. The unique, internal ID of the external identity provider.
    The same as `idp_id` from the homeserver configuration. Note that no error is raised if the
    provided value is not in the homeserver configuration.
  - `external_id` - **string**, required. An identifier for the user in the external identity provider.
    When the user logs in to the identity provider, this must be the unique ID that they map to.
- `admin` - **bool**, optional, defaults to `false`. Whether the user is a homeserver administrator,
  granting them access to the Admin API, among other things.
- `deactivated` - **bool**, optional. If unspecified, deactivation state will be left unchanged.

  If the user already exists then optional parameters default to the current value.
  Note: the `password` field must also be set if both of the following are true:
  - `deactivated` is set to `false` and the user was previously deactivated (you are reactivating this user)
  - Users are allowed to set their password on this homeserver (both `password_config.enabled` and
    `password_config.localdb_enabled` config options are set to `true`).
  Users' passwords are wiped upon account deactivation, hence the need to set a new one here.

  In order to re-activate an account `deactivated` must be set to `false`. If
  users do not log in via single-sign-on, a new `password` must be provided.
  Note: a user cannot be erased with this API. For more details on
  deactivating and erasing users see [Deactivate Account](#deactivate-account).
- `user_type` - **string** or null, optional. If not provided, the user type will
  not be changed. If `null` is given, the user type will be cleared.
  Other allowed options are: `bot` and `support`.

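Putting the parameters together, a minimal create-or-modify request might look like the following sketch. The route shown is the user admin API's `PUT /_synapse/admin/v2/users/<user_id>` endpoint documented earlier on this page (outside this excerpt); the host, token, and body values are placeholders:

```sh
curl -X PUT \
  --header "Authorization: Bearer <admin_access_token>" \
  --data '{
    "displayname": "Alice Marigold",
    "threepids": [{"medium": "email", "address": "alice@example.com"}],
    "admin": false
  }' \
  "https://synapse.example.com/_synapse/admin/v2/users/@alice:example.com"
```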
## List Accounts

@@ -346,6 +346,7 @@ The above will run a monolithic (single-process) Synapse with SQLite as the data
  A safe example would be `WORKER_TYPES="federation_inbound, federation_sender, synchrotron"`.
  See the [worker documentation](../workers.md) for additional information on workers.
- Passing `ASYNCIO_REACTOR=1` as an environment variable to use the Twisted asyncio reactor instead of the default one.
- Passing `PODMAN=1` will use the [podman](https://podman.io/) container runtime, instead of docker.

To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`, e.g.:
```sh
@@ -155,43 +155,11 @@ def run_upgrade(
Boolean columns require special treatment, since SQLite treats booleans the
same as integers.

There are three separate aspects to this:

* Any new boolean column must be added to the `BOOLEAN_COLUMNS` list in
Any new boolean column must be added to the `BOOLEAN_COLUMNS` list in
`synapse/_scripts/synapse_port_db.py`. This tells the port script to cast
the integer value from SQLite to a boolean before writing the value to the
postgres database.

* Before SQLite 3.23, `TRUE` and `FALSE` were not recognised as constants by
  SQLite, and the `IS [NOT] TRUE`/`IS [NOT] FALSE` operators were not
  supported. This makes it necessary to avoid using `TRUE` and `FALSE`
  constants in SQL commands.

  For example, to insert a `TRUE` value into the database, write:

  ```python
  txn.execute("INSERT INTO tbl(col) VALUES (?)", (True, ))
  ```

* Default values for new boolean columns present a particular
  difficulty. Generally it is best to create separate schema files for
  Postgres and SQLite. For example:

  ```sql
  # in 00delta.sql.postgres:
  ALTER TABLE tbl ADD COLUMN col BOOLEAN DEFAULT FALSE;
  ```

  ```sql
  # in 00delta.sql.sqlite:
  ALTER TABLE tbl ADD COLUMN col BOOLEAN DEFAULT 0;
  ```

  Note that there is a particularly insidious failure mode here: the Postgres
  flavour will be accepted by SQLite 3.22, but will give a column whose
  default value is the **string** `"FALSE"` - which, when cast back to a boolean
  in Python, evaluates to `True`.


## `event_id` global uniqueness

@@ -103,6 +103,9 @@ Called during a logout request for a user. It is passed the qualified user ID, the
deactivated device (if any: access tokens are occasionally created without an associated
device ID), and the (now deactivated) access token.

Deleting the related pushers is done after calling `on_logged_out`, so you can rely on them
to still be present.

If multiple modules implement this callback, Synapse runs them all in order.

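As a sketch, a module consuming this callback could look like the following. The callback signature matches the one described above; the class name, config handling, and print statement are illustrative assumptions:

```python
from typing import Optional

from synapse.module_api import ModuleApi


class LogoutAuditExample:
    def __init__(self, config: dict, api: ModuleApi):
        # Register our coroutine as an `on_logged_out` callback.
        api.register_password_auth_provider_callbacks(on_logged_out=self.on_logged_out)

    async def on_logged_out(
        self, user_id: str, device_id: Optional[str], access_token: str
    ) -> None:
        # Pushers tied to this access token still exist at this point; they are
        # only deleted after all `on_logged_out` callbacks have run.
        print(f"{user_id} logged out of device {device_id!r}")
```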
### `get_username_for_registration`

@@ -26,8 +26,8 @@ for most users.
#### Docker images and Ansible playbooks

There is an official synapse image available at
<https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
the docker-compose file available at
<https://hub.docker.com/r/matrixdotorg/synapse> or at [`ghcr.io/matrix-org/synapse`](https://ghcr.io/matrix-org/synapse)
which can be used with the docker-compose file available at
[contrib/docker](https://github.com/matrix-org/synapse/tree/develop/contrib/docker).
Further information on this including configuration options is available in the README
on hub.docker.com.

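For example, either registry can be pulled from directly (the tag choice is yours):

```sh
docker pull matrixdotorg/synapse:latest
# or the same image from GitHub's container registry:
docker pull ghcr.io/matrix-org/synapse:latest
```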
@@ -25,7 +25,7 @@ position of all streams. The server then periodically sends `RDATA` commands
which have the format `RDATA <stream_name> <instance_name> <token> <row>`, where
the format of `<row>` is defined by the individual streams. The
`<instance_name>` is the name of the Synapse process that generated the data
(usually "master").
(usually "master"). We expect an RDATA for every row in the DB.

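For instance, a fragment of server-to-client traffic on the `events` stream could look like this (the tokens and event IDs are made up, and the row payload is stream-specific and elided here):

```
> POSITION events master 53 53
> RDATA events master 54 ["$foo1:bar.com", ...]
> RDATA events master 55 ["$foo4:bar.com", ...]
```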
Error reporting happens by either the client or server sending an ERROR
command, and usually the connection will be closed.
@@ -107,7 +107,7 @@ reconnect, following the steps above.
If the server sends messages faster than the client can consume them the
server will first buffer a (fairly large) number of commands and then
disconnect the client. This ensures that we don't queue up an unbounded
number of commands in memory and gives us a potential oppurtunity to
number of commands in memory and gives us a potential opportunity to
squawk loudly. When/if the client recovers it can reconnect to the
server and ask for missed messages.

@@ -122,7 +122,7 @@ since these include tokens which can be used to restart the stream on
connection errors.

The client should keep track of the token in the last RDATA command
received for each stream so that on reconneciton it can start streaming
received for each stream so that on reconnection it can start streaming
from the correct place. Note: not all RDATA have valid tokens due to
batching. See `RdataCommand` for more details.

@@ -188,7 +188,8 @@ client (C):
Two positions are included, the "new" position and the last position sent respectively.
This allows servers to tell instances that the positions have advanced but no
data has been written, without clients needlessly checking to see if they
have missed any updates.
have missed any updates. Instances will only fetch stuff if there is a gap between
their current position and the given last position.

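As an illustration (the stream name and token values here are made up), a command such as the following advertises both tokens; an instance whose current position for the stream already matches the advertised last position sees no gap and does nothing, while an instance that is behind fetches the missing rows before advancing:

```
> POSITION caches master 58 60
```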
#### ERROR (S, C)

@@ -88,6 +88,34 @@ process, for example:
    dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```

# Upgrading to v1.81.0

## Application service path & authentication deprecations

Synapse now attempts the versioned appservice paths before falling back to the
[legacy paths](https://spec.matrix.org/v1.6/application-service-api/#legacy-routes).
Usage of the legacy routes should be considered deprecated.

Additionally, Synapse has supported sending the application service access token
via [the `Authorization` header](https://spec.matrix.org/v1.6/application-service-api/#authorization)
since v1.70.0. For backwards compatibility it is *also* sent as the `access_token`
query parameter. This is insecure and should be considered deprecated.

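In practice this means appservice implementations should accept requests of the following shape, using the versioned transaction route; the hostname, token, and transaction ID below are placeholders:

```
PUT /_matrix/app/v1/transactions/<txn_id> HTTP/1.1
Host: appservice.example.com
Authorization: Bearer <hs_token>
```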
A future version of Synapse (v1.88.0 or later) will remove support for legacy
application service routes and query parameter authorization.

# Upgrading to v1.80.0

## Reporting events error code change

Before this update, the
[`POST /_matrix/client/v3/rooms/{roomId}/report/{eventId}`](https://spec.matrix.org/v1.6/client-server-api/#post_matrixclientv3roomsroomidreporteventid)
endpoint would return a `403` if a user attempted to report an event that they did not have access to.
This endpoint will now return a `404` in this case instead.

Clients that implement event reporting should check that their error handling code will handle this
change.

# Upgrading to v1.79.0

## The `on_threepid_bind` module callback method has been deprecated
@@ -171,6 +199,17 @@ Docker images and Debian packages need nothing specific as they already
include or specify ICU as an explicit dependency.


## User directory rebuild

Synapse 1.74 queues a background update
[to rebuild the user directory](https://github.com/matrix-org/synapse/pull/14643),
in order to fix missing or erroneous entries.

When this update begins, the user directory will be cleared out and rebuilt from
scratch. User directory lookups will be incomplete until the rebuild completes.
Admins can monitor the rebuild's progress by using the
[Background update Admin API](usage/administration/admin_api/background_updates.md#status).

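For example, progress can be checked with a request along these lines (placeholder host and token):

```sh
curl --header "Authorization: Bearer <admin_access_token>" \
  "https://synapse.example.com/_synapse/admin/v1/background_updates/status"
```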
# Upgrading to v1.73.0

## Legacy Prometheus metric names have now been removed

@@ -577,6 +577,10 @@ delete any device that hasn't been accessed for more than the specified amount o

Defaults to no duration, which means devices are never pruned.

**Note:** This task will always run on the main process, regardless of the value of
`run_background_tasks_on`. This is due to workers currently not having the ability to
delete devices.

Example configuration:
```yaml
delete_stale_devices_after: 1y
@@ -1521,7 +1525,7 @@ This option specifies several limits for login:
  address. Defaults to `per_second: 0.003`, `burst_count: 5`.

* `account` ratelimits login requests based on the account the
  client is attempting to log into. Defaults to `per_second: 0.03`,
  client is attempting to log into. Defaults to `per_second: 0.003`,
  `burst_count: 5`.

* `failed_attempts` ratelimits login requests based on the account the
@@ -1764,6 +1768,30 @@ Example configuration:
max_image_pixels: 35M
```
---
### `prevent_media_downloads_from`

A list of domains to never download media from. Media from these
domains that is already downloaded will not be deleted, but will be
inaccessible to users. This option does not affect admin APIs trying
to download/operate on media.

This will not prevent the listed domains from accessing media themselves.
It simply prevents users on this server from downloading media originating
from the listed servers.

This will have no effect on media originating from the local server.
This only affects media downloaded from other Matrix servers; to
block domains from URL previews, see [`url_preview_url_blacklist`](#url_preview_url_blacklist).

Defaults to an empty list (nothing blocked).

Example configuration:
```yaml
prevent_media_downloads_from:
 - evil.example.org
 - evil2.example.org
```
---
### `dynamic_thumbnails`

Whether to generate new thumbnails on the fly to precisely match
@@ -3100,6 +3128,11 @@ Options for each entry include:
   match a pre-existing account instead of failing. This could be used if
   switching from password logins to OIDC. Defaults to false.

* `enable_registration`: set to 'false' to disable automatic registration of new
   users. This allows the OIDC SSO flow to be limited to sign in only, rather than
   automatically registering users that have a valid SSO login but do not have
   a pre-registered account. Defaults to true.

* `user_mapping_provider`: Configuration for how attributes returned from a OIDC
   provider are mapped onto a matrix user. This setting has the following
   sub-properties:
@@ -3216,6 +3249,7 @@ oidc_providers:
     userinfo_endpoint: "https://accounts.example.com/userinfo"
     jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
     skip_verification: true
     enable_registration: true
     user_mapping_provider:
       config:
         subject_claim: "id"
@@ -3432,6 +3466,9 @@ This option has a number of sub-options. They are as follows:
  user has unread messages in. Defaults to true, meaning push clients will see the number of
  rooms with unread messages in them. Set to false to instead send the number
  of unread messages.
* `jitter_delay`: Delays push notifications by a random amount up to the given
  duration. Useful for mitigating timing attacks. Optional, defaults to no
  delay. _Added in Synapse 1.84.0._

Example configuration:
```yaml
@@ -3439,6 +3476,7 @@ push:
  enabled: true
  include_content: false
  group_unread_count_by_room: false
  jitter_delay: "10s"
```
---
## Rooms
@@ -3685,6 +3723,16 @@ default_power_level_content_override:
  trusted_private_chat: null
  public_chat: null
```
---
### `forget_rooms_on_leave`

Set to true to automatically forget rooms for users when they leave them, either
normally or via a kick or ban. Defaults to false.

Example configuration:
```yaml
forget_rooms_on_leave: false
```

---
## Opentracing

@@ -234,6 +234,8 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases
    ^/_matrix/client/(api/v1|r0|v3|unstable)/search$
    ^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)
    ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$
    ^/_matrix/client/(r0|v3|unstable)/capabilities$

    # Encryption requests
    ^/_matrix/client/(r0|v3|unstable)/keys/query$
@@ -245,7 +247,9 @@ information.
    # Registration/login requests
    ^/_matrix/client/(api/v1|r0|v3|unstable)/login$
    ^/_matrix/client/(r0|v3|unstable)/register$
    ^/_matrix/client/(r0|v3|unstable)/register/available$
    ^/_matrix/client/v1/register/m.login.registration_token/validity$
    ^/_matrix/client/(r0|v3|unstable)/password_policy$

    # Event sending requests
    ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/redact
@@ -321,8 +325,7 @@ load balancing can be done in different ways.

For `/sync` and `/initialSync` requests it will be more efficient if all
requests from a particular user are routed to a single instance. This can
be done e.g. in nginx via IP `hash $http_x_forwarded_for;` or via
`hash $http_authorization consistent;` which contains the users access token.
be done in the reverse proxy by extracting the username part from the user's access token.

Admins may additionally wish to separate out `/sync`
requests that have a `since` query parameter from those that don't (and
@@ -331,6 +334,69 @@ when a user logs in on a new device and can be *very* resource intensive, so
isolating these requests will stop them from interfering with other users' ongoing
syncs.

Example `nginx` configuration snippet that handles the cases above. This is just an
example and probably requires some changes according to your particular setup:

```nginx
# Choose sync worker based on the existence of "since" query parameter
map $arg_since $sync {
    default synapse_sync;
    '' synapse_initial_sync;
}

# Extract username from access token passed as URL parameter
map $arg_access_token $accesstoken_from_urlparam {
    # Defaults to just passing back the whole accesstoken
    default $arg_access_token;
    # Try to extract username part from accesstoken URL parameter
    "~syt_(?<username>.*?)_.*" $username;
}

# Extract username from access token passed as authorization header
map $http_authorization $mxid_localpart {
    # Defaults to just passing back the whole accesstoken
    default $http_authorization;
    # Try to extract username part from accesstoken header
    "~Bearer syt_(?<username>.*?)_.*" $username;
    # if no authorization-header exist, try mapper for URL parameter "access_token"
    "" $accesstoken_from_urlparam;
}

upstream synapse_initial_sync {
    # Use the username mapper result for hash key
    hash $mxid_localpart consistent;
    server 127.0.0.1:8016;
    server 127.0.0.1:8036;
}

upstream synapse_sync {
    # Use the username mapper result for hash key
    hash $mxid_localpart consistent;
    server 127.0.0.1:8013;
    server 127.0.0.1:8037;
    server 127.0.0.1:8038;
    server 127.0.0.1:8039;
}

# Sync initial/normal
location ~ ^/_matrix/client/(r0|v3)/sync$ {
    proxy_pass http://$sync;
}

# Normal sync
location ~ ^/_matrix/client/(api/v1|r0|v3)/events$ {
    proxy_pass http://synapse_sync;
}

# Initial_sync
location ~ ^/_matrix/client/(api/v1|r0|v3)/initialSync$ {
    proxy_pass http://synapse_initial_sync;
}
location ~ ^/_matrix/client/(api/v1|r0|v3)/rooms/[^/]+/initialSync$ {
    proxy_pass http://synapse_initial_sync;
}
```

Federation and client requests can be balanced via simple round robin.

The inbound federation transaction request `^/_matrix/federation/v1/send/`

flake.lock (generated, new file, 274 lines)
@@ -0,0 +1,274 @@
{
  "nodes": {
    "devenv": {
      "inputs": {
        "flake-compat": "flake-compat",
        "nix": "nix",
        "nixpkgs": "nixpkgs",
        "pre-commit-hooks": "pre-commit-hooks"
      },
      "locked": {
        "lastModified": 1683102061,
        "narHash": "sha256-kOphT6V0uQUlFNBP3GBjs7DAU7fyZGGqCs9ue1gNY6E=",
        "owner": "cachix",
        "repo": "devenv",
        "rev": "ff1f29e41756553174d596cafe3a9fa77595100b",
        "type": "github"
      },
      "original": {
        "owner": "cachix",
        "ref": "main",
        "repo": "devenv",
        "type": "github"
      }
    },
    "fenix": {
      "inputs": {
        "nixpkgs": [
          "nixpkgs"
        ],
        "rust-analyzer-src": "rust-analyzer-src"
      },
      "locked": {
        "lastModified": 1682490133,
        "narHash": "sha256-tR2Qx0uuk97WySpSSk4rGS/oH7xb5LykbjATcw1vw1I=",
        "owner": "nix-community",
        "repo": "fenix",
        "rev": "4e9412753ab75ef0e038a5fe54a062fb44c27c6a",
        "type": "github"
      },
      "original": {
        "owner": "nix-community",
        "repo": "fenix",
        "type": "github"
      }
    },
    "flake-compat": {
      "flake": false,
      "locked": {
        "lastModified": 1673956053,
        "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
        "owner": "edolstra",
        "repo": "flake-compat",
        "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
        "type": "github"
      },
      "original": {
        "owner": "edolstra",
        "repo": "flake-compat",
        "type": "github"
      }
    },
    "flake-utils": {
      "locked": {
        "lastModified": 1667395993,
        "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "gitignore": {
      "inputs": {
        "nixpkgs": [
          "devenv",
          "pre-commit-hooks",
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1660459072,
        "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=",
        "owner": "hercules-ci",
        "repo": "gitignore.nix",
        "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73",
        "type": "github"
      },
      "original": {
        "owner": "hercules-ci",
        "repo": "gitignore.nix",
        "type": "github"
      }
    },
    "lowdown-src": {
      "flake": false,
      "locked": {
        "lastModified": 1633514407,
        "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
        "owner": "kristapsdz",
        "repo": "lowdown",
        "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
        "type": "github"
      },
      "original": {
        "owner": "kristapsdz",
        "repo": "lowdown",
        "type": "github"
      }
    },
    "nix": {
      "inputs": {
        "lowdown-src": "lowdown-src",
        "nixpkgs": [
          "devenv",
          "nixpkgs"
        ],
        "nixpkgs-regression": "nixpkgs-regression"
      },
      "locked": {
        "lastModified": 1676545802,
        "narHash": "sha256-EK4rZ+Hd5hsvXnzSzk2ikhStJnD63odF7SzsQ8CuSPU=",
        "owner": "domenkozar",
        "repo": "nix",
        "rev": "7c91803598ffbcfe4a55c44ac6d49b2cf07a527f",
        "type": "github"
      },
      "original": {
        "owner": "domenkozar",
        "ref": "relaxed-flakes",
        "repo": "nix",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1678875422,
        "narHash": "sha256-T3o6NcQPwXjxJMn2shz86Chch4ljXgZn746c2caGxd8=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "126f49a01de5b7e35a43fd43f891ecf6d3a51459",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixpkgs-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "nixpkgs-regression": {
      "locked": {
        "lastModified": 1643052045,
        "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
        "type": "github"
      }
    },
    "nixpkgs-stable": {
      "locked": {
        "lastModified": 1673800717,
        "narHash": "sha256-SFHraUqLSu5cC6IxTprex/nTsI81ZQAtDvlBvGDWfnA=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "2f9fd351ec37f5d479556cd48be4ca340da59b8f",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-22.11",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "nixpkgs_2": {
      "locked": {
        "lastModified": 1682519441,
        "narHash": "sha256-Vsq/8NOtvW1AoC6shCBxRxZyMQ+LhvPuJT6ltbzuv+Y=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "7a32a141db568abde9bc389845949dc2a454dfd3",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "master",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "pre-commit-hooks": {
      "inputs": {
        "flake-compat": [
          "devenv",
          "flake-compat"
        ],
        "flake-utils": "flake-utils",
        "gitignore": "gitignore",
        "nixpkgs": [
          "devenv",
          "nixpkgs"
        ],
        "nixpkgs-stable": "nixpkgs-stable"
      },
      "locked": {
        "lastModified": 1678376203,
        "narHash": "sha256-3tyYGyC8h7fBwncLZy5nCUjTJPrHbmNwp47LlNLOHSM=",
        "owner": "cachix",
        "repo": "pre-commit-hooks.nix",
        "rev": "1a20b9708962096ec2481eeb2ddca29ed747770a",
        "type": "github"
      },
      "original": {
        "owner": "cachix",
        "repo": "pre-commit-hooks.nix",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "devenv": "devenv",
        "fenix": "fenix",
        "nixpkgs": "nixpkgs_2",
        "systems": "systems"
      }
    },
    "rust-analyzer-src": {
      "flake": false,
      "locked": {
        "lastModified": 1682426789,
        "narHash": "sha256-UqnLmJESRZE0tTEaGbRAw05Hm19TWIPA+R3meqi5I4w=",
        "owner": "rust-lang",
        "repo": "rust-analyzer",
        "rev": "943d2a8a1ca15e8b28a1f51f5a5c135e3728da04",
        "type": "github"
      },
      "original": {
        "owner": "rust-lang",
        "ref": "nightly",
        "repo": "rust-analyzer",
        "type": "github"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix (new file, 204 lines)
@@ -0,0 +1,204 @@
# A nix flake that sets up a complete Synapse development environment. Dependencies
# for the SyTest (https://github.com/matrix-org/sytest) and Complement
# (https://github.com/matrix-org/complement) Matrix homeserver test suites are also
# installed automatically.
#
# You must have already installed nix (https://nixos.org) on your system to use this.
# nix can be installed on Linux or MacOS; NixOS is not required. Windows is not
# directly supported, but nix can be installed inside of WSL2 or even Docker
# containers. Please refer to https://nixos.org/download for details.
#
# You must also enable support for flakes in Nix. See the following for how to
# do so permanently: https://nixos.wiki/wiki/Flakes#Enable_flakes
#
# Usage:
#
# With nix installed, navigate to the directory containing this flake and run
# `nix develop --impure`. The `--impure` is necessary in order to store state
# locally from "services", such as PostgreSQL and Redis.
#
# You should now be dropped into a new shell with all programs and dependencies
# available to you!
#
# You can start up pre-configured, local PostgreSQL and Redis instances by
# running: `devenv up`. To stop them, use Ctrl-C.
#
# A PostgreSQL database called 'synapse' will be set up for you, along with
# a PostgreSQL user named 'synapse_user'.
# The 'host' can be found by running `echo $PGHOST` with the development
# shell activated. Use these values to configure your Synapse to connect
# to the local PostgreSQL database. You do not need to specify a password.
# https://matrix-org.github.io/synapse/latest/postgres
#
# All state (the venv, postgres and redis data and config) are stored in
# .devenv/state. Deleting a file from here and then re-entering the shell
# will recreate these files from scratch.
#
# You can exit the development shell by typing `exit`, or using Ctrl-D.
#
# If you would like this development environment to activate automatically
# upon entering this directory in your terminal, first install `direnv`
# (https://direnv.net/). Then run `echo 'use flake . --impure' >> .envrc` at
# the root of the Synapse repo. Finally, run `direnv allow .` to allow the
# contents of '.envrc' to run every time you enter this directory. Voilà!

{
  inputs = {
    # Use the master/unstable branch of nixpkgs. The latest stable, 22.11,
    # does not contain 'perl536Packages.NetAsyncHTTP', needed by Sytest.
    nixpkgs.url = "github:NixOS/nixpkgs/master";
    # Output a development shell for x86_64/aarch64 Linux/Darwin (MacOS).
    systems.url = "github:nix-systems/default";
    # A development environment manager built on Nix. See https://devenv.sh.
    devenv.url = "github:cachix/devenv/main";
    # Rust toolchains and rust-analyzer nightly.
    fenix = {
      url = "github:nix-community/fenix";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs = { self, nixpkgs, devenv, systems, ... } @ inputs:
    let
      forEachSystem = nixpkgs.lib.genAttrs (import systems);
    in {
      devShells = forEachSystem (system:
        let
          pkgs = nixpkgs.legacyPackages.${system};
        in {
          # Everything is configured via devenv - a nix module for creating declarative
          # developer environments. See https://devenv.sh/reference/options/ for a list
          # of all possible options.
          default = devenv.lib.mkShell {
            inherit inputs pkgs;
            modules = [
              {
                # Make use of the Starship command prompt when this development environment
                # is manually activated (via `nix develop --impure`).
                # See https://starship.rs/ for details on the prompt itself.
                starship.enable = true;

                # Configure packages to install.
                # Search for package names at https://search.nixos.org/packages?channel=unstable
                packages = with pkgs; [
                  # Native dependencies for running Synapse.
                  icu
                  libffi
                  libjpeg
                  libpqxx
                  libwebp
                  libxml2
                  libxslt
                  sqlite

                  # Native dependencies for unit tests (SyTest also requires OpenSSL).
                  openssl
                  xmlsec

                  # Native dependencies for running Complement.
                  olm

                  # For building the Synapse documentation website.
                  mdbook
                ];

                # Install Python and manage a virtualenv with Poetry.
                languages.python.enable = true;
                languages.python.poetry.enable = true;
                # Automatically activate the poetry virtualenv upon entering the shell.
                languages.python.poetry.activate.enable = true;
                # Install all extra Python dependencies; this is needed to run the unit
                # tests and utilise all Synapse features.
                languages.python.poetry.install.arguments = ["--extras all"];
                # Install the 'matrix-synapse' package from the local checkout.
                languages.python.poetry.install.installRootPackage = true;

                # This is a work-around for NixOS systems. NixOS is special in
                # that you can have multiple versions of packages installed at
                # once, including your libc linker!
                #
                # Some binaries built for Linux expect those to be in a certain
                # filepath, but that is not the case on NixOS. In that case, we
                # force compiling those binaries locally instead.
                env.POETRY_INSTALLER_NO_BINARY = "ruff";

                # Install dependencies for the additional programming languages
                # involved with Synapse development.
                #
                # * Rust is used for developing and running Synapse.
                # * Golang is needed to run the Complement test suite.
                # * Perl is needed to run the SyTest test suite.
                languages.go.enable = true;
                languages.rust.enable = true;
                languages.rust.version = "stable";
                languages.perl.enable = true;

                # Postgres is needed to run Synapse with postgres support and
                # to run certain unit tests that require postgres.
                services.postgres.enable = true;

                # On the first invocation of `devenv up`, create a database for
                # Synapse to store data in.
                services.postgres.initdbArgs = ["--locale=C" "--encoding=UTF8"];
                services.postgres.initialDatabases = [
                  { name = "synapse"; }
                ];
                # Create a postgres user called 'synapse_user' which has ownership
                # over the 'synapse' database.
                services.postgres.initialScript = ''
                  CREATE USER synapse_user;
                  ALTER DATABASE synapse OWNER TO synapse_user;
                '';

                # Redis is needed in order to run Synapse in worker mode.
                services.redis.enable = true;

                # Define the perl modules we require to run SyTest.
                #
                # This list was compiled by cross-referencing https://metacpan.org/
                # with the modules defined in './cpanfile' and then finding the
                # corresponding nix packages on https://search.nixos.org/packages.
                #
                # This was done until `./install-deps.pl --dryrun` produced no output.
                env.PERL5LIB = "${with pkgs.perl536Packages; makePerlPath [
                  DBI
                  ClassMethodModifiers
                  CryptEd25519
                  DataDump
                  DBDPg
                  DigestHMAC
                  DigestSHA1
                  EmailAddressXS
                  EmailMIME
                  EmailSimple # required by Email::Mime
                  EmailMessageID # required by Email::Mime
                  EmailMIMEContentType # required by Email::Mime
                  TextUnidecode # required by Email::Mime
                  ModuleRuntime # required by Email::Mime
                  EmailMIMEEncodings # required by Email::Mime
                  FilePath
                  FileSlurper
                  Future
                  GetoptLong
                  HTTPMessage
                  IOAsync
                  IOAsyncSSL
                  IOSocketSSL
                  NetSSLeay
                  JSON
                  ListUtilsBy
                  ScalarListUtils
                  ModulePluggable
                  NetAsyncHTTP
                  MetricsAny # required by Net::Async::HTTP
                  NetAsyncHTTPServer
                  StructDumb
                  URI
                  YAMLLibYAML
                ]}";
              }
            ];
          };
        });
    };
}
mypy.ini (27 changes)
@@ -21,26 +21,7 @@ files =
  tests/,
  build_rust.py

# Note: Better exclusion syntax coming in mypy > 0.910
# https://github.com/python/mypy/pull/11329
#
# For now, set the (?x) flag to enable "verbose" regexes
# https://docs.python.org/3/library/re.html#re.X
exclude = (?x)
  ^(
   |synapse/storage/databases/__init__.py
   |synapse/storage/databases/main/cache.py
   |synapse/storage/schema/
  )$

[mypy-synapse.federation.transport.client]
disallow_untyped_defs = False

[mypy-synapse.http.matrixfederationclient]
disallow_untyped_defs = False

[mypy-synapse.metrics._reactor_metrics]
disallow_untyped_defs = False
# This module imports select.epoll. That exists on Linux, but doesn't on macOS.
# See https://github.com/matrix-org/synapse/pull/11771.
warn_unused_ignores = False
@@ -48,9 +29,6 @@ warn_unused_ignores = False
[mypy-synapse.util.caches.treecache]
disallow_untyped_defs = False

[mypy-synapse.storage.database]
disallow_untyped_defs = False

[mypy-tests.util.caches.test_descriptors]
disallow_untyped_defs = False

@@ -74,11 +52,6 @@ ignore_missing_imports = True
[mypy-msgpack]
ignore_missing_imports = True

# Note: WIP stubs available at
# https://github.com/microsoft/python-type-stubs/tree/64934207f523ad6b611e6cfe039d85d7175d7d0d/netaddr
[mypy-netaddr]
ignore_missing_imports = True

[mypy-parameterized.*]
ignore_missing_imports = True

poetry.lock (generated, 2191 changes) — file diff suppressed because it is too large
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
version = "1.79.0rc1"
version = "1.83.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -153,15 +153,13 @@ python = "^3.7.1"
# ----------------------
# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
jsonschema = ">=3.0.0"
# frozendict 2.1.2 is broken on Debian 10: https://github.com/Marco-Sulla/python-frozendict/issues/41
# We cannot test our wheels against the 2.3.5 release in CI. Putting in an upper bound for this
# because frozendict has been more trouble than it's worth; we would like to move to immutabledict.
frozendict = ">=1,!=2.1.2,<2.3.5"
# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
immutabledict = ">=2.0"
# We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
unpaddedbase64 = ">=2.1.0"
# We require 1.5.0 to work around an issue when running against the C implementation of
# frozendict: https://github.com/matrix-org/python-canonicaljson/issues/36
canonicaljson = "^1.5.0"
# We require 2.0.0 for immutabledict support.
canonicaljson = "^2.0.0"
# we use the type definitions added in signedjson 1.1.
signedjson = "^1.1.0"
# validating SSL certs for IP addresses requires service_identity 18.1.
@@ -313,7 +311,7 @@ all = [
# We pin black so that our tests don't start failing on new releases.
isort = ">=5.10.1"
black = ">=22.3.0"
ruff = "0.0.252"
ruff = "0.0.265"

# Typechecking
mypy = "*"
@@ -321,6 +319,7 @@ mypy-zope = "*"
types-bleach = ">=4.1.0"
types-commonmark = ">=0.9.2"
types-jsonschema = ">=3.2.0"
types-netaddr = ">=0.8.0.6"
types-opentracing = ">=2.4.2"
types-Pillow = ">=8.3.4"
types-psycopg2 = ">=2.9.9"
@@ -351,6 +350,18 @@ towncrier = ">=18.6.0rc1"
# Used for checking the Poetry lockfile
tomli = ">=1.2.3"


# Dependencies for building the development documentation
[tool.poetry.group.dev-docs]
optional = true

[tool.poetry.group.dev-docs.dependencies]
sphinx = {version = "^6.1", python = "^3.8"}
sphinx-autodoc2 = {version = "^0.4.2", python = "^3.8"}
myst-parser = {version = "^1.0.0", python = "^3.8"}
furo = ">=2022.12.7,<2024.0.0"


[build-system]
# The upper bounds here are defensive, intended to prevent situations like
# #13849 and #14079 where we see buildtime or runtime errors caused by build

@@ -57,7 +57,7 @@ pub const BASE_PREPEND_OVERRIDE_RULES: &[PushRule] = &[PushRule {
    rule_id: Cow::Borrowed("global/override/.m.rule.master"),
    priority_class: 5,
    conditions: Cow::Borrowed(&[]),
    actions: Cow::Borrowed(&[Action::DontNotify]),
    actions: Cow::Borrowed(&[]),
    default: true,
    default_enabled: false,
}];
@@ -88,7 +88,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
                pattern: Cow::Borrowed("m.notice"),
            },
        ))]),
        actions: Cow::Borrowed(&[Action::DontNotify]),
        actions: Cow::Borrowed(&[]),
        default: true,
        default_enabled: true,
    },
@@ -122,7 +122,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
                pattern: Cow::Borrowed("m.room.member"),
            },
        ))]),
        actions: Cow::Borrowed(&[Action::DontNotify]),
        actions: Cow::Borrowed(&[]),
        default: true,
        default_enabled: true,
    },

@@ -140,7 +140,7 @@ impl PushRuleEvaluator {
    /// name.
    ///
    /// Returns the set of actions, if any, that match (filtering out any
    /// `dont_notify` actions).
    /// `dont_notify` and `coalesce` actions).
    pub fn run(
        &self,
        push_rules: &FilteredPushRules,
@@ -198,8 +198,9 @@ impl PushRuleEvaluator {
        let actions = push_rule
            .actions
            .iter()
            // Filter out "dont_notify" actions, as we don't store them.
            .filter(|a| **a != Action::DontNotify)
            // Filter out "dont_notify" and "coalesce" actions, as we don't store them
            // (since they result in no action by the pushers).
            .filter(|a| **a != Action::DontNotify && **a != Action::Coalesce)
            .cloned()
            .collect();


@@ -164,11 +164,13 @@ impl PushRule {
/// The "action" Synapse should perform for a matching push rule.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Action {
    DontNotify,
    Notify,
    Coalesce,
    SetTweak(SetTweak),

    // Legacy actions that should be understood, but are equivalent to no-ops.
    DontNotify,
    Coalesce,

    // An unrecognized custom action.
    Unknown(Value),
}
@@ -568,7 +570,10 @@ impl FilteredPushRules {
        .filter(|rule| {
            // Ignore disabled experimental push rules

            if !self.msc1767_enabled && rule.rule_id.contains("org.matrix.msc1767") {
            if !self.msc1767_enabled
                && (rule.rule_id.contains("org.matrix.msc1767")
                    || rule.rule_id.contains("org.matrix.msc3933"))
            {
                return false;
            }

@@ -28,6 +28,7 @@ DISTS = (
    "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
    "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04)
    "ubuntu:kinetic",  # 22.10 (EOL 2023-07-20)
    "ubuntu:lunar",  # 23.04 (EOL 2024-01)
)

DESC = """\

@@ -40,10 +40,32 @@ def main(force_colors: bool) -> None:
        exec(r, locals)
    current_schema_version = locals["SCHEMA_VERSION"]

    click.secho(f"Current schema version: {current_schema_version}")

    diffs: List[git.Diff] = repo.remote().refs.develop.commit.diff(None)

    # Get the schema version of the local file to check against current schema on develop
    with open("synapse/storage/schema/__init__.py", "r") as file:
        local_schema = file.read()
    new_locals: Dict[str, Any] = {}
    exec(local_schema, new_locals)
    local_schema_version = new_locals["SCHEMA_VERSION"]

    if local_schema_version != current_schema_version:
        # local schema version must be +/-1 the current schema version on develop
        if abs(local_schema_version - current_schema_version) != 1:
            click.secho(
                "The proposed schema version has diverged more than one version from develop, please fix!",
                fg="red",
                bold=True,
                color=force_colors,
            )
            click.get_current_context().exit(1)

        # right, we've changed the schema version within the allowable tolerance so
        # let's now use the local version as the canonical version
        current_schema_version = local_schema_version

    click.secho(f"Current schema version: {current_schema_version}")

    seen_deltas = False
    bad_files = []
    for diff in diffs:

@@ -11,6 +11,11 @@
# filepath of a local Complement checkout or by setting the COMPLEMENT_REF
# environment variable to pull a different branch or commit.
#
# To use the 'podman' command instead of 'docker', set the PODMAN environment
# variable. Example:
#
#   PODMAN=1 ./complement.sh
#
# By default Synapse is run in monolith mode. This can be overridden by
# setting the WORKERS environment variable.
#
@@ -30,7 +35,6 @@
# Exit if a line returns a non-zero exit code
set -e

# Helper to emit annotations that collapse portions of the log in GitHub Actions
echo_if_github() {
  if [[ -n "$GITHUB_WORKFLOW" ]]; then
@@ -100,6 +104,16 @@ done
# enable buildkit for the docker builds
export DOCKER_BUILDKIT=1

# Determine whether to use the docker or podman container runtime.
if [ -n "$PODMAN" ]; then
    export CONTAINER_RUNTIME=podman
    export DOCKER_HOST=unix://$XDG_RUNTIME_DIR/podman/podman.sock
    export BUILDAH_FORMAT=docker
    export COMPLEMENT_HOSTNAME_RUNNING_COMPLEMENT=host.containers.internal
else
    export CONTAINER_RUNTIME=docker
fi

# Change to the repository root
cd "$(dirname $0)/.."

@@ -126,16 +140,16 @@ if [ -n "$use_editable_synapse" ]; then
    editable_mount="$(realpath .):/editable-src:z"
    if [ -n "$rebuild_editable_synapse" ]; then
        unset skip_docker_build
    elif docker inspect complement-synapse-editable &>/dev/null; then
    elif $CONTAINER_RUNTIME inspect complement-synapse-editable &>/dev/null; then
        # complement-synapse-editable already exists: see if we can still use it:
        # - The Rust module must still be importable; it will fail to import if the Rust source has changed.
        # - The Poetry lock file must be the same (otherwise we assume dependencies have changed)

        # First set up the module in the right place for an editable installation.
        docker run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
        $CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so

        if (docker run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
            && docker run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
        if ($CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
            && $CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
            skip_docker_build=1
        else
            echo "Editable Synapse image is stale. Will rebuild."
@@ -149,25 +163,25 @@ if [ -z "$skip_docker_build" ]; then

    # Build a special image designed for use in development with editable
    # installs.
    docker build -t synapse-editable \
    $CONTAINER_RUNTIME build -t synapse-editable \
        -f "docker/editable.Dockerfile" .

    docker build -t synapse-workers-editable \
    $CONTAINER_RUNTIME build -t synapse-workers-editable \
        --build-arg FROM=synapse-editable \
        -f "docker/Dockerfile-workers" .

    docker build -t complement-synapse-editable \
    $CONTAINER_RUNTIME build -t complement-synapse-editable \
        --build-arg FROM=synapse-workers-editable \
        -f "docker/complement/Dockerfile" "docker/complement"

    # Prepare the Rust module
    docker run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
    $CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so

else

    # Build the base Synapse image from the local checkout
    echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
    docker build -t matrixdotorg/synapse \
    $CONTAINER_RUNTIME build -t matrixdotorg/synapse \
        --build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
        --build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
        -f "docker/Dockerfile" .
@@ -175,12 +189,12 @@ if [ -z "$skip_docker_build" ]; then

    # Build the workers docker image (from the base Synapse image we just built).
    echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
    docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
    $CONTAINER_RUNTIME build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
    echo_if_github "::endgroup::"

    # Build the unified Complement image (from the worker Synapse image we just built).
    echo_if_github "::group::Build Docker image: complement/Dockerfile"
    docker build -t complement-synapse \
    $CONTAINER_RUNTIME build -t complement-synapse \
        -f "docker/complement/Dockerfile" "docker/complement"
    echo_if_github "::endgroup::"

@@ -91,6 +91,7 @@ else
|
||||
"synapse" "docker" "tests"
|
||||
"scripts-dev"
|
||||
"contrib" "synmark" "stubs" ".ci"
|
||||
"dev-docs"
|
||||
)
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -280,7 +280,7 @@ def _prepare() -> None:
|
||||
)
|
||||
|
||||
print("Opening the changelog in your browser...")
|
||||
print("Please ask others to give it a check.")
|
||||
print("Please ask #synapse-dev to give it a check.")
|
||||
click.launch(
|
||||
f"https://github.com/matrix-org/synapse/blob/{synapse_repo.active_branch.name}/CHANGES.md"
|
||||
)
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
# Copyright 2020 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Stub for frozendict.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Hashable, Iterable, Iterator, Mapping, Tuple, TypeVar, overload
|
||||
|
||||
_KT = TypeVar("_KT", bound=Hashable) # Key type.
|
||||
_VT = TypeVar("_VT") # Value type.
|
||||
|
||||
class frozendict(Mapping[_KT, _VT]):
|
||||
@overload
|
||||
def __init__(self, **kwargs: _VT) -> None: ...
|
||||
@overload
|
||||
def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
|
||||
@overload
|
||||
def __init__(
|
||||
self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
|
||||
) -> None: ...
|
||||
def __getitem__(self, key: _KT) -> _VT: ...
|
||||
def __contains__(self, key: Any) -> bool: ...
|
||||
def copy(self, **add_or_replace: Any) -> frozendict: ...
|
||||
def __iter__(self) -> Iterator[_KT]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __repr__(self) -> str: ...
|
||||
def __hash__(self) -> int: ...
|
||||
@@ -17,9 +17,9 @@
|
||||
""" This is an implementation of a Matrix homeserver.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import Any, Dict
|
||||
|
||||
from synapse.util.rust import check_rust_lib_up_to_date
|
||||
from synapse.util.stringutils import strtobool
|
||||
@@ -61,11 +61,20 @@ try:
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Use the standard library json implementation instead of simplejson.
|
||||
# Teach canonicaljson how to serialise immutabledicts.
|
||||
try:
|
||||
from canonicaljson import set_json_library
|
||||
from canonicaljson import register_preserialisation_callback
|
||||
from immutabledict import immutabledict
|
||||
|
||||
set_json_library(json)
|
||||
def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]:
|
||||
try:
|
||||
return d._dict
|
||||
except Exception:
|
||||
# Paranoia: fall back to a `dict()` call, in case a future version of
|
||||
# immutabledict removes `_dict` from the implementation.
|
||||
return dict(d)
|
||||
|
||||
register_preserialisation_callback(immutabledict, _immutabledict_cb)
|
||||
except ImportError:
|
||||
pass
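
As an aside, a minimal standalone sketch of the `canonicaljson` pre-serialisation hook the hunk above relies on (assumes canonicaljson >= 2.0 and immutabledict are installed; the sample values are invented):

```python
from canonicaljson import encode_canonical_json, register_preserialisation_callback
from immutabledict import immutabledict

# Teach canonicaljson to serialise immutabledicts by converting to a plain
# dict, as _immutabledict_cb does above (minus the `_dict` fast path).
register_preserialisation_callback(immutabledict, dict)

# Keys come out sorted, per canonical JSON.
print(encode_canonical_json(immutabledict({"b": 2, "a": 1})))  # b'{"a":1,"b":2}'
```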


synapse/_scripts/generate_workers_map.py (new executable file, 302 lines)
@@ -0,0 +1,302 @@
#!/usr/bin/env python
# Copyright 2022-2023 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import logging
import re
from collections import defaultdict
from dataclasses import dataclass
from typing import Dict, Iterable, Optional, Pattern, Set, Tuple

import yaml

from synapse.config.homeserver import HomeServerConfig
from synapse.federation.transport.server import (
    TransportLayerServer,
    register_servlets as register_federation_servlets,
)
from synapse.http.server import HttpServer, ServletCallback
from synapse.rest import ClientRestResource
from synapse.rest.key.v2 import RemoteKey
from synapse.server import HomeServer
from synapse.storage import DataStore

logger = logging.getLogger("generate_workers_map")


class MockHomeserver(HomeServer):
    DATASTORE_CLASS = DataStore  # type: ignore

    def __init__(self, config: HomeServerConfig, worker_app: Optional[str]) -> None:
        super().__init__(config.server.server_name, config=config)
        self.config.worker.worker_app = worker_app


GROUP_PATTERN = re.compile(r"\(\?P<[^>]+?>(.+?)\)")


@dataclass
class EndpointDescription:
    """
    Describes an endpoint and how it should be routed.
    """

    # The servlet class that handles this endpoint
    servlet_class: object

    # The category of this endpoint. Is read from the `CATEGORY` constant in the servlet
    # class.
    category: Optional[str]

    # TODO:
    #  - does it need to be routed based on a stream writer config?
    #  - does it benefit from any optimised, but optional, routing?
    #  - what 'opinionated synapse worker class' (event_creator, synchrotron, etc) does
    #    it go in?


class EnumerationResource(HttpServer):
    """
    Accepts servlet registrations for the purposes of building up a description of
    all endpoints.
    """

    def __init__(self, is_worker: bool) -> None:
        self.registrations: Dict[Tuple[str, str], EndpointDescription] = {}
        self._is_worker = is_worker

    def register_paths(
        self,
        method: str,
        path_patterns: Iterable[Pattern],
        callback: ServletCallback,
        servlet_classname: str,
    ) -> None:
        # federation servlet callbacks are wrapped, so unwrap them.
        callback = getattr(callback, "__wrapped__", callback)

        # fish out the servlet class
        servlet_class = callback.__self__.__class__  # type: ignore

        if self._is_worker and method in getattr(
            servlet_class, "WORKERS_DENIED_METHODS", ()
        ):
            # This endpoint would cause an error if called on a worker, so pretend it
            # was never registered!
            return

        sd = EndpointDescription(
            servlet_class=servlet_class,
            category=getattr(servlet_class, "CATEGORY", None),
        )

        for pat in path_patterns:
            self.registrations[(method, pat.pattern)] = sd
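
A standalone sketch of the `__wrapped__` trick `register_paths` uses (all names here are illustrative): `functools.wraps` records the original callable on the wrapper, which lets the script fish the servlet class back out of a decorated bound method.

```python
import functools

class Servlet:  # hypothetical servlet class
    async def on_GET(self) -> str:
        return "ok"

def trace(f):  # hypothetical decorator of the kind federation servlets use
    @functools.wraps(f)  # sets wrapper.__wrapped__ = f
    async def wrapper(*args, **kwargs):
        return await f(*args, **kwargs)
    return wrapper

callback = trace(Servlet().on_GET)
original = getattr(callback, "__wrapped__", callback)
print(original.__self__.__class__.__name__)  # Servlet
```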


def get_registered_paths_for_hs(
    hs: HomeServer,
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Given a homeserver, get all registered endpoints and their descriptions.
    """

    enumerator = EnumerationResource(is_worker=hs.config.worker.worker_app is not None)
    ClientRestResource.register_servlets(enumerator, hs)
    federation_server = TransportLayerServer(hs)

    # we can't use `federation_server.register_servlets` but this line does the
    # same thing, only it uses this enumerator
    register_federation_servlets(
        federation_server.hs,
        resource=enumerator,
        ratelimiter=federation_server.ratelimiter,
        authenticator=federation_server.authenticator,
        servlet_groups=federation_server.servlet_groups,
    )

    # the key server endpoints are separate again
    RemoteKey(hs).register(enumerator)

    return enumerator.registrations


def get_registered_paths_for_default(
    worker_app: Optional[str], base_config: HomeServerConfig
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Given the name of a worker application and a base homeserver configuration,
    returns:

        Dict from (method, path) to EndpointDescription

    TODO Don't require passing in a config
    """

    hs = MockHomeserver(base_config, worker_app)
    # TODO We only do this to avoid an error, but don't need the database etc
    hs.setup()
    return get_registered_paths_for_hs(hs)


def elide_http_methods_if_unconflicting(
    registrations: Dict[Tuple[str, str], EndpointDescription],
    all_possible_registrations: Dict[Tuple[str, str], EndpointDescription],
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Elides HTTP methods (by replacing them with `*`) if all possible registered methods
    can be handled by the worker whose registration map is `registrations`.

    i.e. the only endpoints left with methods (other than `*`) should be the ones where
    the worker can't handle all possible methods for that path.
    """

    def paths_to_methods_dict(
        methods_and_paths: Iterable[Tuple[str, str]]
    ) -> Dict[str, Set[str]]:
        """
        Given (method, path) pairs, produces a dict from path to set of methods
        available at that path.
        """
        result: Dict[str, Set[str]] = {}
        for method, path in methods_and_paths:
            result.setdefault(path, set()).add(method)
        return result

    all_possible_reg_methods = paths_to_methods_dict(all_possible_registrations)
    reg_methods = paths_to_methods_dict(registrations)

    output = {}

    for path, handleable_methods in reg_methods.items():
        if handleable_methods == all_possible_reg_methods[path]:
            any_method = next(iter(handleable_methods))
            # TODO This assumes that all methods have the same servlet.
            #      I suppose that's possibly dubious?
            output[("*", path)] = registrations[(any_method, path)]
        else:
            for method in handleable_methods:
                output[(method, path)] = registrations[(method, path)]

    return output
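
A toy run of the elision rule above (made-up paths, with plain strings standing in for `EndpointDescription`): methods collapse to `*` only when this worker handles every method registered anywhere for that path.

```python
worker = {("GET", "/sync"): "sync", ("GET", "/join"): "join"}
everything = {**worker, ("POST", "/join"): "join"}

print(elide_http_methods_if_unconflicting(worker, everything))
# {('*', '/sync'): 'sync', ('GET', '/join'): 'join'}
#  /sync: {"GET"} == {"GET"}          -> elided to "*"
#  /join: {"GET"} != {"GET", "POST"}  -> kept method-specific
```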


def simplify_path_regexes(
    registrations: Dict[Tuple[str, str], EndpointDescription]
) -> Dict[Tuple[str, str], EndpointDescription]:
    """
    Simplify all the path regexes for the dict of endpoint descriptions,
    so that we don't use the Python-specific regex extensions
    (and also to remove needlessly specific detail).
    """

    def simplify_path_regex(path: str) -> str:
        """
        Given a regex pattern, replaces all named capturing groups (e.g. `(?P<blah>xyz)`)
        with a simpler version available in more common regex dialects (e.g. `.*`).
        """

        # TODO it's hard to choose between these two;
        #      `.*` is a vague simplification
        # return GROUP_PATTERN.sub(r"\1", path)
        return GROUP_PATTERN.sub(r".*", path)

    return {(m, simplify_path_regex(p)): v for (m, p), v in registrations.items()}
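
For example (the pattern below is illustrative, not copied from Synapse's routing tables), `GROUP_PATTERN` rewrites each named capturing group to `.*` while leaving ordinary groups untouched:

```python
import re

GROUP_PATTERN = re.compile(r"\(\?P<[^>]+?>(.+?)\)")
path = "^/_matrix/client/(r0|v3)/rooms/(?P<room_id>[^/]*)/join$"
print(GROUP_PATTERN.sub(r".*", path))
# ^/_matrix/client/(r0|v3)/rooms/.*/join$
```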


def main() -> None:
    parser = argparse.ArgumentParser(
        description=(
            "Generates a documentation-style listing of which endpoints can be"
            " handled by generic workers."
        )
    )
    parser.add_argument("-v", action="store_true")
    parser.add_argument(
        "--config-path",
        type=argparse.FileType("r"),
        required=True,
        help="Synapse configuration file",
    )

    args = parser.parse_args()

    # TODO
    # logging.basicConfig(**logging_config)

    # Load, process and sanity-check the config.
    hs_config = yaml.safe_load(args.config_path)

    config = HomeServerConfig()
    config.parse_config_dict(hs_config, "", "")

    master_paths = get_registered_paths_for_default(None, config)
    worker_paths = get_registered_paths_for_default(
        "synapse.app.generic_worker", config
    )

    all_paths = {**master_paths, **worker_paths}

    elided_worker_paths = elide_http_methods_if_unconflicting(worker_paths, all_paths)
    elide_http_methods_if_unconflicting(master_paths, all_paths)

    # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT

    categories_to_methods_and_paths: Dict[
        Optional[str], Dict[Tuple[str, str], EndpointDescription]
    ] = defaultdict(dict)

    for (method, path), desc in elided_worker_paths.items():
        categories_to_methods_and_paths[desc.category][method, path] = desc

    for category, contents in categories_to_methods_and_paths.items():
        print_category(category, contents)


def print_category(
    category_name: Optional[str],
    elided_worker_paths: Dict[Tuple[str, str], EndpointDescription],
) -> None:
    """
    Prints out a category, in documentation page style.

    Example:
    ```
    # Category name
    /path/xyz

    GET /path/abc
    ```
    """

    if category_name:
        print(f"# {category_name}")
    else:
        print("# (Uncategorised requests)")

    for ln in sorted(
        p for m, p in simplify_path_regexes(elided_worker_paths) if m == "*"
    ):
        print(ln)
    print()
    for ln in sorted(
        f"{m:6} {p}" for m, p in simplify_path_regexes(elided_worker_paths) if m != "*"
    ):
        print(ln)
    print()


if __name__ == "__main__":
    main()
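
A hypothetical invocation (the config path is illustrative); each `CATEGORY` comes out as one documentation-style section:

```python
# From a checkout with Synapse's dependencies installed:
#
#   python -m synapse._scripts.generate_workers_map --config-path homeserver.yaml
#
# prints "# <category>" headings, the "*"-elided paths first, then the
# method-specific entries, per print_category above.
```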

@@ -18,6 +18,7 @@
import argparse
import curses
import logging
import os
import sys
import time
import traceback
@@ -53,11 +54,12 @@ from synapse.logging.context import (
)
from synapse.notifier import ReplicationNotifier
from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
from synapse.storage.databases.main import PushRuleStore
from synapse.storage.databases.main import FilteringWorkerStore, PushRuleStore
from synapse.storage.databases.main.account_data import AccountDataWorkerStore
from synapse.storage.databases.main.client_ips import ClientIpBackgroundUpdateStore
from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpdateStore
from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore
from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyBackgroundStore
from synapse.storage.databases.main.end_to_end_keys import EndToEndKeyBackgroundStore
from synapse.storage.databases.main.event_push_actions import EventPushActionsStore
from synapse.storage.databases.main.events_bg_updates import (
@@ -67,7 +69,11 @@ from synapse.storage.databases.main.media_repository import (
    MediaRepositoryBackgroundUpdateStore,
)
from synapse.storage.databases.main.presence import PresenceBackgroundUpdateStore
from synapse.storage.databases.main.pusher import PusherWorkerStore
from synapse.storage.databases.main.profile import ProfileWorkerStore
from synapse.storage.databases.main.pusher import (
    PusherBackgroundUpdatesStore,
    PusherWorkerStore,
)
from synapse.storage.databases.main.receipts import ReceiptsBackgroundUpdateStore
from synapse.storage.databases.main.registration import (
    RegistrationBackgroundUpdateStore,
@@ -119,6 +125,7 @@ BOOLEAN_COLUMNS = {
    "users": ["shadow_banned", "approved"],
    "un_partial_stated_event_stream": ["rejection_status_changed"],
    "users_who_share_rooms": ["share_private"],
    "per_user_experimental_features": ["enabled"],
}


@@ -221,10 +228,14 @@ class Store(
    MainStateBackgroundUpdateStore,
    UserDirectoryBackgroundUpdateStore,
    EndToEndKeyBackgroundStore,
    EndToEndRoomKeyBackgroundStore,
    StatsStore,
    AccountDataWorkerStore,
    FilteringWorkerStore,
    ProfileWorkerStore,
    PushRuleStore,
    PusherWorkerStore,
    PusherBackgroundUpdatesStore,
    PresenceBackgroundUpdateStore,
    ReceiptsBackgroundUpdateStore,
    RelationsWorkerStore,
@@ -1326,10 +1337,17 @@ def main() -> None:
        filename="port-synapse.log" if args.curses else None,
    )

    if not os.path.isfile(args.sqlite_database):
        sys.stderr.write(
            "The sqlite database you specified does not exist, please check that you have the"
            " correct path."
        )
        sys.exit(1)

    sqlite_config = {
        "name": "sqlite3",
        "args": {
            "database": "file:{}?mode=rw".format(args.sqlite_database),
            "database": args.sqlite_database,
            "cp_min": 1,
            "cp_max": 1,
            "check_same_thread": False,

@@ -39,7 +39,7 @@ class AuthBlocking:
        self._mau_limits_reserved_threepids = (
            hs.config.server.mau_limits_reserved_threepids
        )
        self._server_name = hs.hostname
        self._is_mine_server_name = hs.is_mine_server_name
        self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips

    async def check_auth_blocking(
@@ -77,7 +77,7 @@ class AuthBlocking:
        if requester:
            if requester.authenticated_entity.startswith("@"):
                user_id = requester.authenticated_entity
            elif requester.authenticated_entity == self._server_name:
            elif self._is_mine_server_name(requester.authenticated_entity):
                # We never block the server from doing actions on behalf of
                # users.
                return

@@ -215,6 +215,8 @@ class EventContentFields:
    FEDERATE: Final = "m.federate"

    # The creator of the room, as used in `m.room.create` events.
    #
    # This is deprecated in MSC2175.
    ROOM_CREATOR: Final = "creator"

    # Used in m.room.guest_access events.
@@ -255,6 +257,7 @@ class AccountDataTypes:
    DIRECT: Final = "m.direct"
    IGNORED_USER_LIST: Final = "m.ignored_user_list"
    TAG: Final = "m.tag"
    PUSH_RULES: Final = "m.push_rules"


class HistoryVisibility:

@@ -27,7 +27,7 @@ from synapse.util import json_decoder

if typing.TYPE_CHECKING:
    from synapse.config.homeserver import HomeServerConfig
    from synapse.types import JsonDict
    from synapse.types import JsonDict, StrCollection

logger = logging.getLogger(__name__)

@@ -108,6 +108,11 @@ class Codes(str, Enum):

    USER_AWAITING_APPROVAL = "ORG.MATRIX.MSC3866_USER_AWAITING_APPROVAL"

    AS_PING_URL_NOT_SET = "M_URL_NOT_SET"
    AS_PING_BAD_STATUS = "M_BAD_STATUS"
    AS_PING_CONNECTION_TIMEOUT = "M_CONNECTION_TIMEOUT"
    AS_PING_CONNECTION_FAILED = "M_CONNECTION_FAILED"

    # Attempt to send a second annotation with the same event type & annotation key
    # MSC2677
    DUPLICATE_ANNOTATION = "M_DUPLICATE_ANNOTATION"
@@ -677,18 +682,27 @@ class FederationPullAttemptBackoffError(RuntimeError):
    Attributes:
        event_ids: The event IDs which we are refusing to pull
        message: A custom error message that gives more context
        retry_after_ms: The remaining backoff interval, in milliseconds
    """

    def __init__(self, event_ids: List[str], message: Optional[str]):
        self.event_ids = event_ids
    def __init__(
        self, event_ids: "StrCollection", message: Optional[str], retry_after_ms: int
    ):
        event_ids = list(event_ids)

        if message:
            error_message = message
        else:
            error_message = f"Not attempting to pull event_ids={self.event_ids} because we already tried to pull them recently (backing off)."
            error_message = (
                f"Not attempting to pull event_ids={event_ids} because we already "
                "tried to pull them recently (backing off)."
            )

        super().__init__(error_message)

        self.event_ids = event_ids
        self.retry_after_ms = retry_after_ms
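
An illustrative construction (event IDs and backoff invented) showing the new contract: any string collection is accepted, normalised to a list, and the remaining backoff travels on the exception:

```python
from synapse.api.errors import FederationPullAttemptBackoffError

err = FederationPullAttemptBackoffError({"$ev1", "$ev2"}, None, retry_after_ms=5_000)
print(err.event_ids)       # ['$ev1', '$ev2'] (set order may vary)
print(err.retry_after_ms)  # 5000
print(err)                 # "Not attempting to pull event_ids=[...] ... (backing off)."
```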


class HttpResponseException(CodeMessageException):
    """

@@ -170,11 +170,9 @@ class Filtering:
        result = await self.store.get_user_filter(user_localpart, filter_id)
        return FilterCollection(self._hs, result)

    def add_user_filter(
        self, user_localpart: str, user_filter: JsonDict
    ) -> Awaitable[int]:
    def add_user_filter(self, user_id: UserID, user_filter: JsonDict) -> Awaitable[int]:
        self.check_valid_filter(user_filter)
        return self.store.add_user_filter(user_localpart, user_filter)
        return self.store.add_user_filter(user_id, user_filter)

    # TODO(paul): surely we should probably add a delete_user_filter or
    #   replace_user_filter at some point? There's no REST API specified for

@@ -78,7 +78,10 @@ class RoomVersion:
    # MSC2209: Check 'notifications' key while verifying
    # m.room.power_levels auth rules.
    limit_notifications_power_levels: bool
    # MSC2174/MSC2176: Apply updated redaction rules algorithm.
    # MSC2175: No longer include the creator in m.room.create events.
    msc2175_implicit_room_creator: bool
    # MSC2174/MSC2176: Apply updated redaction rules algorithm, move redacts to
    # content property.
    msc2176_redaction_rules: bool
    # MSC3083: Support the 'restricted' join_rule.
    msc3083_join_rules: bool
@@ -104,6 +107,8 @@ class RoomVersion:
    # support the flag. Unknown flags are ignored by the evaluator, making conditions
    # fail if used.
    msc3931_push_features: Tuple[str, ...]  # values from PushRuleRoomFlag
    # MSC3989: Redact the origin field.
    msc3989_redaction_rules: bool


class RoomVersions:
@@ -116,6 +121,7 @@ class RoomVersions:
        special_case_aliases_auth=True,
        strict_canonicaljson=False,
        limit_notifications_power_levels=False,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=False,
        msc3375_redaction_rules=False,
@@ -125,6 +131,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    V2 = RoomVersion(
        "2",
@@ -135,6 +142,7 @@ class RoomVersions:
        special_case_aliases_auth=True,
        strict_canonicaljson=False,
        limit_notifications_power_levels=False,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=False,
        msc3375_redaction_rules=False,
@@ -144,6 +152,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    V3 = RoomVersion(
        "3",
@@ -154,6 +163,7 @@ class RoomVersions:
        special_case_aliases_auth=True,
        strict_canonicaljson=False,
        limit_notifications_power_levels=False,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=False,
        msc3375_redaction_rules=False,
@@ -163,6 +173,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    V4 = RoomVersion(
        "4",
@@ -173,6 +184,7 @@ class RoomVersions:
        special_case_aliases_auth=True,
        strict_canonicaljson=False,
        limit_notifications_power_levels=False,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=False,
        msc3375_redaction_rules=False,
@@ -182,6 +194,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    V5 = RoomVersion(
        "5",
@@ -192,6 +205,7 @@ class RoomVersions:
        special_case_aliases_auth=True,
        strict_canonicaljson=False,
        limit_notifications_power_levels=False,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=False,
        msc3375_redaction_rules=False,
@@ -201,6 +215,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    V6 = RoomVersion(
        "6",
@@ -211,6 +226,7 @@ class RoomVersions:
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=False,
        msc3375_redaction_rules=False,
@@ -220,6 +236,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    MSC2176 = RoomVersion(
        "org.matrix.msc2176",
@@ -230,6 +247,7 @@ class RoomVersions:
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=True,
        msc3083_join_rules=False,
        msc3375_redaction_rules=False,
@@ -239,6 +257,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    V7 = RoomVersion(
        "7",
@@ -249,6 +268,7 @@ class RoomVersions:
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=False,
        msc3375_redaction_rules=False,
@@ -258,6 +278,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    V8 = RoomVersion(
        "8",
@@ -268,6 +289,7 @@ class RoomVersions:
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=True,
        msc3375_redaction_rules=False,
@@ -277,6 +299,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    V9 = RoomVersion(
        "9",
@@ -287,6 +310,7 @@ class RoomVersions:
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=True,
        msc3375_redaction_rules=True,
@@ -296,6 +320,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    MSC3787 = RoomVersion(
        "org.matrix.msc3787",
@@ -306,6 +331,7 @@ class RoomVersions:
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=True,
        msc3375_redaction_rules=True,
@@ -315,6 +341,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    V10 = RoomVersion(
        "10",
@@ -325,6 +352,7 @@ class RoomVersions:
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=True,
        msc3375_redaction_rules=True,
@@ -334,6 +362,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=True,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    MSC2716v4 = RoomVersion(
        "org.matrix.msc2716v4",
@@ -344,6 +373,7 @@ class RoomVersions:
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=False,
        msc3375_redaction_rules=False,
@@ -353,6 +383,7 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=False,
        msc3667_int_only_power_levels=False,
        msc3931_push_features=(),
        msc3989_redaction_rules=False,
    )
    MSC1767v10 = RoomVersion(
        # MSC1767 (Extensible Events) based on room version "10"
@@ -364,6 +395,7 @@ class RoomVersions:
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=True,
        msc3375_redaction_rules=True,
@@ -373,6 +405,28 @@ class RoomVersions:
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=True,
        msc3931_push_features=(PushRuleRoomFlag.EXTENSIBLE_EVENTS,),
        msc3989_redaction_rules=False,
    )
    MSC3989 = RoomVersion(
        "org.matrix.msc3989",
        RoomDisposition.UNSTABLE,
        EventFormatVersions.ROOM_V4_PLUS,
        StateResolutionVersions.V2,
        enforce_key_validity=True,
        special_case_aliases_auth=False,
        strict_canonicaljson=True,
        limit_notifications_power_levels=True,
        msc2175_implicit_room_creator=False,
        msc2176_redaction_rules=False,
        msc3083_join_rules=True,
        msc3375_redaction_rules=True,
        msc2403_knocking=True,
        msc2716_historical=False,
        msc2716_redactions=False,
        msc3787_knock_restricted_join_rule=True,
        msc3667_int_only_power_levels=True,
        msc3931_push_features=(),
        msc3989_redaction_rules=True,
    )


@@ -392,6 +446,7 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
        RoomVersions.MSC3787,
        RoomVersions.V10,
        RoomVersions.MSC2716v4,
        RoomVersions.MSC3989,
    )
}


@@ -41,7 +41,12 @@ from typing_extensions import ParamSpec

import twisted
from twisted.internet import defer, error, reactor as _reactor
from twisted.internet.interfaces import IOpenSSLContextFactory, IReactorSSL, IReactorTCP
from twisted.internet.interfaces import (
    IOpenSSLContextFactory,
    IReactorSSL,
    IReactorTCP,
    IReactorUNIX,
)
from twisted.internet.protocol import ServerFactory
from twisted.internet.tcp import Port
from twisted.logger import LoggingFile, LogLevel
@@ -56,11 +61,9 @@ from synapse.app.phone_stats_home import start_phone_stats_home
from synapse.config import ConfigError
from synapse.config._base import format_config_error
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import ListenerConfig, ManholeConfig
from synapse.config.server import ListenerConfig, ManholeConfig, TCPListenerConfig
from synapse.crypto import context_factory
from synapse.events.presence_router import load_legacy_presence_router
from synapse.events.spamcheck import load_legacy_spam_checkers
from synapse.events.third_party_rules import load_legacy_third_party_event_rules
from synapse.handlers.auth import load_legacy_password_auth_providers
from synapse.http.site import SynapseSite
from synapse.logging.context import PreserveLoggingContext
@@ -68,6 +71,10 @@ from synapse.logging.opentracing import init_tracer
from synapse.metrics import install_gc_manager, register_threadpool
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.metrics.jemalloc import setup_jemalloc_stats
from synapse.module_api.callbacks.spamchecker_callbacks import load_legacy_spam_checkers
from synapse.module_api.callbacks.third_party_event_rules_callbacks import (
    load_legacy_third_party_event_rules,
)
from synapse.types import ISynapseReactor
from synapse.util import SYNAPSE_VERSION
from synapse.util.caches.lrucache import setup_expire_lru_cache_entries
@@ -351,6 +358,28 @@ def listen_tcp(
    return r  # type: ignore[return-value]


def listen_unix(
    path: str,
    mode: int,
    factory: ServerFactory,
    reactor: IReactorUNIX = reactor,
    backlog: int = 50,
) -> List[Port]:
    """
    Create a UNIX socket for a given path and 'mode' permission

    Returns:
        list of twisted.internet.tcp.Port listening for connections on the UNIX socket
    """
    wantPID = True

    return [
        # IReactorUNIX returns an object implementing IListeningPort from listenUNIX,
        # but we know it will be a Port instance.
        cast(Port, reactor.listenUNIX(path, factory, backlog, mode, wantPID))
    ]
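
A standalone sketch (socket path invented) of the underlying Twisted call; `getHost().name` is the byte-string path that `listen_http` below decodes for logging:

```python
from twisted.internet import reactor
from twisted.internet.protocol import ServerFactory

port = reactor.listenUNIX(
    "/tmp/example.sock", ServerFactory(), backlog=50, mode=0o666, wantPID=True
)
print(port.getHost().name)  # b'/tmp/example.sock'
```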


def listen_http(
    listener_config: ListenerConfig,
    root_resource: Resource,
@@ -359,18 +388,13 @@
    context_factory: Optional[IOpenSSLContextFactory],
    reactor: ISynapseReactor = reactor,
) -> List[Port]:
    port = listener_config.port
    bind_addresses = listener_config.bind_addresses
    tls = listener_config.tls

    assert listener_config.http_options is not None

    site_tag = listener_config.http_options.tag
    if site_tag is None:
        site_tag = str(port)
    site_tag = listener_config.get_site_tag()

    site = SynapseSite(
        "synapse.access.%s.%s" % ("https" if tls else "http", site_tag),
        "synapse.access.%s.%s"
        % ("https" if listener_config.is_tls() else "http", site_tag),
        site_tag,
        listener_config,
        root_resource,
@@ -378,25 +402,41 @@
        max_request_body_size=max_request_body_size,
        reactor=reactor,
    )
    if tls:
        # refresh_certificate should have been called before this.
        assert context_factory is not None
        ports = listen_ssl(
            bind_addresses,
            port,
            site,
            context_factory,
            reactor=reactor,
        )
        logger.info("Synapse now listening on TCP port %d (TLS)", port)

    if isinstance(listener_config, TCPListenerConfig):
        if listener_config.is_tls():
            # refresh_certificate should have been called before this.
            assert context_factory is not None
            ports = listen_ssl(
                listener_config.bind_addresses,
                listener_config.port,
                site,
                context_factory,
                reactor=reactor,
            )
            logger.info(
                "Synapse now listening on TCP port %d (TLS)", listener_config.port
            )
        else:
            ports = listen_tcp(
                listener_config.bind_addresses,
                listener_config.port,
                site,
                reactor=reactor,
            )
            logger.info("Synapse now listening on TCP port %d", listener_config.port)

    else:
        ports = listen_tcp(
            bind_addresses,
            port,
            site,
            reactor=reactor,
        ports = listen_unix(
            listener_config.path, listener_config.mode, site, reactor=reactor
        )
        logger.info("Synapse now listening on TCP port %d", port)
        # getHost() returns a UNIXAddress which contains an instance variable of 'name'
        # encoded as a byte string. Decode as utf-8 for prettier logging.
        logger.info(
            "Synapse now listening on Unix Socket at: "
            f"{ports[0].getHost().name.decode('utf-8')}"
        )

    return ports



@@ -38,7 +38,7 @@ from synapse.app._base import (
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
from synapse.config.logger import setup_logging
from synapse.config.server import ListenerConfig
from synapse.config.server import ListenerConfig, TCPListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.server import JsonResource, OptionsResource
from synapse.logging.context import LoggingContext
@@ -51,6 +51,7 @@ from synapse.rest.key.v2 import KeyResource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.rest.well_known import well_known_resource
from synapse.server import HomeServer
from synapse.storage.databases.main import ExperimentalFeaturesStore
from synapse.storage.databases.main.account_data import AccountDataWorkerStore
from synapse.storage.databases.main.appservice import (
    ApplicationServiceTransactionWorkerStore,
@@ -146,6 +147,7 @@ class GenericWorkerSlavedStore(
    TransactionWorkerStore,
    LockStore,
    SessionStore,
    ExperimentalFeaturesStore,
):
    # Properties that multiple storage classes define. Tell mypy what the
    # expected type is.
@@ -236,12 +238,18 @@ class GenericWorkerServer(HomeServer):
            if listener.type == "http":
                self._listen_http(listener)
            elif listener.type == "manhole":
                _base.listen_manhole(
                    listener.bind_addresses,
                    listener.port,
                    manhole_settings=self.config.server.manhole_settings,
                    manhole_globals={"hs": self},
                )
                if isinstance(listener, TCPListenerConfig):
                    _base.listen_manhole(
                        listener.bind_addresses,
                        listener.port,
                        manhole_settings=self.config.server.manhole_settings,
                        manhole_globals={"hs": self},
                    )
                else:
                    raise ConfigError(
                        "Can not use a unix socket for manhole at this time."
                    )

            elif listener.type == "metrics":
                if not self.config.metrics.enable_metrics:
                    logger.warning(
@@ -249,10 +257,16 @@ class GenericWorkerServer(HomeServer):
                        "enable_metrics is not True!"
                    )
                else:
                    _base.listen_metrics(
                        listener.bind_addresses,
                        listener.port,
                    )
                    if isinstance(listener, TCPListenerConfig):
                        _base.listen_metrics(
                            listener.bind_addresses,
                            listener.port,
                        )
                    else:
                        raise ConfigError(
                            "Can not use a unix socket for metrics at this time."
                        )

            else:
                logger.warning("Unsupported listener type: %s", listener.type)


@@ -44,7 +44,7 @@ from synapse.app._base import (
)
from synapse.config._base import ConfigError, format_config_error
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import ListenerConfig
from synapse.config.server import ListenerConfig, TCPListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.additional_resource import AdditionalResource
from synapse.http.server import (
@@ -78,14 +78,13 @@ class SynapseHomeServer(HomeServer):
    DATASTORE_CLASS = DataStore  # type: ignore

    def _listener_http(
        self, config: HomeServerConfig, listener_config: ListenerConfig
        self,
        config: HomeServerConfig,
        listener_config: ListenerConfig,
    ) -> Iterable[Port]:
        port = listener_config.port
        # Must exist since this is an HTTP listener.
        assert listener_config.http_options is not None
        site_tag = listener_config.http_options.tag
        if site_tag is None:
            site_tag = str(port)
        site_tag = listener_config.get_site_tag()

        # We always include a health resource.
        resources: Dict[str, Resource] = {"/health": HealthResource()}
@@ -252,12 +251,17 @@ class SynapseHomeServer(HomeServer):
                    self._listener_http(self.config, listener)
                )
            elif listener.type == "manhole":
                _base.listen_manhole(
                    listener.bind_addresses,
                    listener.port,
                    manhole_settings=self.config.server.manhole_settings,
                    manhole_globals={"hs": self},
                )
                if isinstance(listener, TCPListenerConfig):
                    _base.listen_manhole(
                        listener.bind_addresses,
                        listener.port,
                        manhole_settings=self.config.server.manhole_settings,
                        manhole_globals={"hs": self},
                    )
                else:
                    raise ConfigError(
                        "Can not use a unix socket for manhole at this time."
                    )
            elif listener.type == "metrics":
                if not self.config.metrics.enable_metrics:
                    logger.warning(
@@ -265,10 +269,16 @@ class SynapseHomeServer(HomeServer):
                        "enable_metrics is not True!"
                    )
                else:
                    _base.listen_metrics(
                        listener.bind_addresses,
                        listener.port,
                    )
                    if isinstance(listener, TCPListenerConfig):
                        _base.listen_metrics(
                            listener.bind_addresses,
                            listener.port,
                        )
                    else:
                        raise ConfigError(
                            "Can not use a unix socket for metrics at this time."
                        )

            else:
                # this shouldn't happen, as the listener type should have been checked
                # during parsing

@@ -17,6 +17,8 @@ import urllib.parse
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Iterable,
    List,
@@ -24,13 +26,14 @@ from typing import (
    Optional,
    Sequence,
    Tuple,
    TypeVar,
)

from prometheus_client import Counter
from typing_extensions import TypeGuard
from typing_extensions import Concatenate, ParamSpec, TypeGuard

from synapse.api.constants import EventTypes, Membership, ThirdPartyEntityKind
from synapse.api.errors import CodeMessageException
from synapse.api.errors import CodeMessageException, HttpResponseException
from synapse.appservice import (
    ApplicationService,
    TransactionOneTimeKeysCount,
@@ -38,7 +41,7 @@ from synapse.appservice import (
)
from synapse.events import EventBase
from synapse.events.utils import SerializeEventConfig, serialize_event
from synapse.http.client import SimpleHttpClient
from synapse.http.client import SimpleHttpClient, is_unknown_endpoint
from synapse.types import DeviceListUpdates, JsonDict, ThirdPartyInstanceID
from synapse.util.caches.response_cache import ResponseCache

@@ -78,7 +81,11 @@ sent_todevice_counter = Counter(
HOUR_IN_MS = 60 * 60 * 1000


APP_SERVICE_PREFIX = "/_matrix/app/unstable"
APP_SERVICE_PREFIX = "/_matrix/app/v1"
APP_SERVICE_UNSTABLE_PREFIX = "/_matrix/app/unstable"

P = ParamSpec("P")
R = TypeVar("R")


def _is_valid_3pe_metadata(info: JsonDict) -> bool:
@@ -121,6 +128,47 @@ class ApplicationServiceApi(SimpleHttpClient):
            hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
        )

    async def _send_with_fallbacks(
        self,
        service: "ApplicationService",
        prefixes: List[str],
        path: str,
        func: Callable[Concatenate[str, P], Awaitable[R]],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> R:
        """
        Attempt to call an application service with multiple paths, falling back
        until one succeeds.

        Args:
            service: The application service; this provides the base URL.
            prefixes: A list of paths to try in order for the requests.
            path: A suffix to append to each prefix.
            func: The function to call, the first argument will be the full
                endpoint to fetch. Other arguments are provided by args/kwargs.

        Returns:
            The return value of func.
        """
        for i, prefix in enumerate(prefixes, start=1):
            uri = f"{service.url}{prefix}{path}"
            try:
                return await func(uri, *args, **kwargs)
            except HttpResponseException as e:
                # If an error is received that is due to an unrecognised path,
                # fall back to the next path (if one exists). Otherwise, consider
                # it a legitimate error and raise.
                if i < len(prefixes) and is_unknown_endpoint(e):
                    continue
                raise
            except Exception:
                # Unexpected exceptions get sent to the caller.
                raise

        # The function should always exit via the return or raise above this.
        raise RuntimeError("Unexpected fallback behaviour. This should never be seen.")
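
Illustratively (hypothetical base URL), `prefixes=[APP_SERVICE_PREFIX, ""]` makes the client try the stable v1 path first and fall back to the legacy unprefixed path only on an unknown-endpoint error:

```python
service_url = "https://as.example.org"  # hypothetical appservice base URL
path = "/users/%40alice%3Aexample.org"
for prefix in ["/_matrix/app/v1", ""]:
    print(f"{service_url}{prefix}{path}")
# https://as.example.org/_matrix/app/v1/users/%40alice%3Aexample.org  (tried first)
# https://as.example.org/users/%40alice%3Aexample.org                 (fallback)
```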
|
||||
|
||||
async def query_user(self, service: "ApplicationService", user_id: str) -> bool:
|
||||
if service.url is None:
|
||||
return False
|
||||
@@ -128,10 +176,12 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
# This is required by the configuration.
|
||||
assert service.hs_token is not None
|
||||
|
||||
uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
|
||||
try:
|
||||
response = await self.get_json(
|
||||
uri,
|
||||
response = await self._send_with_fallbacks(
|
||||
service,
|
||||
[APP_SERVICE_PREFIX, ""],
|
||||
f"/users/{urllib.parse.quote(user_id)}",
|
||||
self.get_json,
|
||||
{"access_token": service.hs_token},
|
||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||
)
|
||||
@@ -140,9 +190,9 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
except CodeMessageException as e:
|
||||
if e.code == 404:
|
||||
return False
|
||||
logger.warning("query_user to %s received %s", uri, e.code)
|
||||
logger.warning("query_user to %s received %s", service.url, e.code)
|
||||
except Exception as ex:
|
||||
logger.warning("query_user to %s threw exception %s", uri, ex)
|
||||
logger.warning("query_user to %s threw exception %s", service.url, ex)
|
||||
return False
|
||||
|
||||
async def query_alias(self, service: "ApplicationService", alias: str) -> bool:
|
||||
@@ -152,21 +202,23 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
# This is required by the configuration.
|
||||
assert service.hs_token is not None
|
||||
|
||||
uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
|
||||
try:
|
||||
response = await self.get_json(
|
||||
uri,
|
||||
response = await self._send_with_fallbacks(
|
||||
service,
|
||||
[APP_SERVICE_PREFIX, ""],
|
||||
f"/rooms/{urllib.parse.quote(alias)}",
|
||||
self.get_json,
|
||||
{"access_token": service.hs_token},
|
||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||
)
|
||||
if response is not None: # just an empty json object
|
||||
return True
|
||||
except CodeMessageException as e:
|
||||
logger.warning("query_alias to %s received %s", uri, e.code)
|
||||
logger.warning("query_alias to %s received %s", service.url, e.code)
|
||||
if e.code == 404:
|
||||
return False
|
||||
except Exception as ex:
|
||||
logger.warning("query_alias to %s threw exception %s", uri, ex)
|
||||
logger.warning("query_alias to %s threw exception %s", service.url, ex)
|
||||
return False
|
||||
|
||||
async def query_3pe(
|
||||
@@ -188,25 +240,24 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
# This is required by the configuration.
|
||||
assert service.hs_token is not None
|
||||
|
||||
uri = "%s%s/thirdparty/%s/%s" % (
|
||||
service.url,
|
||||
APP_SERVICE_PREFIX,
|
||||
kind,
|
||||
urllib.parse.quote(protocol),
|
||||
)
|
||||
try:
|
||||
args: Mapping[Any, Any] = {
|
||||
**fields,
|
||||
b"access_token": service.hs_token,
|
||||
}
|
||||
response = await self.get_json(
|
||||
uri,
|
||||
response = await self._send_with_fallbacks(
|
||||
service,
|
||||
[APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
|
||||
f"/thirdparty/{kind}/{urllib.parse.quote(protocol)}",
|
||||
self.get_json,
|
||||
args=args,
|
||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||
)
|
||||
if not isinstance(response, list):
|
||||
logger.warning(
|
||||
"query_3pe to %s returned an invalid response %r", uri, response
|
||||
"query_3pe to %s returned an invalid response %r",
|
||||
service.url,
|
||||
response,
|
||||
)
|
||||
return []
|
||||
|
||||
@@ -216,12 +267,12 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
ret.append(r)
|
||||
else:
|
||||
logger.warning(
|
||||
"query_3pe to %s returned an invalid result %r", uri, r
|
||||
"query_3pe to %s returned an invalid result %r", service.url, r
|
||||
)
|
||||
|
||||
return ret
|
||||
except Exception as ex:
|
||||
logger.warning("query_3pe to %s threw exception %s", uri, ex)
|
||||
logger.warning("query_3pe to %s threw exception %s", service.url, ex)
|
||||
return []
|
||||
|
||||
async def get_3pe_protocol(
|
||||
@@ -233,21 +284,20 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
async def _get() -> Optional[JsonDict]:
|
||||
# This is required by the configuration.
|
||||
assert service.hs_token is not None
|
||||
uri = "%s%s/thirdparty/protocol/%s" % (
|
||||
service.url,
|
||||
APP_SERVICE_PREFIX,
|
||||
urllib.parse.quote(protocol),
|
||||
)
|
||||
try:
|
||||
info = await self.get_json(
|
||||
uri,
|
||||
info = await self._send_with_fallbacks(
|
||||
service,
|
||||
[APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
|
||||
f"/thirdparty/protocol/{urllib.parse.quote(protocol)}",
|
||||
self.get_json,
|
||||
{"access_token": service.hs_token},
|
||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||
)
|
||||
|
||||
if not _is_valid_3pe_metadata(info):
|
||||
logger.warning(
|
||||
"query_3pe_protocol to %s did not return a valid result", uri
|
||||
"query_3pe_protocol to %s did not return a valid result",
|
||||
service.url,
|
||||
)
|
||||
return None
|
||||
|
||||
@@ -260,12 +310,27 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
|
||||
return info
|
||||
except Exception as ex:
|
||||
logger.warning("query_3pe_protocol to %s threw exception %s", uri, ex)
|
||||
logger.warning(
|
||||
"query_3pe_protocol to %s threw exception %s", service.url, ex
|
||||
)
|
||||
return None
|
||||
|
||||
key = (service.id, protocol)
|
||||
return await self.protocol_meta_cache.wrap(key, _get)
|
||||
|
||||
async def ping(self, service: "ApplicationService", txn_id: Optional[str]) -> None:
|
||||
# The caller should check that url is set
|
||||
assert service.url is not None, "ping called without URL being set"
|
||||
|
||||
# This is required by the configuration.
|
||||
assert service.hs_token is not None
|
||||
|
||||
await self.post_json_get_json(
|
||||
uri=f"{service.url}{APP_SERVICE_PREFIX}/ping",
|
||||
post_json={"transaction_id": txn_id},
|
||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||
)
|
||||
|
||||
async def push_bulk(
|
||||
self,
|
||||
service: "ApplicationService",
|
||||
@@ -305,8 +370,6 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
)
|
||||
txn_id = 0
|
||||
|
||||
uri = service.url + ("/transactions/%s" % urllib.parse.quote(str(txn_id)))
|
||||
|
||||
# Never send ephemeral events to appservices that do not support it
|
||||
body: JsonDict = {"events": serialized_events}
|
||||
if service.supports_ephemeral:
|
||||
@@ -338,8 +401,11 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
}
|
||||
|
||||
try:
|
||||
await self.put_json(
|
||||
uri=uri,
|
||||
await self._send_with_fallbacks(
|
||||
service,
|
||||
[APP_SERVICE_PREFIX, ""],
|
||||
f"/transactions/{urllib.parse.quote(str(txn_id))}",
|
||||
self.put_json,
|
||||
json_body=body,
|
||||
args={"access_token": service.hs_token},
|
||||
headers={"Authorization": [f"Bearer {service.hs_token}"]},
|
||||
@@ -347,7 +413,7 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
logger.debug(
|
||||
"push_bulk to %s succeeded! events=%s",
|
||||
uri,
|
||||
service.url,
|
||||
[event.get("event_id") for event in events],
|
||||
)
|
||||
sent_transactions_counter.labels(service.id).inc()
|
||||
@@ -358,7 +424,7 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
except CodeMessageException as e:
|
||||
logger.warning(
|
||||
"push_bulk to %s received code=%s msg=%s",
|
||||
uri,
|
||||
service.url,
|
||||
e.code,
|
||||
e.msg,
|
||||
exc_info=logger.isEnabledFor(logging.DEBUG),
|
||||
@@ -366,7 +432,7 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
except Exception as ex:
|
||||
logger.warning(
|
||||
"push_bulk to %s threw exception(%s) %s args=%s",
|
||||
uri,
|
||||
service.url,
|
||||
type(ex).__name__,
|
||||
ex,
|
||||
ex.args,
|
||||
@@ -375,6 +441,121 @@ class ApplicationServiceApi(SimpleHttpClient):
|
||||
failed_transactions_counter.labels(service.id).inc()
|
||||
return False
|
||||
|
||||
async def claim_client_keys(
|
||||
self, service: "ApplicationService", query: List[Tuple[str, str, str, int]]
|
||||
) -> Tuple[
|
||||
Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]]
|
||||
]:
|
||||
"""Claim one time keys from an application service.
|
||||
|
||||
Note that any error (including a timeout) is treated as the application
|
||||
service having no information.
|
||||
|
||||
Args:
|
||||
service: The application service to query.
|
||||
query: An iterable of tuples of (user ID, device ID, algorithm).
|
||||
|
||||
Returns:
|
||||
A tuple of:
|
||||
A map of user ID -> a map device ID -> a map of key ID -> JSON dict.
|
||||
|
||||
A copy of the input which has not been fulfilled because the
|
||||
appservice doesn't support this endpoint or has not returned
|
||||
data for that tuple.
|
||||
"""
        if service.url is None:
            return {}, query

        # This is required by the configuration.
        assert service.hs_token is not None

        # Create the expected payload shape.
        body: Dict[str, Dict[str, List[str]]] = {}
        for user_id, device, algorithm, count in query:
            body.setdefault(user_id, {}).setdefault(device, []).extend(
                [algorithm] * count
            )

        uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3983/keys/claim"
        try:
            response = await self.post_json_get_json(
                uri,
                body,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
        except HttpResponseException as e:
            # The appservice doesn't support this endpoint.
            if is_unknown_endpoint(e):
                return {}, query
            logger.warning("claim_keys to %s received %s", uri, e.code)
            return {}, query
        except Exception as ex:
            logger.warning("claim_keys to %s threw exception %s", uri, ex)
            return {}, query

        # Check if the appservice fulfilled all of the queried user/device/algorithms
        # or if some are still missing.
        #
        # TODO This places a lot of faith in the response shape being correct.
        missing = []
        for user_id, device, algorithm, count in query:
            # Count the number of keys in the response for this algorithm by
            # checking which key IDs start with the algorithm. This relies on
            # the fact that True == 1 in Python to generate a count.
            response_count = sum(
                key_id.startswith(f"{algorithm}:")
                for key_id in response.get(user_id, {}).get(device, {})
            )
            count -= response_count
            # If the appservice responded with fewer keys than requested, then
            # consider the request unfulfilled.
            if count > 0:
                missing.append((user_id, device, algorithm, count))

        return response, missing
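
For illustration only, not part of the diff: a minimal sketch of the claim flow above, using invented user, device, and key IDs, showing the payload shape POSTed to the MSC3983 endpoint and how unfulfilled tuples are derived.

# Hypothetical example; all IDs and key data are invented.
query = [("@alice:example.org", "DEVICE1", "signed_curve25519", 2)]

body = {}
for user_id, device, algorithm, count in query:
    body.setdefault(user_id, {}).setdefault(device, []).extend([algorithm] * count)
# body == {"@alice:example.org": {"DEVICE1": ["signed_curve25519", "signed_curve25519"]}}

# Suppose the appservice returns only one key; key IDs take the form "<algorithm>:<id>".
response = {
    "@alice:example.org": {
        "DEVICE1": {"signed_curve25519:AAAA": {"key": "..."}}
    }
}

missing = []
for user_id, device, algorithm, count in query:
    fulfilled = sum(
        key_id.startswith(f"{algorithm}:")
        for key_id in response.get(user_id, {}).get(device, {})
    )
    if count - fulfilled > 0:
        missing.append((user_id, device, algorithm, count - fulfilled))
# missing == [("@alice:example.org", "DEVICE1", "signed_curve25519", 1)]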

    async def query_keys(
        self, service: "ApplicationService", query: Dict[str, List[str]]
    ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]:
        """Query the application service for keys.

        Note that any error (including a timeout) is treated as the application
        service having no information.

        Args:
            service: The application service to query.
            query: A map of user ID to a list of the device IDs to query.

        Returns:
            A map of device_keys/master_keys/self_signing_keys/user_signing_keys:

            device_keys is a map of user ID -> a map of device ID -> device info.
        """
        if service.url is None:
            return {}

        # This is required by the configuration.
        assert service.hs_token is not None

        uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3984/keys/query"
        try:
            response = await self.post_json_get_json(
                uri,
                query,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
        except HttpResponseException as e:
            # The appservice doesn't support this endpoint.
            if is_unknown_endpoint(e):
                return {}
            logger.warning("query_keys to %s received %s", uri, e.code)
            return {}
        except Exception as ex:
            logger.warning("query_keys to %s threw exception %s", uri, ex)
            return {}

        return response
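
For illustration only, not part of the diff: the MSC3984 `query` body appears to mirror the `device_keys` field of the client-server `/keys/query` API, i.e. a map of user ID to a list of device IDs, where (per that API) an empty list requests all known devices. A hypothetical payload:

# Hypothetical example; user and device IDs are invented.
query = {
    "@alice:example.org": [],          # empty list: all of alice's devices
    "@bob:example.org": ["DEVICE2"],   # only this device
}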

    def _serialize(
        self, service: "ApplicationService", events: Iterable[EventBase]
    ) -> List[JsonDict]:
@@ -11,9 +11,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Iterable
from typing import Any, Dict, Iterable, Type, TypeVar

import jsonschema
from pydantic import BaseModel, ValidationError, parse_obj_as

from synapse.config._base import ConfigError
from synapse.types import JsonDict
@@ -64,3 +65,28 @@ def json_error_to_config_error(
    else:
        path.append(str(p))
    return ConfigError(e.message, path)


Model = TypeVar("Model", bound=BaseModel)


def parse_and_validate_mapping(
    config: Any,
    model_type: Type[Model],
) -> Dict[str, Model]:
    """Parse `config` as a mapping from strings to a given `Model` type.

    Args:
        config: The configuration data to check.
        model_type: The BaseModel to validate and parse against.

    Returns:
        Fully validated and parsed Dict[str, Model].

    Raises:
        ConfigError, if given improper input.
    """
    try:
        # type-ignore: mypy doesn't like constructing `Dict[str, model_type]` because
        # `model_type` is a runtime variable. Pydantic is fine with this.
        instances = parse_obj_as(Dict[str, model_type], config)  # type: ignore[valid-type]
    except ValidationError as e:
        raise ConfigError(str(e)) from e
    return instances
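
For illustration only, not part of the diff: a hypothetical use of `parse_and_validate_mapping`; the model and config values below are invented. A pydantic `ValidationError` surfaces as a `ConfigError`, keeping config failures uniform across the codebase.

# Hypothetical usage; `ProviderConfig` and the config dict are invented.
from pydantic import BaseModel

class ProviderConfig(BaseModel):
    api_endpoint: str
    timeout_ms: int = 10_000  # default applied when the key is absent

raw = {"provider1": {"api_endpoint": "https://example.com/api"}}
parsed = parse_and_validate_mapping(raw, ProviderConfig)
assert parsed["provider1"].timeout_ms == 10_000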

@@ -33,6 +33,16 @@ class AppServiceConfig(Config):

    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        self.app_service_config_files = config.get("app_service_config_files", [])
        if not isinstance(self.app_service_config_files, list) or not all(
            type(x) is str for x in self.app_service_config_files
        ):
            # Arbitrary JSON reaches this function, so this path is exercised
            # in practice.
            raise ConfigError(
                "Expected '%s' to be a list of AS config files"
                % (self.app_service_config_files),
                "app_service_config_files",
            )

        self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)


@@ -40,10 +50,6 @@ def load_appservices(
    hostname: str, config_files: List[str]
) -> List[ApplicationService]:
    """Returns a list of Application Services from the config files."""
    if not isinstance(config_files, list):
        # mypy deems this unreachable, but arbitrary JSON reaches this function in practice.
        logger.warning("Expected %s to be a list of AS config files.", config_files)  # type: ignore[unreachable]
        return []

    # Dicts of value -> filename
    seen_as_tokens: Dict[str, str] = {}
Some files were not shown because too many files have changed in this diff.