Compare commits
176 Commits
v1.57.0...ts/spam-er
| Author | SHA1 | Date |
|---|---|---|
|  | ae66c672fe |  |
|  | 84facf769e |  |
|  | c72d26c1e1 |  |
|  | c997bfb926 |  |
|  | 29f06704b8 |  |
|  | 989fa33096 |  |
|  | 147f098fb4 |  |
|  | dbb12a0b54 |  |
|  | 5cfb004595 |  |
|  | 5c00151c28 |  |
|  | 2aad0ae57f |  |
|  | b44fbdffa4 |  |
|  | 02cdace707 |  |
|  | efcd899f69 |  |
|  | 735faab2b8 |  |
|  | c707ea736a |  |
|  | 80b3246528 |  |
|  | 2bae6d93c9 |  |
|  | 239da21c1a |  |
|  | 946b8437cf |  |
|  | 464fe99f52 |  |
|  | 699192fc1a |  |
|  | 8ef0d85acd |  |
|  | 2cdac6f585 |  |
|  | e5fd23fb6f |  |
|  | 8dd3e0e084 |  |
|  | ade3008821 |  |
|  | d80a7ab151 |  |
|  | 615d96ad6e |  |
|  | 34e84fee68 |  |
|  | bf0c3ca20a |  |
|  | a00462dd99 |  |
|  | 8de0facaae |  |
|  | 41a882e62d |  |
|  | fa0eab9c8e |  |
|  | c5969b346d |  |
|  | 77258b6725 |  |
|  | 18d6c18aa1 |  |
|  | 26c1ad71c5 |  |
|  | 0ce2201932 |  |
|  | 051a1c3f22 |  |
|  | 4337d33a73 |  |
|  | 2607b3e181 |  |
|  | c2d50e9f6c |  |
|  | f1fbf75cfc |  |
|  | a377a43386 |  |
|  | 3a8ee22911 |  |
|  | bc149a18f6 |  |
|  | d2784b6567 |  |
|  | 6a17a291a6 |  |
|  | e923fc20bd |  |
|  | ef86cf3d28 |  |
|  | 07fa53ec40 |  |
|  | b8fa24b022 |  |
|  | 9ae0253f4e |  |
|  | f90d381c7b |  |
|  | ddc8bba00f |  |
|  | cc7656099d |  |
|  | c0379d6e5b |  |
|  | 2d74a8c178 |  |
|  | 7fbf42499d |  |
|  | 116a4c8340 |  |
|  | 332cce8dcf |  |
|  | ba3fd54bad |  |
|  | b2df0716bc |  |
|  | 75dff3dc98 |  |
|  | 01e625513a |  |
|  | 873d467976 |  |
|  | 96e0cdbc5a |  |
|  | 9ce51a47f6 |  |
|  | aa5f5ede33 |  |
|  | d66d68f917 |  |
|  | c4514b97db |  |
|  | 77dee1b451 |  |
|  | 5938928c59 |  |
|  | db2edf5a65 |  |
|  | 13e4386710 |  |
|  | bf2fea8f7d |  |
|  | ae7858f184 |  |
|  | 01dcf7532d |  |
|  | 7e6598bcf6 |  |
|  | 8f5d2823df |  |
|  | 8d156ec0ba |  |
|  | 57fac2a234 |  |
|  | 3ae56d125c |  |
|  | 0d9eaa19fd |  |
|  | 0b684b59e5 |  |
|  | 629aa51743 |  |
|  | 5d3509dfda |  |
|  | 5a320baa45 |  |
|  | f282d5fc11 |  |
|  | ce6ecdd4b4 |  |
|  | 78b99de7c2 |  |
|  | 5ef673de4f |  |
|  | d743b25c8f |  |
|  | 30c8e7e408 |  |
|  | 6463244375 |  |
|  | 8a23bde823 |  |
|  | e8d1ec0e92 |  |
|  | b76f1a4d5f |  |
|  | 63ba9ba38b |  |
|  | 9986621bc8 |  |
|  | 9cfecd2dc0 |  |
|  | 56c9c6c465 |  |
|  | 6b64ee9ec7 |  |
|  | f59e3f4c90 |  |
|  | 6d89f1239c |  |
|  | c48ab3734e |  |
|  | 706456de1f |  |
|  | ee1601e59d |  |
|  | 6b9e95015b |  |
|  | 416604e3bc |  |
|  | a54d9b0508 |  |
|  | f987cdd80b |  |
|  | 30db7fdb91 |  |
|  | 7c063da25c |  |
|  | 730fcda546 |  |
|  | 99ab45423a |  |
|  | 17d99f758a |  |
|  | e75c7e3b6d |  |
|  | 8a87b4435a |  |
|  | 813d728d09 |  |
|  | 8bac3e0435 |  |
|  | 185da8f0f2 |  |
|  | d9b71410c2 |  |
|  | a36a38b1ca |  |
|  | a50fb411b3 |  |
|  | b82fff66df |  |
|  | f46b223354 |  |
|  | f5668f0b4a |  |
|  | 09b4f6e46d |  |
|  | 01c8f9ca69 |  |
|  | e5a76ec00b |  |
|  | 103f51d867 |  |
|  | f8f06fc773 |  |
|  | 05e8a5d298 |  |
|  | 3e2e76ca15 |  |
|  | ecef741add |  |
|  | d0c1f4ca4c |  |
|  | 4bc8cb4669 |  |
|  | eed38c5027 |  |
|  | c1482a352a |  |
|  | b80bb7e452 |  |
|  | 798deb3a10 |  |
|  | a1f87f57ff |  |
|  | fbdee86004 |  |
|  | 7dec4ce7e4 |  |
|  | dbe016e258 |  |
|  | 0921d93dcd |  |
|  | b121a3ad2b |  |
|  | f8d3ee9570 |  |
|  | 3c758d9808 |  |
|  | aaaff98202 |  |
|  | 7efddbebef |  |
|  | 960b4fb409 |  |
|  | a743f7d33e |  |
|  | 0b014eb25e |  |
|  | 535a689cfc |  |
|  | 6b3e0ea6bd |  |
|  | 8af8a9bce5 |  |
|  | 8e2759f2d8 |  |
|  | 0922462fc7 |  |
|  | 73d8ded0b0 |  |
|  | e3a49f4784 |  |
|  | d24cd17820 |  |
|  | 36d8b83888 |  |
|  | 32545d2e26 |  |
|  | 5a275a2377 |  |
|  | 58c657322a |  |
|  | aa28110264 |  |
|  | 4bdbebccb9 |  |
|  | ba1588461b |  |
|  | a468768104 |  |
|  | 9535fd0f9c |  |
|  | 320186319a |  |
|  | 86cf6a3a17 |  |
4 .ci/latest_deps_build_failed_issue_template.md (Normal file)
@@ -0,0 +1,4 @@
---
title: CI run against latest deps is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
.ci/patch_for_twisted_trunk.sh (deleted)
@@ -1,8 +0,0 @@
#!/bin/sh

# replaces the dependency on Twisted in `python_dependencies` with trunk.

set -e
cd "$(dirname "$0")"/..

sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
.dockerignore
@@ -8,8 +8,4 @@
!pyproject.toml
!poetry.lock

# TODO: remove these once we have moved over to using poetry-core in pyproject.toml
!MANIFEST.in
!setup.py

**/__pycache__
30 .github/workflows/docker.yml (vendored)
@@ -34,32 +34,24 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

# TODO: consider using https://github.com/docker/metadata-action instead of this
# custom magic
- name: Calculate docker image tag
id: set-tag
run: |
case "${GITHUB_REF}" in
refs/heads/develop)
tag=develop
;;
refs/heads/master|refs/heads/main)
tag=latest
;;
refs/tags/*)
tag=${GITHUB_REF#refs/tags/}
;;
*)
tag=${GITHUB_SHA}
;;
esac
echo "::set-output name=tag::$tag"
uses: docker/metadata-action@master
with:
images: matrixdotorg/synapse
flavor: |
latest=false
tags: |
type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
type=pep440,pattern={{raw}}

- name: Build and push all platforms
uses: docker/build-push-action@v2
with:
push: true
labels: "gitsha1=${{ github.sha }}"
tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
tags: "${{ steps.set-tag.outputs.tags }}"
file: "docker/Dockerfile"
platforms: linux/amd64,linux/arm64
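For orientation, the tag mapping produced by the new `docker/metadata-action` configuration can be read off the `tags:` rules above. A sketch of the expected results, inferred from that config rather than observed from a real workflow run (the example tag ref is hypothetical):

```sh
# ref                                  -> image tags pushed (per the metadata-action config)
# refs/heads/develop                   -> matrixdotorg/synapse:develop
# refs/heads/master or refs/heads/main -> matrixdotorg/synapse:latest
# refs/tags/v1.59.0rc1                 -> matrixdotorg/synapse:v1.59.0rc1  (type=pep440,pattern={{raw}})
# `latest=false` in `flavor:` stops metadata-action adding an implicit `latest` tag on tag builds.
```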
159 .github/workflows/latest_deps.yml (vendored, Normal file)
@@ -0,0 +1,159 @@
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
# dependencies which match the broad requirements. Since most CI runs are against
# the locked poetry environment, run specifically against the latest dependencies to
# know if there's an upcoming breaking change.
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.

name: Latest dependencies

on:
schedule:
- cron: 0 7 * * *
workflow_dispatch:

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
mypy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# The dev dependencies aren't exposed in the wheel metadata (at least with current
# poetry-core versions), so we install with poetry.
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
poetry-version: "1.2.0b1"
extras: "all"
# Dump installed versions for debugging.
- run: poetry run pip list > before.txt
# Upgrade all runtime dependencies only. This is intended to mimic a fresh
# `pip install matrix-synapse[all]` as closely as possible.
- run: poetry update --no-dev
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
- name: Remove warn_unused_ignores from mypy config
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
- run: poetry run mypy
trial:
runs-on: ubuntu-latest
strategy:
matrix:
include:
- database: "sqlite"
- database: "postgres"
postgres-version: "14"

steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
if: ${{ matrix.postgres-version }}
run: |
docker run -d -p 5432:5432 \
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: actions/setup-python@v2
with:
python-version: "3.x"
- run: pip install .[all,test]
- name: Await PostgreSQL
if: ${{ matrix.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
- run: python -m twisted.trial --jobs=2 tests
env:
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
SYNAPSE_POSTGRES_HOST: localhost
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
# Note: Dumps to workflow logs instead of using actions/upload-artifact
# This keeps logs colocated with failing jobs
# It also ignores find's exit code; this is a best effort affair
run: >-
find _trial_temp -name '*.log'
-exec echo "::group::{}" \;
-exec cat {} \;
-exec echo "::endgroup::" \;
|| true


sytest:
runs-on: ubuntu-latest
container:
image: matrixdotorg/sytest-synapse:testing
volumes:
- ${{ github.workspace }}:/src
strategy:
fail-fast: false
matrix:
include:
- sytest-tag: focal

- sytest-tag: focal
postgres: postgres
workers: workers
redis: redis
env:
POSTGRES: ${{ matrix.postgres && 1}}
WORKERS: ${{ matrix.workers && 1 }}
REDIS: ${{ matrix.redis && 1 }}
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

steps:
- uses: actions/checkout@v2
- name: Ensure sytest runs `pip install`
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
run: rm /src/poetry.lock
working-directory: /src
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Run SyTest
run: /bootstrap.sh synapse
working-directory: /src
- name: Summarise results.tap
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@v2
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
path: |
/logs/results.tap
/logs/**/*.log*


# TODO: run complement (as with twisted trunk, see #12473).

# open an issue if the build fails, so we know about it.
open-issue:
if: failure()
needs:
# TODO: should mypy be included here? It feels more brittle than the other two.
- mypy
- trial
- sytest

runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2
- uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
update_existing: true
filename: .ci/latest_deps_build_failed_issue_template.md
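The core of this workflow can be approximated locally before a scheduled run flags a breakage. A minimal sketch, assuming a checkout of develop with Poetry 1.2.0b1 (the version the workflow pins) available on PATH:

```sh
poetry install --extras "all test"   # start from the locked environment
poetry run pip list > before.txt     # record the locked versions
poetry update --no-dev               # upgrade runtime deps only, mimicking a fresh pip install
poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
sed '/warn_unused_ignores = True/d' -i mypy.ini   # newest deps may leave some ignores unused
poetry run mypy                      # typecheck against the latest dependencies
poetry run trial --jobs=2 tests      # then run the unit tests
```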
44 .github/workflows/tests.yml (vendored)
@@ -15,24 +15,14 @@ jobs:
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: pip install -e .
- run: pip install .
- run: scripts-dev/generate_sample_config.sh --check
- run: scripts-dev/config-lint.sh

lint:
runs-on: ubuntu-latest
strategy:
matrix:
toxenv:
- "check_codestyle"
- "check_isort"
- "mypy"

steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: pip install tox
- run: tox -e ${{ matrix.toxenv }}
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
with:
typechecking-extras: "all"

lint-crlf:
runs-on: ubuntu-latest
@@ -71,23 +61,23 @@ jobs:
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10"]
database: ["sqlite"]
toxenv: ["py"]
extras: ["all"]
include:
# Newest Python without optional deps
- python-version: "3.10"
toxenv: "py-noextras"
extras: ""

# Oldest Python with PostgreSQL
- python-version: "3.7"
database: "postgres"
postgres-version: "10"
toxenv: "py"
extras: "all"

# Newest Python with newest PostgreSQL
- python-version: "3.10"
database: "postgres"
postgres-version: "14"
toxenv: "py"
extras: "all"

steps:
- uses: actions/checkout@v2
@@ -99,17 +89,16 @@
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: actions/setup-python@v2
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: ${{ matrix.python-version }}
- run: pip install tox
extras: ${{ matrix.extras }}
- name: Await PostgreSQL
if: ${{ matrix.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
- run: tox -e ${{ matrix.toxenv }}
- run: poetry run trial --jobs=2 tests
env:
TRIAL_FLAGS: "--jobs=2"
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
SYNAPSE_POSTGRES_HOST: localhost
SYNAPSE_POSTGRES_USER: postgres
@@ -156,23 +145,24 @@ jobs:

trial-pypy:
# Very slow; only run if the branch name includes 'pypy'
# Note: sqlite only; no postgres. Completely untested since poetry move.
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
needs: linting-done
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["pypy-3.7"]
extras: ["all"]

steps:
- uses: actions/checkout@v2
# Install libs necessary for PyPy to build binary wheels for dependencies
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: actions/setup-python@v2
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: ${{ matrix.python-version }}
- run: pip install tox
- run: tox -e py
env:
TRIAL_FLAGS: "--jobs=2"
extras: ${{ matrix.extras }}
- run: poetry run trial --jobs=2 tests
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
48 .github/workflows/twisted_trunk.yml (vendored)
@@ -6,16 +6,27 @@ on:

workflow_dispatch:

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
mypy:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: .ci/patch_for_twisted_trunk.sh
- run: pip install tox
- run: tox -e mypy
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
extras: "all"
- run: |
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
- name: Remove warn_unused_ignores from mypy config
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
- run: poetry run mypy

trial:
runs-on: ubuntu-latest
@@ -23,14 +34,15 @@
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- uses: actions/setup-python@v2
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: 3.7
- run: .ci/patch_for_twisted_trunk.sh
- run: pip install tox
- run: tox -e py
env:
TRIAL_FLAGS: "--jobs=2"
python-version: "3.x"
extras: "all test"
- run: |
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
- run: poetry run trial --jobs 2 tests

- name: Dump logs
# Logs are most useful when the command fails, always include them.
@@ -55,11 +67,23 @@
steps:
- uses: actions/checkout@v2
- name: Patch dependencies
run: .ci/patch_for_twisted_trunk.sh
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
# but the sytest-synapse container expects it to be in /venv/.
# We symlink it before running poetry so that poetry actually
# ends up installing to `/venv`.
run: |
ln -s -T /venv /src/.venv
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
working-directory: /src
- name: Run SyTest
run: /bootstrap.sh synapse
working-directory: /src
env:
# Use offline mode to avoid reinstalling the pinned version of
# twisted.
OFFLINE: 1
- name: Summarise results.tap
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
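The Twisted-trunk pinning that all three jobs now share can also be reproduced in a local poetry-managed checkout, for example to debug a failure. A sketch using the same commands the workflow runs:

```sh
# Swap the locked Twisted for current trunk, then reinstall and test.
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
poetry run trial --jobs 2 tests
```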
3 .gitignore (vendored)
@@ -15,8 +15,7 @@ _trial_temp*/
.DS_Store
__pycache__/

# We do want the poetry lockfile. TODO: is there a good reason for ignoring
# '*.lock' above? If not, let's nuke it.
# We do want the poetry lockfile.
!poetry.lock

# stuff that is likely to exist when you run a server locally
226 CHANGES.md
@@ -1,3 +1,229 @@
Synapse 1.59.0rc1 (2022-05-10)
==============================

This release makes several changes that server administrators should be aware of:

- Device name lookup over federation is now disabled by default. ([\#12616](https://github.com/matrix-org/synapse/issues/12616))
- The `synapse.app.appservice` and `synapse.app.user_dir` worker application types are now deprecated. ([\#12452](https://github.com/matrix-org/synapse/issues/12452), [\#12654](https://github.com/matrix-org/synapse/issues/12654))

See [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1590) for more details.

Additionally, this release removes the non-standard `m.login.jwt` login type from Synapse. It can be replaced with `org.matrix.login.jwt` for identical behaviour. This is only used if `jwt_config.enabled` is set to `true` in the configuration. ([\#12597](https://github.com/matrix-org/synapse/issues/12597))

Features
--------

- Support [MSC3266](https://github.com/matrix-org/matrix-doc/pull/3266) room summaries over federation. ([\#11507](https://github.com/matrix-org/synapse/issues/11507))
- Implement [changes](https://github.com/matrix-org/matrix-spec-proposals/pull/2285/commits/4a77139249c2e830aec3c7d6bd5501a514d1cc27) to [MSC2285 (hidden read receipts)](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). Contributed by @SimonBrandner. ([\#12168](https://github.com/matrix-org/synapse/issues/12168), [\#12635](https://github.com/matrix-org/synapse/issues/12635), [\#12636](https://github.com/matrix-org/synapse/issues/12636), [\#12670](https://github.com/matrix-org/synapse/issues/12670))
- Extend the [module API](https://github.com/matrix-org/synapse/blob/release-v1.59/synapse/module_api/__init__.py) to allow modules to change actions for existing push rules of local users. ([\#12406](https://github.com/matrix-org/synapse/issues/12406))
- Add the `notify_appservices_from_worker` configuration option (superseding `notify_appservices`) to allow a generic worker to be designated as the worker to send traffic to Application Services. ([\#12452](https://github.com/matrix-org/synapse/issues/12452))
- Add the `update_user_directory_from_worker` configuration option (superseding `update_user_directory`) to allow a generic worker to be designated as the worker to update the user directory. ([\#12654](https://github.com/matrix-org/synapse/issues/12654))
- Add new `enable_registration_token_3pid_bypass` configuration option to allow registrations via token as an alternative to verifying a 3pid. ([\#12526](https://github.com/matrix-org/synapse/issues/12526))
- Implement [MSC3786](https://github.com/matrix-org/matrix-spec-proposals/pull/3786): Add a default push rule to ignore `m.room.server_acl` events. ([\#12601](https://github.com/matrix-org/synapse/issues/12601))
- Add new `mau_appservice_trial_days` configuration option to specify a different trial period for users registered via an appservice. ([\#12619](https://github.com/matrix-org/synapse/issues/12619))


Bugfixes
--------

- Fix a bug introduced in Synapse 1.48.0 where the latest thread reply provided failed to include the proper bundled aggregations. ([\#12273](https://github.com/matrix-org/synapse/issues/12273))
- Fix a bug introduced in Synapse 1.22.0 where attempting to send a large amount of read receipts to an application service all at once would result in duplicate content and abnormally high memory usage. Contributed by Brad & Nick @ Beeper. ([\#12544](https://github.com/matrix-org/synapse/issues/12544))
- Fix a bug introduced in Synapse 1.57.0 which could cause `Failed to calculate hosts in room` errors to be logged for outbound federation. ([\#12570](https://github.com/matrix-org/synapse/issues/12570))
- Fix a long-standing bug where status codes would almost always get logged as `200!`, irrespective of the actual status code, when clients disconnect before a request has finished processing. ([\#12580](https://github.com/matrix-org/synapse/issues/12580))
- Fix race when persisting an event and deleting a room that could lead to outbound federation breaking. ([\#12594](https://github.com/matrix-org/synapse/issues/12594))
- Fix a bug introduced in Synapse 1.53.0 where bundled aggregations for annotations/edits were incorrectly calculated. ([\#12633](https://github.com/matrix-org/synapse/issues/12633))
- Fix a long-standing bug where rooms containing power levels with string values could not be upgraded. ([\#12657](https://github.com/matrix-org/synapse/issues/12657))
- Prevent memory leak from reoccurring when presence is disabled. ([\#12656](https://github.com/matrix-org/synapse/issues/12656))


Updates to the Docker image
---------------------------

- Explicitly opt-in to using [BuildKit-specific features](https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md) in the Dockerfile. This fixes issues with building images in some GitLab CI environments. ([\#12541](https://github.com/matrix-org/synapse/issues/12541))
- Update the "Build docker images" GitHub Actions workflow to use `docker/metadata-action` to generate docker image tags, instead of a custom shell script. Contributed by @henryclw. ([\#12573](https://github.com/matrix-org/synapse/issues/12573))


Improved Documentation
----------------------

- Update SQL statements and replace use of old table `user_stats_historical` in docs for Synapse Admins. ([\#12536](https://github.com/matrix-org/synapse/issues/12536))
- Add missing linebreak to `pipx` install instructions. ([\#12579](https://github.com/matrix-org/synapse/issues/12579))
- Add information about the TCP replication module to docs. ([\#12621](https://github.com/matrix-org/synapse/issues/12621))
- Fixes to the formatting of `README.rst`. ([\#12627](https://github.com/matrix-org/synapse/issues/12627))
- Fix docs on how to run specific Complement tests using the `complement.sh` test runner. ([\#12664](https://github.com/matrix-org/synapse/issues/12664))


Deprecations and Removals
-------------------------

- Remove unstable identifiers from [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069). ([\#12596](https://github.com/matrix-org/synapse/issues/12596))
- Remove the unspecified `m.login.jwt` login type and the unstable `uk.half-shot.msc2778.login.application_service` from
  [MSC2778](https://github.com/matrix-org/matrix-doc/pull/2778). ([\#12597](https://github.com/matrix-org/synapse/issues/12597))
- Synapse now requires at least Python 3.7.1 (up from 3.7.0), for compatibility with the latest Twisted trunk. ([\#12613](https://github.com/matrix-org/synapse/issues/12613))


Internal Changes
----------------

- Use supervisord to supervise Postgres and Caddy in the Complement image to reduce restart time. ([\#12480](https://github.com/matrix-org/synapse/issues/12480))
- Immediately retry any requests that have backed off when a server comes back online. ([\#12500](https://github.com/matrix-org/synapse/issues/12500))
- Use `make_awaitable` instead of `defer.succeed` for return values of mocks in tests. ([\#12505](https://github.com/matrix-org/synapse/issues/12505))
- Consistently check if an object is a `frozendict`. ([\#12564](https://github.com/matrix-org/synapse/issues/12564))
- Protect module callbacks with read semantics against cancellation. ([\#12568](https://github.com/matrix-org/synapse/issues/12568))
- Improve comments and error messages around access tokens. ([\#12577](https://github.com/matrix-org/synapse/issues/12577))
- Improve docstrings for the receipts store. ([\#12581](https://github.com/matrix-org/synapse/issues/12581))
- Use constants for read-receipts in tests. ([\#12582](https://github.com/matrix-org/synapse/issues/12582))
- Log status code of cancelled requests as 499 and avoid logging stack traces for them. ([\#12587](https://github.com/matrix-org/synapse/issues/12587), [\#12663](https://github.com/matrix-org/synapse/issues/12663))
- Remove special-case for `twisted` logger from default log config. ([\#12589](https://github.com/matrix-org/synapse/issues/12589))
- Use `getClientAddress` instead of the deprecated `getClientIP`. ([\#12599](https://github.com/matrix-org/synapse/issues/12599))
- Add link to documentation in Grafana Dashboard. ([\#12602](https://github.com/matrix-org/synapse/issues/12602))
- Reduce log spam when running multiple event persisters. ([\#12610](https://github.com/matrix-org/synapse/issues/12610))
- Add extra debug logging to federation sender. ([\#12614](https://github.com/matrix-org/synapse/issues/12614))
- Prevent remote homeservers from requesting local user device names by default. ([\#12616](https://github.com/matrix-org/synapse/issues/12616))
- Add a consistency check on events which we read from the database. ([\#12620](https://github.com/matrix-org/synapse/issues/12620))
- Remove use of the `constantly` library and switch to enums for `EventRedactBehaviour`. Contributed by @andrewdoh. ([\#12624](https://github.com/matrix-org/synapse/issues/12624))
- Remove unused code related to receipts. ([\#12632](https://github.com/matrix-org/synapse/issues/12632))
- Minor improvements to the scripts for running Synapse in worker mode under Complement. ([\#12637](https://github.com/matrix-org/synapse/issues/12637))
- Move `pympler` back in to the `all` extras. ([\#12652](https://github.com/matrix-org/synapse/issues/12652))
- Fix spelling of `M_UNRECOGNIZED` in comments. ([\#12665](https://github.com/matrix-org/synapse/issues/12665))
- Release script: confirm the commit to be tagged before tagging. ([\#12556](https://github.com/matrix-org/synapse/issues/12556))
- Fix a typo in the announcement text generated by the Synapse release development script. ([\#12612](https://github.com/matrix-org/synapse/issues/12612))

### Typechecking

- Fix scripts-dev to pass typechecking. ([\#12356](https://github.com/matrix-org/synapse/issues/12356))
- Add some type hints to datastore. ([\#12485](https://github.com/matrix-org/synapse/issues/12485))
- Remove unused `# type: ignore`s. ([\#12531](https://github.com/matrix-org/synapse/issues/12531))
- Allow unused `# type: ignore` comments in bleeding edge CI jobs. ([\#12576](https://github.com/matrix-org/synapse/issues/12576))
- Remove redundant lines of config from `mypy.ini`. ([\#12608](https://github.com/matrix-org/synapse/issues/12608))
- Update to mypy 0.950. ([\#12650](https://github.com/matrix-org/synapse/issues/12650))
- Use `Concatenate` to better annotate `_do_execute`. ([\#12666](https://github.com/matrix-org/synapse/issues/12666))
- Use `ParamSpec` to refine type hints. ([\#12667](https://github.com/matrix-org/synapse/issues/12667))
- Fix mypy against latest pillow stubs. ([\#12671](https://github.com/matrix-org/synapse/issues/12671))

Synapse 1.58.1 (2022-05-05)
===========================

This patch release includes a fix to the Debian packages, installing the
`systemd` and `cache_memory` extra package groups, which were incorrectly
omitted in v1.58.0. This primarily prevented Synapse from starting
when the `systemd.journal.JournalHandler` log handler was configured.
See [#12631](https://github.com/matrix-org/synapse/issues/12631) for further information.

Otherwise, no significant changes since 1.58.0.


Synapse 1.58.0 (2022-05-03)
===========================

As of this release, the groups/communities feature in Synapse is now disabled by default. See [\#11584](https://github.com/matrix-org/synapse/issues/11584) for details. As mentioned in [the upgrade notes](https://github.com/matrix-org/synapse/blob/develop/docs/upgrade.md#upgrading-to-v1580), this feature will be removed in Synapse 1.61.

No significant changes since 1.58.0rc2.


Synapse 1.58.0rc2 (2022-04-26)
==============================

This release candidate fixes bugs related to Synapse 1.58.0rc1's logic for handling device list updates.

Bugfixes
--------

- Fix a bug introduced in Synapse 1.58.0rc1 where the main process could consume excessive amounts of CPU and memory while handling sentry logging failures. ([\#12554](https://github.com/matrix-org/synapse/issues/12554))
- Fix a bug introduced in Synapse 1.58.0rc1 where opentracing contexts were not correctly sent to whitelisted remote servers with device lists updates. ([\#12555](https://github.com/matrix-org/synapse/issues/12555))


Internal Changes
----------------

- Reduce unnecessary work when handling remote device list updates. ([\#12557](https://github.com/matrix-org/synapse/issues/12557))


Synapse 1.58.0rc1 (2022-04-26)
==============================

Features
--------

- Implement [MSC3383](https://github.com/matrix-org/matrix-spec-proposals/pull/3383) for including the destination in server-to-server authentication headers. Contributed by @Bubu and @jcgruenhage for Famedly. ([\#11398](https://github.com/matrix-org/synapse/issues/11398))
- Docker images and Debian packages from matrix.org now contain a locked set of Python dependencies, greatly improving build reproducibility. ([Board](https://github.com/orgs/matrix-org/projects/54), [\#11537](https://github.com/matrix-org/synapse/issues/11537))
- Enable processing of device list updates asynchronously. ([\#12365](https://github.com/matrix-org/synapse/issues/12365), [\#12465](https://github.com/matrix-org/synapse/issues/12465))
- Implement [MSC2815](https://github.com/matrix-org/matrix-spec-proposals/pull/2815) to allow room moderators to view redacted event content. Contributed by @tulir @ Beeper. ([\#12427](https://github.com/matrix-org/synapse/issues/12427))
- Build Debian packages for Ubuntu 22.04 "Jammy Jellyfish". ([\#12543](https://github.com/matrix-org/synapse/issues/12543))


Bugfixes
--------

- Prevent a sync request from removing a user's busy presence status. ([\#12213](https://github.com/matrix-org/synapse/issues/12213))
- Fix bug with incremental sync missing events when rejoining/backfilling. Contributed by Nick @ Beeper. ([\#12319](https://github.com/matrix-org/synapse/issues/12319))
- Fix a long-standing bug which incorrectly caused `GET /_matrix/client/v3/rooms/{roomId}/event/{eventId}` to return edited events rather than the original. ([\#12476](https://github.com/matrix-org/synapse/issues/12476))
- Fix a bug introduced in Synapse 1.27.0 where the admin API for [deleting forward extremities](https://github.com/matrix-org/synapse/blob/erikj/fix_delete_event_response_count/docs/admin_api/rooms.md#deleting-forward-extremities) would always return a count of 1, no matter how many extremities were deleted. ([\#12496](https://github.com/matrix-org/synapse/issues/12496))
- Fix a long-standing bug where the image thumbnails embedded into email notifications were broken. ([\#12510](https://github.com/matrix-org/synapse/issues/12510))
- Fix a bug in the implementation of [MSC3202](https://github.com/matrix-org/matrix-spec-proposals/pull/3202) where Synapse would use the field name `device_unused_fallback_keys`, rather than `device_unused_fallback_key_types`. ([\#12520](https://github.com/matrix-org/synapse/issues/12520))
- Fix a bug introduced in Synapse 0.99.3 which could cause Synapse to consume large amounts of RAM when back-paginating in a large room. ([\#12522](https://github.com/matrix-org/synapse/issues/12522))


Improved Documentation
----------------------

- Fix rendering of the documentation site when using the 'print' feature. ([\#12340](https://github.com/matrix-org/synapse/issues/12340))
- Add a manual documenting config file options. ([\#12368](https://github.com/matrix-org/synapse/issues/12368), [\#12527](https://github.com/matrix-org/synapse/issues/12527))
- Update documentation to reflect that both the `run_background_tasks_on` option and the options for moving stream writers off of the main process are no longer experimental. ([\#12451](https://github.com/matrix-org/synapse/issues/12451))
- Update worker documentation and replace old `federation_reader` with `generic_worker`. ([\#12457](https://github.com/matrix-org/synapse/issues/12457))
- Strongly recommend [Poetry](https://python-poetry.org/) for development. ([\#12475](https://github.com/matrix-org/synapse/issues/12475))
- Add some example configurations for workers and update architectural diagram. ([\#12492](https://github.com/matrix-org/synapse/issues/12492))
- Fix a broken link in `README.rst`. ([\#12495](https://github.com/matrix-org/synapse/issues/12495))
- Add HAProxy delegation example with CORS headers to docs. ([\#12501](https://github.com/matrix-org/synapse/issues/12501))
- Remove extraneous comma in User Admin API's device deletion section so that the example JSON is actually valid and works. Contributed by @olmari. ([\#12533](https://github.com/matrix-org/synapse/issues/12533))


Deprecations and Removals
-------------------------

- The groups/communities feature in Synapse is now disabled by default. ([\#12344](https://github.com/matrix-org/synapse/issues/12344))
- Remove unstable identifiers from [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440). ([\#12382](https://github.com/matrix-org/synapse/issues/12382))


Internal Changes
----------------

- Preparation for faster-room-join work: start a background process to resynchronise the room state after a room join. ([\#12394](https://github.com/matrix-org/synapse/issues/12394))
- Preparation for faster-room-join work: Implement a tracking mechanism to allow functions to wait for full room state to arrive. ([\#12399](https://github.com/matrix-org/synapse/issues/12399))
- Remove an unstable identifier from [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083). ([\#12395](https://github.com/matrix-org/synapse/issues/12395))
- Run CI in the locked [Poetry](https://python-poetry.org/) environment, and remove corresponding `tox` jobs. ([\#12425](https://github.com/matrix-org/synapse/issues/12425), [\#12434](https://github.com/matrix-org/synapse/issues/12434), [\#12438](https://github.com/matrix-org/synapse/issues/12438), [\#12441](https://github.com/matrix-org/synapse/issues/12441), [\#12449](https://github.com/matrix-org/synapse/issues/12449), [\#12478](https://github.com/matrix-org/synapse/issues/12478), [\#12514](https://github.com/matrix-org/synapse/issues/12514), [\#12472](https://github.com/matrix-org/synapse/issues/12472))
- Change Mutual Rooms' `unstable_features` flag to `uk.half-shot.msc2666.mutual_rooms` which matches the current iteration of [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666). ([\#12445](https://github.com/matrix-org/synapse/issues/12445))
- Fix typo in the release script help string. ([\#12450](https://github.com/matrix-org/synapse/issues/12450))
- Fix a minor typo in the Debian changelogs generated by the release script. ([\#12497](https://github.com/matrix-org/synapse/issues/12497))
- Reintroduce the list of targets to the linter script, to avoid linting unwanted local-only directories during development. ([\#12455](https://github.com/matrix-org/synapse/issues/12455))
- Limit length of `device_id` to less than 512 characters. ([\#12454](https://github.com/matrix-org/synapse/issues/12454))
- Dockerfile-workers: reduce the amount we install in the image. ([\#12464](https://github.com/matrix-org/synapse/issues/12464))
- Dockerfile-workers: give the master its own log config. ([\#12466](https://github.com/matrix-org/synapse/issues/12466))
- complement-synapse-workers: factor out separate entry point script. ([\#12467](https://github.com/matrix-org/synapse/issues/12467))
- Back out experimental implementation of [MSC2314](https://github.com/matrix-org/matrix-spec-proposals/pull/2314). ([\#12474](https://github.com/matrix-org/synapse/issues/12474))
- Fix grammatical error in federation error response when the room version of a room is unknown. ([\#12483](https://github.com/matrix-org/synapse/issues/12483))
- Remove unnecessary configuration overrides in tests. ([\#12511](https://github.com/matrix-org/synapse/issues/12511))
- Refactor the relations code for clarity. ([\#12519](https://github.com/matrix-org/synapse/issues/12519))
- Add type hints so `docker` and `stubs` directories pass `mypy --disallow-untyped-defs`. ([\#12528](https://github.com/matrix-org/synapse/issues/12528))
- Update `delay_cancellation` to accept any awaitable, rather than just `Deferred`s. ([\#12468](https://github.com/matrix-org/synapse/issues/12468))
- Handle cancellation in `EventsWorkerStore._get_events_from_cache_or_db`. ([\#12529](https://github.com/matrix-org/synapse/issues/12529))


Synapse 1.57.1 (2022-04-20)
===========================

This is a patch release that only affects the Docker image. It is only of interest to administrators using [the LDAP module][LDAPModule] to authenticate their users.
If you have already upgraded to Synapse 1.57.0 without problem, then you have no need to upgrade to this patch release.

[LDAPModule]: https://github.com/matrix-org/matrix-synapse-ldap3


Updates to the Docker image
---------------------------

- Include version 0.2.0 of the Synapse LDAP Auth Provider module in the Docker image. This matches the version that was present in the Docker image for Synapse v1.56.0. ([\#12512](https://github.com/matrix-org/synapse/issues/12512))


Synapse 1.57.0 (2022-04-19)
===========================
54 MANIFEST.in
@@ -1,54 +0,0 @@
include LICENSE
include VERSION
include *.rst
include *.md
include demo/README
include demo/demo.tls.dh
include demo/*.py
include demo/*.sh

include synapse/py.typed
recursive-include synapse/storage *.sql
recursive-include synapse/storage *.sql.postgres
recursive-include synapse/storage *.sql.sqlite
recursive-include synapse/storage *.py
recursive-include synapse/storage *.txt
recursive-include synapse/storage *.md

recursive-include docs *
recursive-include scripts-dev *
recursive-include synapse *.pyi
recursive-include tests *.py
recursive-include tests *.pem
recursive-include tests *.p8
recursive-include tests *.crt
recursive-include tests *.key

recursive-include synapse/res *
recursive-include synapse/static *.css
recursive-include synapse/static *.gif
recursive-include synapse/static *.html
recursive-include synapse/static *.js

exclude .codecov.yml
exclude .coveragerc
exclude .dockerignore
exclude .editorconfig
exclude Dockerfile
exclude mypy.ini
exclude sytest-blacklist
exclude test_postgresql.sh

include book.toml
include pyproject.toml
recursive-include changelog.d *

include .flake8
prune .circleci
prune .github
prune .ci
prune contrib
prune debian
prune demo/etc
prune docker
prune stubs
37 README.rst
@@ -55,7 +55,7 @@ solutions. The hope is for Matrix to act as the building blocks for a new
generation of fully open and interoperable messaging and VoIP apps for the
internet.

Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
team, written in Python 3/Twisted.

In Matrix, every user runs one or more Matrix clients, which connect through to
@@ -293,39 +293,42 @@ directory of your choice::
git clone https://github.com/matrix-org/synapse.git
cd synapse

Synapse has a number of external dependencies, that are easiest
to install using pip and a virtualenv::
Synapse has a number of external dependencies. We maintain a fixed development
environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::

python3 -m venv ./env
source ./env/bin/activate
pip install -e ".[all,dev]"
pip install --user pipx
pipx install poetry

as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
for other installation methods.) Then ask poetry to create a virtual environment
from the project and install Synapse's dependencies::

poetry install --extras "all test"

This will run a process of downloading and installing all the needed
dependencies into a virtual env. If any dependencies fail to install,
try installing the failing modules individually::
dependencies into a virtual env.

pip install -e "module-name"
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::

We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
poetry run ./demo/start.sh

./demo/start.sh
(to stop, you can use ``poetry run ./demo/stop.sh``)

(to stop, you can use `./demo/stop.sh`)

See the [demo documentation](https://matrix-org.github.io/synapse/develop/development/demo.html)
See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
for more information.

If you just want to start a single instance of the app and run it directly::

# Create the homeserver.yaml config once
python -m synapse.app.homeserver \
poetry run synapse_homeserver \
--server-name my.domain.name \
--config-path homeserver.yaml \
--generate-config \
--report-stats=[yes|no]

# Start the app
python -m synapse.app.homeserver --config-path homeserver.yaml
poetry run synapse_homeserver --config-path homeserver.yaml


Running the unit tests
@@ -334,7 +337,7 @@ Running the unit tests
After getting up and running, you may wish to run Synapse's unit tests to
check that everything is installed correctly::

trial tests
poetry run trial tests

This should end with a 'PASSED' result (note that exact numbers will
differ)::
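Taken together, the updated README describes a first-time developer setup along these lines (a consolidated sketch of the commands above, assuming a Unix shell):

```sh
# Install poetry via pipx, as the README recommends.
pip install --user pipx
pipx install poetry

# Fetch the source and let poetry build the virtual environment.
git clone https://github.com/matrix-org/synapse.git
cd synapse
poetry install --extras "all test"

poetry run ./demo/start.sh   # three federated instances on ports 8080-8082
poetry run trial tests       # unit tests; should end with a 'PASSED' result
poetry run ./demo/stop.sh
```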
1 changelog.d/12477.misc (Normal file)
@@ -0,0 +1 @@
Add some type hints to datastore.

1 changelog.d/12586.misc (Normal file)
@@ -0,0 +1 @@
Add `@cancellable` decorator, for use on endpoint methods that can be cancelled when clients disconnect.

1 changelog.d/12588.misc (Normal file)
@@ -0,0 +1 @@
Add ability to cancel disconnected requests to `SynapseRequest`.

1 changelog.d/12630.misc (Normal file)
@@ -0,0 +1 @@
Add a helper class for testing request cancellation.

1 changelog.d/12676.misc (Normal file)
@@ -0,0 +1 @@
Improve documentation of the `synapse.push` module.

1 changelog.d/12677.misc (Normal file)
@@ -0,0 +1 @@
Refactor functions to be on `PushRuleEvaluatorForEvent`.

1 changelog.d/12679.misc (Normal file)
@@ -0,0 +1 @@
Preparation for database schema simplifications: stop writing to `event_reference_hashes`.

1 changelog.d/12683.bugfix (Normal file)
@@ -0,0 +1 @@
Fix a bug introduced in Synapse 1.57.0 where `/messages` would throw a 500 error when querying for a non-existent room.

1 changelog.d/12689.misc (Normal file)
@@ -0,0 +1 @@
Refactor `EventContext` class.

1 changelog.d/12694.misc (Normal file)
@@ -0,0 +1 @@
Capture the `Deferred` for request cancellation in `_AsyncResource`.

1 changelog.d/12695.misc (Normal file)
@@ -0,0 +1 @@
Fixes an incorrect type hint for `Filter._check_event_relations`.
@@ -66,6 +66,18 @@
],
"title": "Dashboards",
"type": "dashboards"
},
{
"asDropdown": false,
"icon": "external link",
"includeVars": false,
"keepTime": false,
"tags": [],
"targetBlank": true,
"title": "Synapse Documentation",
"tooltip": "Open Documentation",
"type": "link",
"url": "https://matrix-org.github.io/synapse/latest/"
}
],
"panels": [
@@ -10889,4 +10901,4 @@
"title": "Synapse",
"uid": "000000012",
"version": 100
}
}
22 debian/build_virtualenv (vendored)
@@ -30,9 +30,23 @@ case $(dpkg-architecture -q DEB_HOST_ARCH) in
;;
esac

# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
# than the 2/3 compatible `virtualenv`.
# Manually install Poetry and export a pip-compatible `requirements.txt`
# We need a Poetry pre-release as the export command is buggy in < 1.2
TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==1.2.0b1
poetry export \
--extras all \
--extras test \
--extras systemd \
-o exported_requirements.txt
deactivate
rm -rf "$TEMP_VENV"

# Use --no-deps to only install pinned versions in exported_requirements.txt,
# and to avoid https://github.com/pypa/pip/issues/9644
dh_virtualenv \
--install-suffix "matrix-synapse" \
--builtin-venv \
@@ -41,9 +55,11 @@ dh_virtualenv \
--preinstall="lxml" \
--preinstall="mock" \
--preinstall="wheel" \
--extra-pip-arg="--no-deps" \
--extra-pip-arg="--no-cache-dir" \
--extra-pip-arg="--compile" \
--extras="all,systemd,test"
--extras="all,systemd,test" \
--requirements="exported_requirements.txt"

PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
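To inspect what the package build will pin without running a full Debian build, the export stage can be exercised on its own. A sketch using the same commands as the script above (the throwaway virtualenv location is arbitrary):

```sh
TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==1.2.0b1   # pre-release: `poetry export` is buggy in < 1.2
poetry export --extras all --extras test --extras systemd -o exported_requirements.txt
deactivate && rm -rf "$TEMP_VENV"
head exported_requirements.txt   # inspect the pinned versions
```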
42 debian/changelog (vendored)
@@ -1,3 +1,45 @@
matrix-synapse-py3 (1.59.0~rc1) stable; urgency=medium

* Adjust how the `exported-requirements.txt` file is generated as part of
the process of building these packages. This affects the package
maintainers only; end-users are unaffected.
* New Synapse release 1.59.0rc1.

-- Synapse Packaging team <packages@matrix.org> Tue, 10 May 2022 10:45:08 +0100

matrix-synapse-py3 (1.58.1) stable; urgency=medium

* Include python dependencies from the `systemd` and `cache_memory` extras package groups, which
were incorrectly omitted from the 1.58.0 package.
* New Synapse release 1.58.1.

-- Synapse Packaging team <packages@matrix.org> Thu, 05 May 2022 14:58:23 +0100

matrix-synapse-py3 (1.58.0) stable; urgency=medium

* New Synapse release 1.58.0.

-- Synapse Packaging team <packages@matrix.org> Tue, 03 May 2022 10:52:58 +0100

matrix-synapse-py3 (1.58.0~rc2) stable; urgency=medium

* New Synapse release 1.58.0rc2.

-- Synapse Packaging team <packages@matrix.org> Tue, 26 Apr 2022 17:14:56 +0100

matrix-synapse-py3 (1.58.0~rc1) stable; urgency=medium

* Use poetry to manage the bundled virtualenv included with this package.
* New Synapse release 1.58.0rc1.

-- Synapse Packaging team <packages@matrix.org> Tue, 26 Apr 2022 11:15:20 +0100

matrix-synapse-py3 (1.57.1) stable; urgency=medium

* New synapse release 1.57.1.

-- Synapse Packaging team <packages@matrix.org> Wed, 20 Apr 2022 15:27:21 +0100

matrix-synapse-py3 (1.57.0) stable; urgency=medium

* New synapse release 1.57.0.
1 debian/clean (vendored, Normal file)
@@ -0,0 +1 @@
exported_requirements.txt
docker/Dockerfile
@@ -1,3 +1,4 @@
# syntax=docker/dockerfile:1
# Dockerfile to build the matrixdotorg/synapse docker images.
#
# Note that it uses features which are only available in BuildKit - see
@@ -59,7 +60,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
WORKDIR /synapse

# Copy just what we need to run `poetry export`...
COPY pyproject.toml poetry.lock README.rst /synapse/
COPY pyproject.toml poetry.lock /synapse/

RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt

@@ -98,9 +99,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
# Copy over the rest of the synapse source code.
COPY synapse /synapse/synapse/
# ... and what we need to `pip install`.
# TODO: once pyproject.toml declares poetry-core as its build system, we'll need to copy
# pyproject.toml here, ditching setup.py and MANIFEST.in.
COPY setup.py MANIFEST.in README.rst /synapse/
COPY pyproject.toml README.rst /synapse/

# Install the synapse package itself.
RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
docker/Dockerfile-workers
@@ -2,15 +2,27 @@
FROM matrixdotorg/synapse

# Install deps
RUN apt-get update
RUN apt-get install -y supervisor redis nginx
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
redis-server nginx-light

# Remove the default nginx sites
# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
RUN --mount=type=cache,target=/root/.cache/pip \
pip install supervisor~=4.2

# Disable the default nginx sites
RUN rm /etc/nginx/sites-enabled/default

# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/

# Copy a script to prefix log lines with the supervisor program name
COPY ./docker/prefix-log /usr/local/bin/

# Expose nginx listener port
EXPOSE 8080/tcp

@@ -19,5 +31,7 @@ EXPOSE 8080/tcp
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
ENTRYPOINT ["/configure_workers_and_start.py"]

# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
CMD /bin/sh /healthcheck.sh
@@ -13,8 +13,8 @@ RUN curl -OL "https://github.com/caddyserver/caddy/releases/download/v2.3.0/cadd
tar xzf caddy_2.3.0_linux_amd64.tar.gz && rm caddy_2.3.0_linux_amd64.tar.gz && mv caddy /root

# Install postgresql
RUN apt-get update
RUN apt-get install -y postgresql
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y postgresql-13

# Configure a user and create a database for Synapse
RUN pg_ctlcluster 13 main start && su postgres -c "echo \
@@ -34,40 +34,17 @@ WORKDIR /data
# Copy the caddy config
COPY conf-workers/caddy.complement.json /root/caddy.json

COPY conf-workers/postgres.supervisord.conf /etc/supervisor/conf.d/postgres.conf
COPY conf-workers/caddy.supervisord.conf /etc/supervisor/conf.d/caddy.conf

# Copy the entrypoint
COPY conf-workers/start-complement-synapse-workers.sh /

# Expose caddy's listener ports
EXPOSE 8008 8448

ENTRYPOINT \
# Replace the server name in the caddy config
sed -i "s/{{ server_name }}/${SERVER_NAME}/g" /root/caddy.json && \
# Start postgres
pg_ctlcluster 13 main start 2>&1 && \
# Start caddy
/root/caddy start --config /root/caddy.json 2>&1 && \
# Set the server name of the homeserver
SYNAPSE_SERVER_NAME=${SERVER_NAME} \
# No need to report stats here
SYNAPSE_REPORT_STATS=no \
# Set postgres authentication details which will be placed in the homeserver config file
POSTGRES_PASSWORD=somesecret POSTGRES_USER=postgres POSTGRES_HOST=localhost \
# Specify the workers to test with
SYNAPSE_WORKER_TYPES="\
event_persister, \
event_persister, \
background_worker, \
frontend_proxy, \
event_creator, \
user_dir, \
media_repository, \
federation_inbound, \
federation_reader, \
federation_sender, \
synchrotron, \
appservice, \
pusher" \
# Run the script that writes the necessary config files and starts supervisord, which in turn
# starts everything else
/configure_workers_and_start.py
ENTRYPOINT ["/start-complement-synapse-workers.sh"]

# Update the healthcheck to have a shorter check interval
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
CMD /bin/sh /healthcheck.sh
docker/complement/conf-workers/caddy.supervisord.conf (Normal file, 7 lines)
@@ -0,0 +1,7 @@
[program:caddy]
command=/usr/local/bin/prefix-log /root/caddy run --config /root/caddy.json
autorestart=unexpected
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
docker/complement/conf-workers/postgres.supervisord.conf (Normal file, 16 lines)
@@ -0,0 +1,16 @@
[program:postgres]
command=/usr/local/bin/prefix-log /usr/bin/pg_ctlcluster 13 main start --foreground

# Lower priority number = starts first
priority=1

autorestart=unexpected
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0

# Use 'Fast Shutdown' mode which aborts current transactions and closes connections quickly.
# (Default (TERM) is 'Smart Shutdown' which stops accepting new connections but
# lets existing connections close gracefully.)
stopsignal=INT
docker/complement/conf-workers/start-complement-synapse-workers.sh (Executable file, 44 lines)
@@ -0,0 +1,44 @@
#!/bin/bash
#
# Default ENTRYPOINT for the docker image used for testing synapse with workers under complement

set -e

function log {
    d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
    echo "$d $@"
}

# Replace the server name in the caddy config
sed -i "s/{{ server_name }}/${SERVER_NAME}/g" /root/caddy.json

# Set the server name of the homeserver
export SYNAPSE_SERVER_NAME=${SERVER_NAME}

# No need to report stats here
export SYNAPSE_REPORT_STATS=no

# Set postgres authentication details which will be placed in the homeserver config file
export POSTGRES_PASSWORD=somesecret
export POSTGRES_USER=postgres
export POSTGRES_HOST=localhost

# Specify the workers to test with
export SYNAPSE_WORKER_TYPES="\
    event_persister, \
    event_persister, \
    background_worker, \
    frontend_proxy, \
    event_creator, \
    user_dir, \
    media_repository, \
    federation_inbound, \
    federation_reader, \
    federation_sender, \
    synchrotron, \
    appservice, \
    pusher"

# Run the script that writes the necessary config files and starts supervisord, which in turn
# starts everything else
exec /configure_workers_and_start.py
@@ -103,8 +103,10 @@ experimental_features:
   spaces_enabled: true
   # Enable history backfilling support
   msc2716_enabled: true
-  # server-side support for partial state in /send_join
+  # server-side support for partial state in /send_join responses
   msc3706_enabled: true
+  # client-side support for partial state in /send_join responses
+  faster_joins: true
   # Enable jump to date endpoint
   msc3030_enabled: true
@@ -5,8 +5,11 @@
 nodaemon=true
 user=root

+[include]
+files = /etc/supervisor/conf.d/*.conf
+
 [program:nginx]
-command=/usr/sbin/nginx -g "daemon off;"
+command=/usr/local/bin/prefix-log /usr/sbin/nginx -g "daemon off;"
 priority=500
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
@@ -16,7 +19,7 @@ username=www-data
 autorestart=true

 [program:redis]
-command=/usr/bin/redis-server /etc/redis/redis.conf --daemonize no
+command=/usr/local/bin/prefix-log /usr/bin/redis-server /etc/redis/redis.conf --daemonize no
 priority=1
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
@@ -26,7 +29,7 @@ username=redis
 autorestart=true

 [program:synapse_main]
-command=/usr/local/bin/python -m synapse.app.homeserver --config-path="{{ main_config_path }}" --config-path=/conf/workers/shared.yaml
+command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver --config-path="{{ main_config_path }}" --config-path=/conf/workers/shared.yaml
 priority=10
 # Log startup failures to supervisord's stdout/err
 # Regular synapse logs will still go in the configured data directory
@@ -2,11 +2,7 @@ version: 1

 formatters:
   precise:
-{% if worker_name %}
     format: '%(asctime)s - worker:{{ worker_name }} - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
-{% else %}
-    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
-{% endif %}

 handlers:
{% if LOG_FILE_PATH %}
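The template above is rendered with jinja2 (by the `convert` helper in configure_workers_and_start.py). A minimal standalone sketch of that rendering step, with illustrative template text and values:

```python
# Sketch of how a jinja2 log-config template like the one above is rendered.
# The template text and variable values here are illustrative placeholders.
import jinja2

template_text = (
    "formatters:\n"
    "  precise:\n"
    "    format: '%(asctime)s - worker:{{ worker_name }} - %(message)s'\n"
)
rendered = jinja2.Template(template_text).render(worker_name="federation_reader1")
print(rendered)
```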
@@ -29,7 +29,7 @@
 import os
 import subprocess
 import sys
-from typing import Any, Dict, Set
+from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Set

 import jinja2
 import yaml
@@ -69,10 +69,10 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
         "worker_extra_conf": "enable_media_repo: true",
     },
     "appservice": {
-        "app": "synapse.app.appservice",
+        "app": "synapse.app.generic_worker",
         "listener_resources": [],
         "endpoint_patterns": [],
-        "shared_extra_conf": {"notify_appservices": False},
+        "shared_extra_conf": {"notify_appservices_from_worker": "appservice"},
         "worker_extra_conf": "",
     },
     "federation_sender": {
@@ -171,7 +171,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
 # Templates for sections that may be inserted multiple times in config files
 SUPERVISORD_PROCESS_CONFIG_BLOCK = """
 [program:synapse_{name}]
-command=/usr/local/bin/python -m {app} \
+command=/usr/local/bin/prefix-log /usr/local/bin/python -m {app} \
     --config-path="{config_path}" \
     --config-path=/conf/workers/shared.yaml \
     --config-path=/conf/workers/{name}.yaml
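For illustration, a block like this is instantiated once per worker with `str.format`; a minimal sketch, with made-up worker values:

```python
# Sketch: rendering a supervisord program block per worker via str.format.
# The worker name, app and config path below are made-up example values.
SUPERVISORD_PROCESS_CONFIG_BLOCK = """\
[program:synapse_{name}]
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {app} \\
    --config-path="{config_path}" \\
    --config-path=/conf/workers/shared.yaml \\
    --config-path=/conf/workers/{name}.yaml
"""

block = SUPERVISORD_PROCESS_CONFIG_BLOCK.format(
    name="federation_reader1",
    app="synapse.app.generic_worker",
    config_path="/data/homeserver.yaml",
)
print(block)
```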
@@ -201,7 +201,7 @@ upstream {upstream_worker_type} {{


 # Utility functions
-def log(txt: str):
+def log(txt: str) -> None:
     """Log something to the stdout.

     Args:
@@ -210,7 +210,7 @@ def log(txt: str):
     print(txt)


-def error(txt: str):
+def error(txt: str) -> NoReturn:
     """Log something and exit with an error code.

     Args:
@@ -220,7 +220,7 @@ def error(txt: str):
     sys.exit(2)


-def convert(src: str, dst: str, **template_vars):
+def convert(src: str, dst: str, **template_vars: object) -> None:
     """Generate a file from a template

     Args:
@@ -290,7 +290,7 @@ def add_sharding_to_shared_config(
     shared_config.setdefault("media_instance_running_background_jobs", worker_name)


-def generate_base_homeserver_config():
+def generate_base_homeserver_config() -> None:
    """Starts Synapse and generates a basic homeserver config, which will later be
     modified for worker support.
@@ -302,12 +302,14 @@ def generate_base_homeserver_config():
     subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"])


-def generate_worker_files(environ, config_path: str, data_dir: str):
+def generate_worker_files(
+    environ: Mapping[str, str], config_path: str, data_dir: str
+) -> None:
     """Read the desired list of workers from environment variables and generate
     shared homeserver, nginx and supervisord configs.

     Args:
-        environ: _Environ[str]
+        environ: os.environ instance.
         config_path: The location of the generated Synapse main worker config file.
         data_dir: The location of the synapse data directory. Where log and
             user-facing config files live.
@@ -341,7 +343,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # base shared worker jinja2 template.
     #
     # This config file will be passed to all workers, included Synapse's main process.
-    shared_config = {"listeners": listeners}
+    shared_config: Dict[str, Any] = {"listeners": listeners}

     # The supervisord config. The contents of which will be inserted into the
     # base supervisord jinja2 template.
@@ -369,13 +371,13 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     nginx_locations = {}

     # Read the desired worker configuration from the environment
-    worker_types = environ.get("SYNAPSE_WORKER_TYPES")
-    if worker_types is None:
+    worker_types_env = environ.get("SYNAPSE_WORKER_TYPES")
+    if worker_types_env is None:
         # No workers, just the main process
         worker_types = []
     else:
         # Split type names by comma
-        worker_types = worker_types.split(",")
+        worker_types = worker_types_env.split(",")

     # Create the worker configuration directory if it doesn't already exist
     os.makedirs("/conf/workers", exist_ok=True)
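A standalone sketch of this parsing step: the environment variable set by the complement entrypoint is a multi-line string, so each entry carries surrounding whitespace after the comma split (stripping it inline here is an assumption; in the script it is presumably handled when each worker type is processed):

```python
import os

# Simulate the env var as the entrypoint script would set it.
os.environ["SYNAPSE_WORKER_TYPES"] = "event_persister, event_persister, pusher"

worker_types_env = os.environ.get("SYNAPSE_WORKER_TYPES")
if worker_types_env is None:
    worker_types = []  # no workers, just the main process
else:
    # Split on commas and strip the leftover whitespace around each entry.
    worker_types = [w.strip() for w in worker_types_env.split(",")]

print(worker_types)  # ['event_persister', 'event_persister', 'pusher']
```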
@@ -446,21 +448,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):

         # Write out the worker's logging config file

-        # Check whether we should write worker logs to disk, in addition to the console
-        extra_log_template_args = {}
-        if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
-            extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
-                dir=data_dir, name=worker_name
-            )
-
-        # Render and write the file
-        log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
-        convert(
-            "/conf/log.config",
-            log_config_filepath,
-            worker_name=worker_name,
-            **extra_log_template_args,
-        )
+        log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)

         # Then a worker config file
         convert(
@@ -496,6 +484,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):

     # Finally, we'll write out the config files.

+    # log config for the master process
+    master_log_config = generate_worker_log_config(environ, "master", data_dir)
+    shared_config["log_config"] = master_log_config
+
     # Shared homeserver config
     convert(
         "/conf/shared.yaml.j2",
@@ -512,9 +504,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     )

     # Supervisord config
+    os.makedirs("/etc/supervisor", exist_ok=True)
     convert(
         "/conf/supervisord.conf.j2",
-        "/etc/supervisor/conf.d/supervisord.conf",
+        "/etc/supervisor/supervisord.conf",
         main_config_path=config_path,
         worker_config=supervisord_config,
     )
@@ -532,15 +525,31 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
         os.mkdir(log_dir)


-def start_supervisord():
-    """Starts up supervisord which then starts and monitors all other necessary processes
+def generate_worker_log_config(
+    environ: Mapping[str, str], worker_name: str, data_dir: str
+) -> str:
+    """Generate a log.config file for the given worker.

-    Raises: CalledProcessError if calling start.py return a non-zero exit code.
+    Returns: the path to the generated file
     """
-    subprocess.run(["/usr/bin/supervisord"], stdin=subprocess.PIPE)
+    # Check whether we should write worker logs to disk, in addition to the console
+    extra_log_template_args = {}
+    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
+        extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
+            dir=data_dir, name=worker_name
+        )
+    # Render and write the file
+    log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
+    convert(
+        "/conf/log.config",
+        log_config_filepath,
+        worker_name=worker_name,
+        **extra_log_template_args,
+    )
+    return log_config_filepath


-def main(args, environ):
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
     config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
     data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
@@ -567,7 +576,13 @@ def main(args, environ):

     # Start supervisord, which will start Synapse, all of the configured worker
     # processes, redis, nginx etc. according to the config we created above.
-    start_supervisord()
+    log("Starting supervisord")
+    os.execl(
+        "/usr/local/bin/supervisord",
+        "supervisord",
+        "-c",
+        "/etc/supervisor/supervisord.conf",
+    )


 if __name__ == "__main__":
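The switch from `subprocess.run` to `os.execl` means supervisord replaces the configuration script as the container's foreground process instead of running as a child. A tiny illustration of the exec semantics (the program path is a placeholder):

```python
import os

# os.execl replaces the current process image with the given program; on
# success, nothing after it ever runs. Contrast with subprocess.run, which
# returns control to Python when the child exits.
os.execl("/bin/echo", "echo", "hello from the replacement process")
print("unreachable: os.execl raises on failure rather than returning")
```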
docker/prefix-log (Executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/bin/bash
#
# Prefixes all lines on stdout and stderr with the process name (as determined by
# the SUPERVISOR_PROCESS_NAME env var, which is automatically set by Supervisor).
#
# Usage:
#   prefix-log command [args...]
#

exec 1> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&1)
exec 2> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&2)
exec "$@"
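The same line-prefixing idea, sketched in Python for clarity (an illustrative equivalent, not part of the change):

```python
import os
import subprocess
import sys


def run_prefixed(name: str, argv: list[str]) -> int:
    """Run argv, prefixing every output line with the given process name."""
    proc = subprocess.Popen(
        argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
    )
    assert proc.stdout is not None
    for line in proc.stdout:
        sys.stdout.write(f"{name} | {line}")
    return proc.wait()


if __name__ == "__main__":
    sys.exit(run_prefixed(os.environ.get("SUPERVISOR_PROCESS_NAME", "?"), sys.argv[1:]))
```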
@@ -6,27 +6,28 @@ import os
 import platform
 import subprocess
 import sys
+from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional

 import jinja2


 # Utility functions
-def log(txt):
+def log(txt: str) -> None:
     print(txt, file=sys.stderr)


-def error(txt):
+def error(txt: str) -> NoReturn:
     log(txt)
     sys.exit(2)


-def convert(src, dst, environ):
+def convert(src: str, dst: str, environ: Mapping[str, object]) -> None:
     """Generate a file from a template

     Args:
-        src (str): path to input file
-        dst (str): path to file to write
-        environ (dict): environment dictionary, for replacement mappings.
+        src: path to input file
+        dst: path to file to write
+        environ: environment dictionary, for replacement mappings.
     """
     with open(src) as infile:
         template = infile.read()
@@ -35,25 +36,30 @@ def convert(src, dst, environ):
     outfile.write(rendered)


-def generate_config_from_template(config_dir, config_path, environ, ownership):
+def generate_config_from_template(
+    config_dir: str,
+    config_path: str,
+    os_environ: Mapping[str, str],
+    ownership: Optional[str],
+) -> None:
     """Generate a homeserver.yaml from environment variables

     Args:
-        config_dir (str): where to put generated config files
-        config_path (str): where to put the main config file
-        environ (dict): environment dictionary
-        ownership (str|None): "<user>:<group>" string which will be used to set
+        config_dir: where to put generated config files
+        config_path: where to put the main config file
+        os_environ: environment mapping
+        ownership: "<user>:<group>" string which will be used to set
             ownership of the generated configs. If None, ownership will not change.
     """
     for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
-        if v not in environ:
+        if v not in os_environ:
             error(
                 "Environment variable '%s' is mandatory when generating a config file."
                 % (v,)
             )

     # populate some params from data files (if they exist, else create new ones)
-    environ = environ.copy()
+    environ: Dict[str, Any] = dict(os_environ)
     secrets = {
         "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
         "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -127,12 +133,12 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
     subprocess.check_output(args)


-def run_generate_config(environ, ownership):
+def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
     """Run synapse with a --generate-config param to generate a template config file

     Args:
-        environ (dict): env var dict
-        ownership (str|None): "userid:groupid" arg for chmod. If None, ownership will not change.
+        environ: env vars from `os.environ`.
+        ownership: "userid:groupid" arg for chmod. If None, ownership will not change.

     Never returns.
     """
@@ -178,7 +184,7 @@ def run_generate_config(environ, ownership):
     os.execv(sys.executable, args)


-def main(args, environ):
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     mode = args[1] if len(args) > 1 else "run"

     # if we were given an explicit user to switch to, do so
@@ -17,6 +17,7 @@
 # Usage
   - [Federation](federate.md)
   - [Configuration](usage/configuration/README.md)
+    - [Configuration Manual](usage/configuration/config_documentation.md)
     - [Homeserver Sample Config File](usage/configuration/homeserver_sample_config.md)
     - [Logging Sample Config File](usage/configuration/logging_sample_config.md)
     - [Structured Logging](structured_logging.md)
@@ -804,7 +804,7 @@ POST /_synapse/admin/v2/users/<user_id>/delete_devices
   "devices": [
     "QBUAZIFURK",
     "AUIECTSRND"
-  ],
+  ]
 }
 ```
@@ -6,60 +6,36 @@ The Synapse codebase uses a number of code formatting tools in order to
 quickly and automatically check for formatting (and sometimes logical)
 errors in code.

-The necessary tools are detailed below.
+The necessary tools are:

-First install them with:
+- [black](https://black.readthedocs.io/en/stable/), a source code formatter;
+- [isort](https://pycqa.github.io/isort/), which organises each file's imports;
+- [flake8](https://flake8.pycqa.org/en/latest/), which can spot common errors; and
+- [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.
+
+Install them with:

 ```sh
 pip install -e ".[lint,mypy]"
 ```

-- **black**
-
-  The Synapse codebase uses [black](https://pypi.org/project/black/)
-  as an opinionated code formatter, ensuring all comitted code is
-  properly formatted.
-
-  Have `black` auto-format your code (it shouldn't change any
-  functionality) with:
-
-  ```sh
-  black .
-  ```
-
-- **flake8**
-
-  `flake8` is a code checking tool. We require code to pass `flake8`
-  before being merged into the codebase.
-
-  Check all application and test code with:
-
-  ```sh
-  flake8 .
-  ```
-
-- **isort**
-
-  `isort` ensures imports are nicely formatted, and can suggest and
-  auto-fix issues such as double-importing.
-
-  Auto-fix imports with:
-
-  ```sh
-  isort .
-  ```
+The easiest way to run the lints is to invoke the linter script as follows.
+
+```sh
+scripts-dev/lint.sh
+```

 It's worth noting that modern IDEs and text editors can run these tools
 automatically on save. It may be worth looking into whether this
 functionality is supported in your editor for a more convenient
-development workflow. It is not, however, recommended to run `flake8` on
-save as it takes a while and is very resource intensive.
+development workflow. It is not, however, recommended to run `flake8` or `mypy`
+on save as they take a while and can be very resource intensive.

 ## General rules

 - **Naming**:
-  - Use camel case for class and type names
-  - Use underscores for functions and variables.
+  - Use `CamelCase` for class and type names
+  - Use underscores for `function_names` and `variable_names`.
 - **Docstrings**: should follow the [google code
   style](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings).
   See the
@@ -48,19 +48,28 @@ can find many good git tutorials on the web.

 # 4. Install the dependencies

-Once you have installed Python 3 and added the source, please open a terminal and
-setup a *virtualenv*, as follows:
+Synapse uses the [poetry](https://python-poetry.org/) project to manage its dependencies
+and development environment. Once you have installed Python 3 and added the
+source, you should install `poetry`.
+Of their installation methods, we recommend
+[installing `poetry` using `pipx`](https://python-poetry.org/docs/#installing-with-pipx),
+
+```shell
+pip install --user pipx
+pipx install poetry
+```
+
+but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
+for other installation methods.
+
+Next, open a terminal and install dependencies as follows:

 ```sh
 cd path/where/you/have/cloned/the/repository
-python3 -m venv ./env
-source ./env/bin/activate
-pip install wheel
-pip install -e ".[all,dev]"
-pip install tox
+poetry install --extras all
 ```

-This will install the developer dependencies for the project.
+This will install the runtime and developer dependencies for the project.


 # 5. Get in touch.
@@ -117,11 +126,10 @@ The linters look at your code and do two things:
 - ensure that your code follows the coding style adopted by the project;
 - catch a number of errors in your code.

-The linter takes no time at all to run as soon as you've [downloaded the dependencies into your python virtual environment](#4-install-the-dependencies).
+The linter takes no time at all to run as soon as you've [downloaded the dependencies](#4-install-the-dependencies).

 ```sh
-source ./env/bin/activate
-./scripts-dev/lint.sh
+poetry run ./scripts-dev/lint.sh
 ```

 Note that this script *will modify your files* to fix styling errors.
@@ -131,15 +139,13 @@ If you wish to restrict the linters to only the files changed since the last com
 (much faster!), you can instead run:

 ```sh
-source ./env/bin/activate
-./scripts-dev/lint.sh -d
+poetry run ./scripts-dev/lint.sh -d
 ```

 Or if you know exactly which files you wish to lint, you can instead run:

 ```sh
-source ./env/bin/activate
-./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
+poetry run ./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
 ```

 ## Run the unit tests (Twisted trial).
@@ -148,16 +154,14 @@ The unit tests run parts of Synapse, including your changes, to see if anything
 was broken. They are slower than the linters but will typically catch more errors.

 ```sh
-source ./env/bin/activate
-trial tests
+poetry run trial tests
 ```

 If you wish to only run *some* unit tests, you may specify
 another module instead of `tests` - or a test class or a method:

 ```sh
-source ./env/bin/activate
-trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
+poetry run trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
 ```

 If your tests fail, you may wish to look at the logs (the default log level is `ERROR`):
@@ -169,7 +173,7 @@ less _trial_temp/test.log
 To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`:

 ```sh
-SYNAPSE_TEST_LOG_LEVEL=DEBUG trial tests
+SYNAPSE_TEST_LOG_LEVEL=DEBUG poetry run trial tests
 ```

 By default, tests will use an in-memory SQLite database for test data. For additional
@@ -180,7 +184,7 @@ database state to be stored in a file named `test.db` under the trial process'
 working directory. Typically, this ends up being `_trial_temp/test.db`. For example:

 ```sh
-SYNAPSE_TEST_PERSIST_SQLITE_DB=1 trial tests
+SYNAPSE_TEST_PERSIST_SQLITE_DB=1 poetry run trial tests
 ```

 The database file can then be inspected with:
@@ -266,13 +270,13 @@ COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh
 To run a specific test file, you can pass the test name at the end of the command. The name passed comes from the naming structure in your Complement tests. If you're unsure of the name, you can do a full run and copy it from the test output:

 ```sh
-COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh TestBackfillingHistory
+COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh -run TestImportHistoricalMessages
 ```

 To run a specific test, you can specify the whole name structure:

 ```sh
-COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh TestBackfillingHistory/parallel/Backfilled_historical_events_resolve_with_proper_state_in_correct_order
+COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh -run TestImportHistoricalMessages/parallel/Historical_events_resolve_in_the_correct_order
 ```
docs/development/dependencies.md (Normal file, 239 lines)
@@ -0,0 +1,239 @@
# Managing dependencies with Poetry

This is a quick cheat sheet for developers on how to use [`poetry`](https://python-poetry.org/).

# Background

Synapse uses a variety of third-party Python packages to function as a homeserver.
Some of these are direct dependencies, listed in `pyproject.toml` under the
`[tool.poetry.dependencies]` section. The rest are transitive dependencies (the
things that our direct dependencies themselves depend on, and so on recursively.)

We maintain a locked list of all our dependencies (transitive included) so that
we can track exactly which version of each dependency appears in a given release.
See [here](https://github.com/matrix-org/synapse/issues/11537#issue-1074469665)
for discussion of why we wanted this for Synapse. We chose to use
[`poetry`](https://python-poetry.org/) to manage this locked list; see
[this comment](https://github.com/matrix-org/synapse/issues/11537#issuecomment-1015975819)
for the reasoning.

The locked dependencies get included in our "self-contained" releases: namely,
our docker images and our debian packages. We also use the locked dependencies
in development and our continuous integration.

Separately, our "broad" dependencies (the version ranges specified in
`pyproject.toml`) are included as metadata in our "sdists" and "wheels" [uploaded
to PyPI](https://pypi.org/project/matrix-synapse). Installing from PyPI or from
the Synapse source tree directly will _not_ use the locked dependencies; instead,
they'll pull in the latest version of each package available at install time.

## Example dependency

An example may help. We have a broad dependency on
[`phonenumbers`](https://pypi.org/project/phonenumbers/), as declared in
this snippet from pyproject.toml [as of Synapse 1.57](
https://github.com/matrix-org/synapse/blob/release-v1.57/pyproject.toml#L133
):

```toml
[tool.poetry.dependencies]
# ...
phonenumbers = ">=8.2.0"
```

In our lockfile this is
[pinned](https://github.com/matrix-org/synapse/blob/dfc7646504cef3e4ff396c36089e1c6f1b1634de/poetry.lock#L679-L685)
to version 8.12.44, even though
[newer versions are available](https://pypi.org/project/phonenumbers/#history).

```toml
[[package]]
name = "phonenumbers"
version = "8.12.44"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
category = "main"
optional = false
python-versions = "*"
```

The lockfile also includes a
[cryptographic checksum](https://github.com/matrix-org/synapse/blob/release-v1.57/poetry.lock#L2178-L2181)
of the sdists and wheels provided for this version of `phonenumbers`.

```toml
[metadata.files]
# ...
phonenumbers = [
    {file = "phonenumbers-8.12.44-py2.py3-none-any.whl", hash = "sha256:cc1299cf37b309ecab6214297663ab86cb3d64ae37fd5b88e904fe7983a874a6"},
    {file = "phonenumbers-8.12.44.tar.gz", hash = "sha256:26cfd0257d1704fe2f88caff2caabb70d16a877b1e65b6aae51f9fbbe10aa8ce"},
]
```

We can see this pinned version inside the docker image for that release:

```
$ docker pull matrixdotorg/synapse:v1.57.0
...
$ docker run --entrypoint pip matrixdotorg/synapse:v1.57.0 show phonenumbers
Name: phonenumbers
Version: 8.12.44
Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
Home-page: https://github.com/daviddrysdale/python-phonenumbers
Author: David Drysdale
Author-email: dmd@lurklurk.org
License: Apache License 2.0
Location: /usr/local/lib/python3.9/site-packages
Requires:
Required-by: matrix-synapse
```

Whereas the wheel metadata just contains the broad dependencies:

```
$ cd /tmp
$ wget https://files.pythonhosted.org/packages/ca/5e/d722d572cc5b3092402b783d6b7185901b444427633bd8a6b00ea0dd41b7/matrix_synapse-1.57.0rc1-py3-none-any.whl
...
$ unzip -c matrix_synapse-1.57.0rc1-py3-none-any.whl matrix_synapse-1.57.0rc1.dist-info/METADATA | grep phonenumbers
Requires-Dist: phonenumbers (>=8.2.0)
```

# Tooling recommendation: direnv

[`direnv`](https://direnv.net/) is a tool for activating environments in your
shell inside a given directory. Its support for poetry is unofficial (a
community wiki recipe only), but works solidly in our experience. We thoroughly
recommend it for daily use. To use it:

1. [Install `direnv`](https://direnv.net/docs/installation.html) - it's likely
   packaged for your system already.
2. Teach direnv about poetry. The [shell config here](https://github.com/direnv/direnv/wiki/Python#poetry)
   needs to be added to `~/.config/direnv/direnvrc` (or more generally `$XDG_CONFIG_HOME/direnv/direnvrc`).
3. Mark the synapse checkout as a poetry project: `echo layout poetry > .envrc`.
4. Convince yourself that you trust this `.envrc` configuration and project.
   Then formally confirm this to `direnv` by running `direnv allow`.

Then whenever you navigate to the synapse checkout, you should be able to run
e.g. `mypy` instead of `poetry run mypy`; `python` instead of
`poetry run python`; and your shell commands will automatically run in the
context of poetry's venv, without having to run `poetry shell` beforehand.


# How do I...

## ...reset my venv to the locked environment?

```shell
poetry install --extras all --remove-untracked
```

## ...run a command in the `poetry` virtualenv?

Use `poetry run cmd args` when you need the python virtualenv context.
To avoid typing `poetry run` all the time, you can run `poetry shell`
to start a new shell in the poetry virtualenv context. Within `poetry shell`,
`python`, `pip`, `mypy`, `trial`, etc. are all run inside the project virtualenv
and isolated from the rest of the system.

Roughly speaking, the translation from a traditional virtualenv is:
- `env/bin/activate` -> `poetry shell`, and
- `deactivate` -> close the terminal (Ctrl-D, `exit`, etc.)

See also the direnv recommendation above, which makes `poetry run` and
`poetry shell` unnecessary.


## ...inspect the `poetry` virtualenv?

Some suggestions:

```shell
# Current env only
poetry env info
# All envs: this allows you to have e.g. a poetry managed venv for Python 3.7,
# and another for Python 3.10.
poetry env list --full-path
poetry run pip list
```

Note that `poetry show` describes the abstract *lock file* rather than your
on-disk environment. With that said, `poetry show --tree` can sometimes be
useful.


## ...add a new dependency?

Either:
- manually update `pyproject.toml`; then `poetry lock --no-update`; or else
- `poetry add packagename`. See `poetry add --help`; note the `--dev`,
  `--extras` and `--optional` flags in particular.
  - **NB**: this specifies the new package with a version given by a "caret bound". This won't get forced to its lowest version in the old deps CI job: see [this TODO](https://github.com/matrix-org/synapse/blob/4e1374373857f2f7a911a31c50476342d9070681/.ci/scripts/test_old_deps.sh#L35-L39).

Include the updated `pyproject.toml` and `poetry.lock` files in your commit.

## ...remove a dependency?

This is not done often and is untested, but

```shell
poetry remove packagename
```

ought to do the trick. Alternatively, manually update `pyproject.toml` and
`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock`
files in your commit.

## ...update the version range for an existing dependency?

Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`.
Include the updated `pyproject.toml` and `poetry.lock` in your commit.

## ...update a dependency in the locked environment?

Use

```shell
poetry update packagename
```

to use the latest version of `packagename` in the locked environment, without
affecting the broad dependencies listed in the wheel.

There doesn't seem to be a way to do this whilst locking a _specific_ version of
`packagename`. We can work around this (crudely) as follows:

```shell
poetry add packagename==1.2.3
# This should update poetry.lock.

# Now undo the changes to pyproject.toml. For example
# git restore pyproject.toml

# Get poetry to recompute the content-hash of pyproject.toml without changing
# the locked package versions.
poetry lock --no-update
```

Either way, include the updated `poetry.lock` file in your commit.

## ...export a `requirements.txt` file?

```shell
poetry export --extras all
```

Be wary of bugs in `poetry export` and `pip install -r requirements.txt`.

Note: `poetry export` will be made a plugin in Poetry 1.2. Additional config may
be required.

## ...build a test wheel?

I usually use

```shell
poetry run pip install build && poetry run python -m build
```

because [`build`](https://github.com/pypa/build) is a standardish tool which
doesn't require poetry. (It's what we use in CI too). However, you could try
`poetry build` too.
@@ -17,9 +17,6 @@ follows:
 }
 ```

-Note that the login type of `m.login.jwt` is supported, but is deprecated. This
-will be removed in a future version of Synapse.
-
 The `token` field should include the JSON web token with the following claims:

 * A claim that encodes the local part of the user ID is required. By default,
@@ -18,6 +18,17 @@ async def check_event_for_spam(event: "synapse.events.EventBase") -> Union[bool,

 Called when receiving an event from a client or via federation. The callback must return
 either:
+  - on `Decision.ALLOW`, the action is permitted.
+  - on `Decision.DENY`, the action is rejected with a default error message/code.
+  - on `Codes`, the action is rejected with a specific error message/code. In case
+    of doubt, use `Codes.FORBIDDEN`.
+  - (deprecated) on `False`, behave as `Decision.ALLOW`. Deprecated as methods in
+    this API are inconsistent, some expect `True` for `ALLOW` and others `True`
+    for `DENY`.
+  - (deprecated) on `True`, behave as `Decision.DENY`. Deprecated as methods in
+    this API are inconsistent, some expect `True` for `ALLOW` and others `True`
+    for `DENY`.
+
 - an error message string, to indicate the event must be rejected because of spam and
   give a rejection reason to forward to clients;
 - the boolean `True`, to indicate that the event is spammy, but not provide further details; or
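Putting the return-value semantics above together, a sketch of a module callback implementing this contract. The import paths for `Decision` and `Codes` are assumptions here (only the names appear in the documentation above), so treat this as illustrative rather than authoritative:

```python
from typing import Union

from synapse.api.errors import Codes          # assumed import path
from synapse.spam_checker_api import Decision  # assumed import path


async def check_event_for_spam(event) -> Union[Decision, Codes]:
    # Inspect the message body; `event.content` is assumed to be dict-like.
    body = event.content.get("body", "")
    if "buy cheap meds" in body.lower():
        # Reject with a specific error code; the docs above recommend
        # Codes.FORBIDDEN when in doubt.
        return Codes.FORBIDDEN
    return Decision.ALLOW
```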
@@ -35,3 +35,8 @@ See [the TCP replication documentation](tcp_replication.md).
 There are read-only version of the synapse storage layer in
 `synapse/replication/slave/storage` that use the response of the
 replication API to invalidate their caches.
+
+### The TCP Replication Module
+Information about how the tcp replication module is structured, including how
+the classes interact, can be found in
+`synapse/replication/tcp/__init__.py`
@@ -206,6 +206,28 @@ backend matrix
     server matrix 127.0.0.1:8008
 ```

+
+[Delegation](delegate.md) example:
+```
+frontend https
+  acl matrix-well-known-client-path path /.well-known/matrix/client
+  acl matrix-well-known-server-path path /.well-known/matrix/server
+  use_backend matrix-well-known-client if matrix-well-known-client-path
+  use_backend matrix-well-known-server if matrix-well-known-server-path
+
+backend matrix-well-known-client
+  http-after-response set-header Access-Control-Allow-Origin "*"
+  http-after-response set-header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS"
+  http-after-response set-header Access-Control-Allow-Headers "Origin, X-Requested-With, Content-Type, Accept, Authorization"
+  http-request return status 200 content-type application/json string '{"m.homeserver":{"base_url":"https://matrix.example.com"},"m.identity_server":{"base_url":"https://identity.example.com"}}'
+
+backend matrix-well-known-server
+  http-after-response set-header Access-Control-Allow-Origin "*"
+  http-after-response set-header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS"
+  http-after-response set-header Access-Control-Allow-Headers "Origin, X-Requested-With, Content-Type, Accept, Authorization"
+  http-request return status 200 content-type application/json string '{"m.server":"matrix.example.com:443"}'
+```
+
 ### Relayd

 ```
@@ -407,6 +407,11 @@ manhole_settings:
 # sign up in a short space of time never to return after their initial
 # session.
 #
+# The option `mau_appservice_trial_days` is similar to `mau_trial_days`, but
+# applies a different trial number if the user was registered by an appservice.
+# A value of 0 means no trial days are applied. Appservices not listed in this
+# dictionary use the value of `mau_trial_days` instead.
+#
 # 'mau_limit_alerting' is a means of limiting client side alerting
 # should the mau limit be reached. This is useful for small instances
 # where the admin has 5 mau seats (say) for 5 specific people and no
@@ -417,6 +422,8 @@ manhole_settings:
 #max_mau_value: 50
 #mau_trial_days: 2
 #mau_limit_alerting: false
+#mau_appservice_trial_days:
+#  "appservice-id": 1

 # If enabled, the metrics for the number of monthly active users will
 # be populated, however no one will be limited. If limit_usage_by_mau
@@ -709,11 +716,11 @@ retention:
 #
 #allow_profile_lookup_over_federation: false

-# Uncomment to disable device display name lookup over federation. By default, the
-# Federation API allows other homeservers to obtain device display names of any user
-# on this homeserver. Defaults to 'true'.
+# Uncomment to allow device display name lookup over federation. By default, the
+# Federation API prevents other homeservers from obtaining the display names of
+# user devices on this homeserver. Defaults to 'false'.
 #
-#allow_device_name_lookup_over_federation: false
+#allow_device_name_lookup_over_federation: true


 ## Caching ##
@@ -1323,6 +1330,12 @@ oembed:
 #
 #registration_requires_token: true

+# Allow users to submit a token during registration to bypass any required 3pid
+# steps configured in `registrations_require_3pid`.
+# Defaults to false, requiring that registration tokens (if enabled) complete a 3pid flow.
+#
+#enable_registration_token_3pid_bypass: false
+
 # If set, allows registration of standard or admin accounts by anyone who
 # has the shared secret, even if registration is otherwise disabled.
 #
@@ -62,13 +62,6 @@ loggers:
         # information such as access tokens.
         level: INFO

-    twisted:
-        # We send the twisted logging directly to the file handler,
-        # to work around https://github.com/matrix-org/synapse/issues/3471
-        # when using "buffer" logger. Use "console" to log to stderr instead.
-        handlers: [file]
-        propagate: false
-
 root:
     level: INFO
@@ -10,15 +10,15 @@ See the folder [system](https://github.com/matrix-org/synapse/tree/develop/docs/
 for the systemd unit files.

 The folder [workers](https://github.com/matrix-org/synapse/tree/develop/docs/systemd-with-workers/workers/)
-contains an example configuration for the `federation_reader` worker.
+contains an example configuration for the `generic_worker` worker.

 ## Synapse configuration files

 See [the worker documentation](../workers.md) for information on how to set up the
 configuration files and reverse-proxy correctly.
-Below is a sample `federation_reader` worker configuration file.
+Below is a sample `generic_worker` worker configuration file.
 ```yaml
-{{#include workers/federation_reader.yaml}}
+{{#include workers/generic_worker.yaml}}
 ```

 Systemd manages daemonization itself, so ensure that none of the configuration
@@ -61,9 +61,9 @@ systemctl stop matrix-synapse.target
 # Restart the master alone
 systemctl start matrix-synapse.service

-# Restart a specific worker (eg. federation_reader); the master is
+# Restart a specific worker (eg. generic_worker); the master is
 # unaffected by this.
-systemctl restart matrix-synapse-worker@federation_reader.service
+systemctl restart matrix-synapse-worker@generic_worker.service

 # Add a new worker (assuming all configs are set up already)
 systemctl enable matrix-synapse-worker@federation_writer.service
docs/systemd-with-workers/workers/background_worker.yaml (Normal file, 8 lines)
@@ -0,0 +1,8 @@
worker_app: synapse.app.generic_worker
worker_name: background_worker

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_log_config: /etc/matrix-synapse/background-worker-log.yaml
docs/systemd-with-workers/workers/event_persister.yaml (Normal file, 23 lines)
@@ -0,0 +1,23 @@
worker_app: synapse.app.generic_worker
worker_name: event_persister1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]

  # Enable listener if this stream writer handles endpoints for the `typing` or
  # `to_device` streams. Uses a different port to the `replication` listener to
  # avoid exposing the `replication` listener publicly.
  #
  #- type: http
  #  port: 8035
  #  resources:
  #    - names: [client]

worker_log_config: /etc/matrix-synapse/event-persister-log.yaml
@@ -1,13 +0,0 @@
-worker_app: synapse.app.federation_reader
-worker_name: federation_reader1
-
-worker_replication_host: 127.0.0.1
-worker_replication_http_port: 9093
-
-worker_listeners:
-  - type: http
-    port: 8011
-    resources:
-      - names: [federation]
-
-worker_log_config: /etc/matrix-synapse/federation-reader-log.yaml
docs/systemd-with-workers/workers/generic_worker.yaml (Normal file, 14 lines)
@@ -0,0 +1,14 @@
worker_app: synapse.app.generic_worker
worker_name: generic_worker1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client, federation]

worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
@@ -302,14 +302,14 @@ Here are a few things to try:

 (Understanding the output is beyond the scope of this document!)

-* You can test your Matrix homeserver TURN setup with https://test.voip.librepush.net/.
+* You can test your Matrix homeserver TURN setup with <https://test.voip.librepush.net/>.
   Note that this test is not fully reliable yet, so don't be discouraged if
   the test fails.
   [Here](https://github.com/matrix-org/voip-tester) is the github repo of the
   source of the tester, where you can file bug reports.

 * There is a WebRTC test tool at
-  https://webrtc.github.io/samples/src/content/peerconnection/trickle-ice/. To
+  <https://webrtc.github.io/samples/src/content/peerconnection/trickle-ice/>. To
   use it, you will need a username/password for your TURN server. You can
   either:
@@ -19,32 +19,36 @@ this document.
   packages](setup/installation.md#prebuilt-packages), you will need to follow the
   normal process for upgrading those packages.

- If Synapse was installed using pip then upgrade to the latest
  version by running:

  ```bash
  pip install --upgrade matrix-synapse
  ```

- If Synapse was installed from source, then:

-  1. Activate the virtualenv before upgrading. For example, if
-     Synapse is installed in a virtualenv in `~/synapse/env` then
+  1. Obtain the latest version of the source code. Git users can run
+     `git pull` to do this.
+
+  2. If you're running Synapse in a virtualenv, make sure to activate it before
+     upgrading. For example, if Synapse is installed in a virtualenv in `~/synapse/env` then
      run:

      ```bash
      source ~/synapse/env/bin/activate
      ```

-  2. If Synapse was installed using pip then upgrade to the latest
-     version by running:
-
-     ```bash
-     pip install --upgrade matrix-synapse
-     ```
-
     If Synapse was installed using git then upgrade to the latest
     version by running:

     ```bash
     git pull
     pip install --upgrade .
     ```
+     Include any relevant extras between square brackets, e.g. `pip install --upgrade ".[postgres,oidc]"`.

-  3. Restart Synapse:
+  3. If you're using `poetry` to manage a Synapse installation, run:
+     ```bash
+     poetry install
+     ```
+     Include any relevant extras with `--extras`, e.g. `poetry install --extras postgres --extras oidc`.
+     It's probably easiest to run `poetry install --extras all`.
+
+  4. Restart Synapse:

      ```bash
      synctl restart
@@ -85,6 +89,58 @@ process, for example:
     dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
     ```

+# Upgrading to v1.59.0
+
+## Device name lookup over federation has been disabled by default
+
+The names of user devices are no longer visible to users on other homeservers by default.
+Device IDs are unaffected, as these are necessary to facilitate end-to-end encryption.
+
+To re-enable this functionality, set the
+[`allow_device_name_lookup_over_federation`](https://matrix-org.github.io/synapse/v1.59/usage/configuration/config_documentation.html#federation)
+homeserver config option to `true`.
+
+
+## Deprecation of the `synapse.app.appservice` and `synapse.app.user_dir` worker application types
+
+The `synapse.app.appservice` worker application type allowed you to configure a
+single worker to notify application services of new events, as long
+as this functionality was disabled on the main process with `notify_appservices: False`.
+Further, the `synapse.app.user_dir` worker application type allowed you to configure
+a single worker to be responsible for updating the user directory, as long as this
+was disabled on the main process with `update_user_directory: False`.
+
+To unify Synapse's worker types, the `synapse.app.appservice` worker application
+type and the `notify_appservices` configuration option have been deprecated.
+The `synapse.app.user_dir` worker application type and `update_user_directory`
+configuration option have also been deprecated.
+
+To get the same functionality as was provided by the deprecated options, it's now recommended that the `synapse.app.generic_worker`
+worker application type is used and that the `notify_appservices_from_worker` and/or
+`update_user_directory_from_worker` options are set to the name of a worker.
+
+For the time being, the old options can be used alongside the new options to make
+it easier to transition between the two configurations, however please note that:
+
+- the options must not contradict each other (otherwise Synapse won't start); and
+- the `notify_appservices` and `update_user_directory` options will be removed in a future release of Synapse.
+
+Please see the [*Notifying Application Services*][v1_59_notify_ases_from] and
+[*Updating the User Directory*][v1_59_update_user_dir] sections of the worker
+documentation for more information.
+
+[v1_59_notify_ases_from]: workers.md#notifying-application-services
+[v1_59_update_user_dir]: workers.md#updating-the-user-directory
+
+
 # Upgrading to v1.58.0

 ## Groups/communities feature has been disabled by default

 The non-standard groups/communities feature in Synapse has been disabled by default
 and will be removed in Synapse v1.61.0.


 # Upgrading to v1.57.0

 ## Changes to database schema for application services
@@ -28,7 +28,7 @@ See the following for how to decode the dense data available from the default lo
 | NNNN | Total time waiting for response to DB queries across all parallel DB work from this request |
 | OOOO | Count of DB transactions performed |
 | PPPP | Response body size |
-| QQQQ | Response status code (prefixed with ! if the socket was closed before the response was generated) |
+| QQQQ | Response status code<br/>Suffixed with `!` if the socket was closed before the response was generated.<br/>A `499!` status code indicates that Synapse also cancelled request processing after the socket was closed.<br/> |
 | RRRR | Request |
 | SSSS | User-agent |
 | TTTT | Events fetched from DB to service this request (note that this does not include events fetched from the cache) |
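A log-processing script consuming the QQQQ column described above would need to account for the `!` suffix; a hypothetical helper:

```python
# Split a status-code field like "499!" into the numeric code and a flag
# indicating the socket was closed before the response was generated.
def parse_status(code: str) -> tuple[int, bool]:
    closed_early = code.endswith("!")
    return int(code.rstrip("!")), closed_early


print(parse_status("200"))   # (200, False)
print(parse_status("499!"))  # (499, True)
```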
@@ -1,7 +1,10 @@
 ## Some useful SQL queries for Synapse Admins

 ## Size of full matrix db
-`SELECT pg_size_pretty( pg_database_size( 'matrix' ) );`
+```sql
+SELECT pg_size_pretty( pg_database_size( 'matrix' ) );
+```

 ### Result example:
 ```
 pg_size_pretty
@@ -9,39 +12,19 @@
  6420 MB
 (1 row)
 ```
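For illustration, the size query above could be run from Python roughly as follows (psycopg2, with placeholder connection details):

```python
# Hypothetical example of running one of these admin queries from Python.
# The connection parameters below are placeholders.
import psycopg2

conn = psycopg2.connect(dbname="matrix", user="synapse_user", host="localhost")
with conn, conn.cursor() as cur:
    cur.execute("SELECT pg_size_pretty( pg_database_size( 'matrix' ) );")
    (size,) = cur.fetchone()
    print(f"database size: {size}")
conn.close()
```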
## Show top 20 larger rooms by state events count
|
||||
```sql
|
||||
SELECT r.name, s.room_id, s.current_state_events
|
||||
FROM room_stats_current s
|
||||
LEFT JOIN room_stats_state r USING (room_id)
|
||||
ORDER BY current_state_events DESC
|
||||
LIMIT 20;
|
||||
```
|
||||
|
||||
and by state_group_events count:
|
||||
```sql
|
||||
SELECT rss.name, s.room_id, count(s.room_id) FROM state_groups_state s
|
||||
LEFT JOIN room_stats_state rss USING (room_id)
|
||||
GROUP BY s.room_id, rss.name
|
||||
ORDER BY count(s.room_id) DESC
|
||||
LIMIT 20;
|
||||
```
|
||||
plus same, but with join removed for performance reasons:
|
||||
```sql
|
||||
SELECT s.room_id, count(s.room_id) FROM state_groups_state s
|
||||
GROUP BY s.room_id
|
||||
ORDER BY count(s.room_id) DESC
|
||||
LIMIT 20;
|
||||
```
|
||||
|
||||
## Show top 20 larger tables by row count
|
||||
```sql
|
||||
SELECT relname, n_live_tup as rows
|
||||
FROM pg_stat_user_tables
|
||||
SELECT relname, n_live_tup AS "rows"
|
||||
FROM pg_stat_user_tables
|
||||
ORDER BY n_live_tup DESC
|
||||
LIMIT 20;
|
||||
```
|
||||
This query is quick, but may be very approximate, for exact number of rows use `SELECT COUNT(*) FROM <table_name>`.
|
||||
This query is quick, but may be very approximate, for exact number of rows use:
|
||||
```sql
|
||||
SELECT COUNT(*) FROM <table_name>;
|
||||
```
|
||||
|
||||
### Result example:
|
||||
```
|
||||
state_groups_state - 161687170
|
||||
@@ -66,46 +49,19 @@ device_lists_stream - 326903
|
||||
user_directory_search - 316433
|
||||
```

## Show top 20 larger tables by storage size
```sql
SELECT nspname || '.' || relname AS "relation",
    pg_size_pretty(pg_total_relation_size(c.oid)) AS "total_size"
FROM pg_class c
LEFT JOIN pg_namespace n ON (n.oid = c.relnamespace)
WHERE nspname NOT IN ('pg_catalog', 'information_schema')
    AND c.relkind <> 'i'
    AND nspname !~ '^pg_toast'
ORDER BY pg_total_relation_size(c.oid) DESC
LIMIT 20;
```

### Result example:
```
public.state_groups_state - 27 GB
public.device_lists_remote_cache - 124 MB
public.state_group_edges - 122 MB
```

## Show top 20 larger rooms by state events count
You get the same information when you use the
[admin API](../../admin_api/rooms.md#list-room-api)
and set parameter `order_by=state_events`.

```sql
SELECT r.name, s.room_id, s.current_state_events
FROM room_stats_current s
LEFT JOIN room_stats_state r USING (room_id)
ORDER BY current_state_events DESC
LIMIT 20;
```

and by state_group_events count:
```sql
SELECT rss.name, s.room_id, COUNT(s.room_id)
FROM state_groups_state s
LEFT JOIN room_stats_state rss USING (room_id)
GROUP BY s.room_id, rss.name
ORDER BY COUNT(s.room_id) DESC
LIMIT 20;
```

plus the same, but with the join removed for performance reasons:
```sql
SELECT s.room_id, COUNT(s.room_id)
FROM state_groups_state s
GROUP BY s.room_id
ORDER BY COUNT(s.room_id) DESC
LIMIT 20;
```

## Show top 20 rooms by new events count in last 1 day:
```sql
SELECT e.room_id, r.name, COUNT(e.event_id) cnt
FROM events e
LEFT JOIN room_stats_state r USING (room_id)
WHERE e.origin_server_ts >= DATE_PART('epoch', NOW() - INTERVAL '1 day') * 1000
GROUP BY e.room_id, r.name
ORDER BY cnt DESC
LIMIT 20;
```
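
`origin_server_ts` is stored in milliseconds, hence the `* 1000` applied to the epoch cutoff. A quick client-side sanity check of the same cutoff (a sketch, not Synapse code):

```python
import time

# Millisecond cutoff equivalent to DATE_PART('epoch', NOW() - INTERVAL '1 day') * 1000
one_day_ms = 24 * 60 * 60 * 1000
cutoff_ms = int(time.time() * 1000) - one_day_ms
print(cutoff_ms)  # compare against events.origin_server_ts
```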

## Show top 20 users on homeserver by sent events (messages) in the last month:
Caution: this query does not use any indexes, can be slow, and will create load on the database.
```sql
SELECT COUNT(*), sender
FROM events
WHERE (type = 'm.room.encrypted' OR type = 'm.room.message')
    AND origin_server_ts >= DATE_PART('epoch', NOW() - INTERVAL '1 month') * 1000
GROUP BY sender
ORDER BY COUNT(*) DESC
LIMIT 20;
```

## Show last 100 messages from a given user, with room names:
```sql
SELECT e.room_id, r.name, e.event_id, e.type, e.content, j.json
FROM events e
LEFT JOIN event_json j USING (event_id)
LEFT JOIN room_stats_state r USING (room_id)
WHERE sender = '@LOGIN:example.com'
    AND e.type = 'm.room.message'
ORDER BY stream_ordering DESC
LIMIT 100;
```

## Show rooms with names, sorted by events in these rooms

**Sort and order with bash**
```bash
echo "SELECT event_json.room_id, room_stats_state.name FROM event_json, room_stats_state \
WHERE room_stats_state.room_id = event_json.room_id" | psql -d synapse -h localhost -U synapse_user -t \
| sort | uniq -c | sort -n
```
Documentation for `psql` command line parameters: https://www.postgresql.org/docs/current/app-psql.html

**Sort and order with SQL**
```sql
SELECT COUNT(*), event_json.room_id, room_stats_state.name
FROM event_json, room_stats_state
WHERE room_stats_state.room_id = event_json.room_id
GROUP BY event_json.room_id, room_stats_state.name
ORDER BY COUNT(*) DESC
LIMIT 50;
```

### Result example:
```
9459  !FPUfgzXYWTKgIrwKxW:matrix.org  | This Week in Matrix
```

## Lookup room state info by list of room_id
You get the same information when you use the
[admin API](../../admin_api/rooms.md#room-details-api).
```sql
SELECT rss.room_id, rss.name, rss.canonical_alias, rss.topic, rss.encryption,
    rsc.joined_members, rsc.local_users_in_room, rss.join_rules
FROM room_stats_state rss
LEFT JOIN room_stats_current rsc USING (room_id)
WHERE room_id IN (
    '!OGEhHVWSdvArJzumhm:matrix.org',
    '!YTvKGNlinIzlkMTVRl:matrix.org'
);
```
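
If you script this lookup, pass the room IDs as a query parameter instead of interpolating them into the SQL. A psycopg2 sketch (connection details are assumptions):

```python
import psycopg2

room_ids = ("!OGEhHVWSdvArJzumhm:matrix.org", "!YTvKGNlinIzlkMTVRl:matrix.org")

conn = psycopg2.connect(dbname="matrix", user="synapse_user", host="localhost")
with conn, conn.cursor() as cur:
    # psycopg2 expands a tuple parameter into a parenthesised value list for IN.
    cur.execute(
        """
        SELECT rss.room_id, rss.name, rsc.joined_members
        FROM room_stats_state rss
        LEFT JOIN room_stats_current rsc USING (room_id)
        WHERE room_id IN %s;
        """,
        (room_ids,),
    )
    for row in cur.fetchall():
        print(row)
conn.close()
```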

## Show users and devices that have not been online for a while
```sql
SELECT user_id, device_id, user_agent, TO_TIMESTAMP(last_seen / 1000) AS "last_seen"
FROM devices
WHERE last_seen < DATE_PART('epoch', NOW() - INTERVAL '3 month') * 1000;
```
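
`last_seen` is likewise in milliseconds; to interpret a raw value outside SQL (sketch):

```python
from datetime import datetime, timezone

last_seen_ms = 1651497600000  # example value from the devices table
print(datetime.fromtimestamp(last_seen_ms / 1000, tz=timezone.utc))
# -> 2022-05-02 13:20:00+00:00
```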

docs/usage/configuration/config_documentation.md: 3468 lines (new file). Diff suppressed because it is too large.
@@ -75,6 +75,20 @@ function setTocEntry() {
 * Populate sidebar on load
 */
window.addEventListener('load', () => {
    // Prevent rendering the table of contents of the "print book" page, as it
    // will end up being rendered into the output (in a broken-looking way)

    // Get the name of the current page (i.e. 'print.html')
    const pageNameExtension = window.location.pathname.split('/').pop();

    // Split off the extension (as '.../print' is also a valid page name), which
    // should result in 'print'
    const pageName = pageNameExtension.split('.')[0];
    if (pageName === "print") {
        // Don't render the table of contents on this page
        return;
    }

    // Only create table of contents if there is more than one header on the page
    if (headers.length <= 1) {
        return;
@@ -138,22 +138,7 @@ as the `listeners` option in the shared config.

For example:

```yaml
{{#include systemd-with-workers/workers/generic_worker.yaml}}
```

...is a full configuration for a generic worker instance, which will expose a

@@ -343,9 +328,9 @@ effects of bursts of events from that bridge on events sent by normal users.

#### Stream writers

Additionally, the writing of specific streams (such as events) can be moved off
of the main process to a particular worker.
(This is only supported with Redis-based replication.)

To enable this, the worker must have an HTTP replication listener configured,
have a `worker_name` and be listed in the `instance_map` config. The same worker

@@ -365,6 +350,12 @@ stream_writers:
  events: event_persister1
```

An example for a stream writer instance:

```yaml
{{#include systemd-with-workers/workers/event_persister.yaml}}
```

Some of the streams have associated endpoints which, for maximum efficiency, should
be routed to the workers handling that stream. See below for the currently supported
streams and the endpoints associated with them:

@@ -422,7 +413,7 @@ the stream writer for the `presence` stream:

#### Background tasks

There is also support for moving background tasks to a separate
worker. Background tasks are run periodically or started via replication. Exactly
which tasks are configured to run depends on your Synapse configuration (e.g. if
stats is enabled).

@@ -435,9 +426,49 @@ the shared configuration would include:

run_background_tasks_on: background_worker
```

You might also wish to investigate the `update_user_directory_from_worker` and
`media_instance_running_background_jobs` settings.

An example for a dedicated background worker instance:

```yaml
{{#include systemd-with-workers/workers/background_worker.yaml}}
```

#### Updating the User Directory

You can designate one generic worker to update the user directory.

Specify its name in the shared configuration as follows:

```yaml
update_user_directory_from_worker: worker_name
```

This work cannot be load-balanced; please ensure the main process is restarted
after setting this option in the shared configuration!

This style of configuration supersedes the legacy `synapse.app.user_dir`
worker application type.


#### Notifying Application Services

You can designate one generic worker to send output traffic to Application Services.

Specify its name in the shared configuration as follows:

```yaml
notify_appservices_from_worker: worker_name
```

This work cannot be load-balanced; please ensure the main process is restarted
after setting this option in the shared configuration!

This style of configuration supersedes the legacy `synapse.app.appservice`
worker application type.


### `synapse.app.pusher`

Handles sending push notifications to sygnal and email. Doesn't handle any

@@ -456,6 +487,9 @@ pusher_instances:

### `synapse.app.appservice`

**Deprecated as of Synapse v1.59.** [Use `synapse.app.generic_worker` with the
`notify_appservices_from_worker` option instead.](#notifying-application-services)

Handles sending output traffic to Application Services. Doesn't handle any
REST endpoints itself, but you should set `notify_appservices: False` in the
shared configuration file to stop the main synapse sending appservice notifications.

@@ -523,6 +557,9 @@ Note that if a reverse proxy is used, then `/_matrix/media/` must be routed for

### `synapse.app.user_dir`

**Deprecated as of Synapse v1.59.** [Use `synapse.app.generic_worker` with the
`update_user_directory_from_worker` option instead.](#updating-the-user-directory)

Handles searches in the user directory. It can handle REST endpoints matching
the following regular expressions:

@@ -617,14 +654,14 @@ The following shows an example setup using Redis and a reverse proxy:

|     Main     | |   Generic    | |   Generic    | |    Event     |
|   Process    | |   Worker 1   | |   Worker 2   | |  Persister   |
+--------------+ +--------------+ +--------------+ +--------------+
^ ^ | ^ | | ^ | | ^ ^
| | | | | | | | | | |
| | | | | HTTP | | | | | |
| +----------+<--|---|---------+<--|---|---------+ | |
| | +-------------|-->+-------------+ |
| | | |
| | | |
v v v v
======================================================================
   Redis pub/sub channel
```
mypy.ini
@@ -7,13 +7,13 @@ show_error_codes = True

show_traceback = True
mypy_path = stubs
warn_unreachable = True
warn_unused_ignores = True
local_partial_types = True
no_implicit_optional = True

files =
  docker/,
  scripts-dev/,
  setup.py,
  synapse/,
  tests/

@@ -24,10 +24,6 @@ files =

# https://docs.python.org/3/library/re.html#re.X
exclude = (?x)
  ^(
   |synapse/storage/databases/__init__.py
   |synapse/storage/databases/main/cache.py
   |synapse/storage/databases/main/devices.py

@@ -135,6 +131,11 @@ disallow_untyped_defs = True

[mypy-synapse.metrics.*]
disallow_untyped_defs = True

[mypy-synapse.metrics._reactor_metrics]
# This module imports select.epoll. That exists on Linux, but doesn't on macOS.
# See https://github.com/matrix-org/synapse/pull/11771.
warn_unused_ignores = False
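
For context, the pattern this exception accommodates looks roughly like the following sketch (not the actual module): the `type: ignore` is required when mypy analyses for macOS, but would be reported as unused on Linux if `warn_unused_ignores` stayed on.

```python
import select

# select.epoll exists on Linux but not on macOS, so whether this ignore is
# "used" depends on the platform mypy analyses for.
epoll = select.epoll()  # type: ignore[attr-defined]
```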

[mypy-synapse.module_api.*]
disallow_untyped_defs = True

@@ -234,69 +235,32 @@ disallow_untyped_defs = True

;; The `typeshed` project maintains stubs here:
;; https://github.com/python/typeshed/tree/master/stubs
;; and for each package `foo` there's a corresponding `types-foo` package on PyPI,
;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
;; `[tool.poetry.dev-dependencies]` list.

[mypy-authlib.*]
ignore_missing_imports = True

[mypy-bcrypt]
ignore_missing_imports = True

[mypy-canonicaljson]
ignore_missing_imports = True

[mypy-constantly]
ignore_missing_imports = True

[mypy-daemonize]
ignore_missing_imports = True

[mypy-h11]
ignore_missing_imports = True

[mypy-hiredis]
ignore_missing_imports = True

[mypy-hyperlink]
ignore_missing_imports = True

[mypy-ijson.*]
ignore_missing_imports = True

[mypy-importlib_metadata.*]
ignore_missing_imports = True

[mypy-jaeger_client.*]
ignore_missing_imports = True

[mypy-josepy.*]
ignore_missing_imports = True

[mypy-jwt.*]
ignore_missing_imports = True

[mypy-lxml]
ignore_missing_imports = True

[mypy-msgpack]
ignore_missing_imports = True

[mypy-nacl.*]
ignore_missing_imports = True

# Note: WIP stubs available at
# https://github.com/microsoft/python-type-stubs/tree/64934207f523ad6b611e6cfe039d85d7175d7d0d/netaddr
[mypy-netaddr]
ignore_missing_imports = True

[mypy-parameterized.*]
ignore_missing_imports = True

[mypy-phonenumbers.*]
ignore_missing_imports = True

[mypy-prometheus_client.*]
ignore_missing_imports = True

[mypy-pymacaroons.*]
ignore_missing_imports = True

@@ -309,23 +273,14 @@ ignore_missing_imports = True

[mypy-saml2.*]
ignore_missing_imports = True

[mypy-sentry_sdk]
ignore_missing_imports = True

[mypy-service_identity.*]
ignore_missing_imports = True

[mypy-srvlookup.*]
ignore_missing_imports = True

[mypy-treq.*]
ignore_missing_imports = True

[mypy-twisted.*]
ignore_missing_imports = True

[mypy-zope]
ignore_missing_imports = True

[mypy-incremental.*]
ignore_missing_imports = True
poetry.lock (generated)
@@ -1,11 +1,3 @@

[[package]]
name = "attrs"
version = "21.4.0"

@@ -49,17 +41,6 @@ six = "*"

[package.extras]
visualize = ["graphviz (>0.5.1)", "Twisted (>=16.1.1)"]

[[package]]
name = "bcrypt"
version = "3.2.0"

@@ -328,14 +309,15 @@ smmap = ">=3.0.1,<6"

[[package]]
name = "gitpython"
version = "3.1.27"
description = "GitPython is a python library used to interact with Git repositories"
category = "dev"
optional = false
python-versions = ">=3.7"

[package.dependencies]
gitdb = ">=4.0.1,<5"
typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""}

[[package]]
name = "hiredis"

@@ -558,17 +540,20 @@ test = ["tox", "twisted", "aiounittest"]

[[package]]
name = "matrix-synapse-ldap3"
version = "0.2.0"
description = "An LDAP3 auth provider for Synapse"
category = "main"
optional = true
python-versions = ">=3.7"

[package.dependencies]
ldap3 = ">=2.8"
service-identity = "*"
Twisted = ">=15.1.0"

[package.extras]
dev = ["matrix-synapse", "tox", "ldaptor", "mypy (==0.910)", "types-setuptools", "black (==21.9b0)", "flake8 (==4.0.1)", "isort (==5.9.3)"]

[[package]]
name = "mccabe"
version = "0.6.1"

@@ -587,7 +572,7 @@ python-versions = "*"

[[package]]
name = "mypy"
version = "0.950"
description = "Optional static typing for Python"
category = "dev"
optional = false

@@ -595,13 +580,14 @@ python-versions = ">=3.6"

[package.dependencies]
mypy-extensions = ">=0.4.3"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""}
typing-extensions = ">=3.10"

[package.extras]
dmypy = ["psutil (>=4.0)"]
python2 = ["typed-ast (>=1.4.0,<2)"]
reports = ["lxml"]

[[package]]
name = "mypy-extensions"

@@ -613,14 +599,14 @@ python-versions = "*"

[[package]]
name = "mypy-zope"
version = "0.3.7"
description = "Plugin for mypy to support zope interfaces"
category = "dev"
optional = false
python-versions = "*"

[package.dependencies]
mypy = "0.950"
"zope.interface" = "*"
"zope.schema" = "*"

@@ -717,7 +703,7 @@ test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock

[[package]]
name = "prometheus-client"
version = "0.14.0"
description = "Python client for the Prometheus monitoring system."
category = "main"
optional = false

@@ -981,20 +967,6 @@ Pygments = ">=2.5.1"

[package.extras]
md = ["cmarkgfm (>=0.8.0)"]

[[package]]
name = "requests"
version = "2.27.1"

@@ -1035,17 +1007,6 @@ python-versions = ">=3.7"

[package.extras]
idna2008 = ["idna"]

[[package]]
name = "secretstorage"
version = "3.3.1"

@@ -1060,7 +1021,7 @@ jeepney = ">=0.6"

[[package]]
name = "sentry-sdk"
version = "1.5.11"
description = "Python client for Sentry (https://sentry.io)"
category = "main"
optional = true

@@ -1285,7 +1246,7 @@ urllib3 = ">=1.26.0"

[[package]]
name = "twisted"
version = "22.4.0"
description = "An asynchronous networking framework written in Python"
category = "main"
optional = false

@@ -1305,19 +1266,20 @@ typing-extensions = ">=3.6.5"

"zope.interface" = ">=4.4.2"

[package.extras]
all_non_platform = ["cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
conch = ["pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)"]
conch_nacl = ["pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pynacl"]
contextvars = ["contextvars (>=2.4,<3)"]
dev = ["towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pyflakes (>=2.2,<3.0)", "twistedchecker (>=0.7,<1.0)", "coverage (>=6b1,<7)", "python-subunit (>=1.4,<2.0)", "pydoctor (>=21.9.0,<21.10.0)"]
dev_release = ["towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pydoctor (>=21.9.0,<21.10.0)"]
http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"]
macos_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
mypy = ["mypy (==0.930)", "mypy-zope (==0.3.4)", "types-setuptools", "types-pyopenssl", "towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pyflakes (>=2.2,<3.0)", "twistedchecker (>=0.7,<1.0)", "coverage (>=6b1,<7)", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pynacl", "pywin32 (!=226)", "python-subunit (>=1.4,<2.0)", "contextvars (>=2.4,<3)", "pydoctor (>=21.9.0,<21.10.0)"]
osx_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
serial = ["pyserial (>=3.0)", "pywin32 (!=226)"]
test = ["cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)"]
tls = ["pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)"]
windows_platform = ["pywin32 (!=226)", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]

[[package]]
name = "twisted-iocpsupport"

@@ -1355,6 +1317,14 @@ category = "dev"

optional = false
python-versions = "*"

[[package]]
name = "types-commonmark"
version = "0.9.2"
description = "Typing stubs for commonmark"
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "types-cryptography"
version = "3.3.15"

@@ -1401,7 +1371,7 @@ python-versions = "*"

[[package]]
name = "types-pillow"
version = "9.0.15"
description = "Typing stubs for Pillow"
category = "dev"
optional = false

@@ -1576,7 +1546,7 @@ docs = ["sphinx", "repoze.sphinx.autointerface"]

test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"]

[extras]
all = ["matrix-synapse-ldap3", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pysaml2", "authlib", "lxml", "sentry-sdk", "jaeger-client", "opentracing", "pyjwt", "txredisapi", "hiredis", "Pympler"]
cache_memory = ["Pympler"]
jwt = ["pyjwt"]
matrix-synapse-ldap3 = ["matrix-synapse-ldap3"]

@@ -1592,14 +1562,10 @@ url_preview = ["lxml"]

[metadata]
lock-version = "1.1"
python-versions = "^3.7.1"
content-hash = "d39d5ac5d51c014581186b7691999b861058b569084c525523baf70b77f292b1"

[metadata.files]
attrs = [
    {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
    {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},

@@ -1612,10 +1578,6 @@ automat = [

    {file = "Automat-20.2.0-py2.py3-none-any.whl", hash = "sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111"},
    {file = "Automat-20.2.0.tar.gz", hash = "sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33"},
]
bcrypt = [
    {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b589229207630484aefe5899122fb938a5b017b0f4349f769b8c13e78d99a8fd"},
    {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"},

@@ -1814,8 +1776,8 @@ gitdb = [

    {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
]
gitpython = [
    {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"},
    {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"},
]
hiredis = [
    {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"},

@@ -2084,7 +2046,8 @@ matrix-common = [

    {file = "matrix_common-1.1.0.tar.gz", hash = "sha256:a8238748afc2b37079818367fed5156f355771b07c8ff0a175934f47e0ff3276"},
]
matrix-synapse-ldap3 = [
    {file = "matrix-synapse-ldap3-0.2.0.tar.gz", hash = "sha256:91a0715b43a41ec3033244174fca20846836da98fda711fb01687f7199eecd2e"},
    {file = "matrix_synapse_ldap3-0.2.0-py3-none-any.whl", hash = "sha256:0128ca7c3058987adc2e8a88463bb46879915bfd3d373309632813b353e30f9f"},
]
mccabe = [
    {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},

@@ -2127,34 +2090,37 @@ msgpack = [

    {file = "msgpack-1.0.3.tar.gz", hash = "sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e"},
]
mypy = [
    {file = "mypy-0.950-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b"},
    {file = "mypy-0.950-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0"},
    {file = "mypy-0.950-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e7647df0f8fc947388e6251d728189cfadb3b1e558407f93254e35abc026e22"},
    {file = "mypy-0.950-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eaff8156016487c1af5ffa5304c3e3fd183edcb412f3e9c72db349faf3f6e0eb"},
    {file = "mypy-0.950-cp310-cp310-win_amd64.whl", hash = "sha256:563514c7dc504698fb66bb1cf897657a173a496406f1866afae73ab5b3cdb334"},
    {file = "mypy-0.950-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dd4d670eee9610bf61c25c940e9ade2d0ed05eb44227275cce88701fee014b1f"},
    {file = "mypy-0.950-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca75ecf2783395ca3016a5e455cb322ba26b6d33b4b413fcdedfc632e67941dc"},
    {file = "mypy-0.950-cp36-cp36m-win_amd64.whl", hash = "sha256:6003de687c13196e8a1243a5e4bcce617d79b88f83ee6625437e335d89dfebe2"},
    {file = "mypy-0.950-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c653e4846f287051599ed8f4b3c044b80e540e88feec76b11044ddc5612ffed"},
    {file = "mypy-0.950-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e19736af56947addedce4674c0971e5dceef1b5ec7d667fe86bcd2b07f8f9075"},
    {file = "mypy-0.950-cp37-cp37m-win_amd64.whl", hash = "sha256:ef7beb2a3582eb7a9f37beaf38a28acfd801988cde688760aea9e6cc4832b10b"},
    {file = "mypy-0.950-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0112752a6ff07230f9ec2f71b0d3d4e088a910fdce454fdb6553e83ed0eced7d"},
    {file = "mypy-0.950-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee0a36edd332ed2c5208565ae6e3a7afc0eabb53f5327e281f2ef03a6bc7687a"},
    {file = "mypy-0.950-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77423570c04aca807508a492037abbd72b12a1fb25a385847d191cd50b2c9605"},
    {file = "mypy-0.950-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ce6a09042b6da16d773d2110e44f169683d8cc8687e79ec6d1181a72cb028d2"},
    {file = "mypy-0.950-cp38-cp38-win_amd64.whl", hash = "sha256:5b231afd6a6e951381b9ef09a1223b1feabe13625388db48a8690f8daa9b71ff"},
    {file = "mypy-0.950-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0384d9f3af49837baa92f559d3fa673e6d2652a16550a9ee07fc08c736f5e6f8"},
    {file = "mypy-0.950-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1fdeb0a0f64f2a874a4c1f5271f06e40e1e9779bf55f9567f149466fc7a55038"},
    {file = "mypy-0.950-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:61504b9a5ae166ba5ecfed9e93357fd51aa693d3d434b582a925338a2ff57fd2"},
    {file = "mypy-0.950-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a952b8bc0ae278fc6316e6384f67bb9a396eb30aced6ad034d3a76120ebcc519"},
    {file = "mypy-0.950-cp39-cp39-win_amd64.whl", hash = "sha256:eaea21d150fb26d7b4856766e7addcf929119dd19fc832b22e71d942835201ef"},
    {file = "mypy-0.950-py3-none-any.whl", hash = "sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb"},
    {file = "mypy-0.950.tar.gz", hash = "sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de"},
]
mypy-extensions = [
    {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
    {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
mypy-zope = [
    {file = "mypy-zope-0.3.7.tar.gz", hash = "sha256:9da171e78e8ef7ac8922c86af1a62f1b7f3244f121020bd94a2246bc3f33c605"},
    {file = "mypy_zope-0.3.7-py3-none-any.whl", hash = "sha256:9c7637d066e4d1bafa0651abc091c752009769098043b236446e6725be2bc9c2"},
]
netaddr = [
    {file = "netaddr-0.8.0-py2.py3-none-any.whl", hash = "sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac"},

@@ -2225,8 +2191,8 @@ platformdirs = [

    {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"},
]
prometheus-client = [
    {file = "prometheus_client-0.14.0-py3-none-any.whl", hash = "sha256:f4aba3fdd1735852049f537c1f0ab177159b7ab76f271ecc4d2f45aa2a1d01f2"},
    {file = "prometheus_client-0.14.0.tar.gz", hash = "sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750"},
]
psycopg2 = [
    {file = "psycopg2-2.9.3-cp310-cp310-win32.whl", hash = "sha256:083707a696e5e1c330af2508d8fab36f9700b26621ccbcb538abe22e15485362"},

@@ -2407,10 +2373,6 @@ readme-renderer = [

    {file = "readme_renderer-33.0-py3-none-any.whl", hash = "sha256:f02cee0c4de9636b5a62b6be50c9742427ba1b956aad1d938bfb087d0d72ccdf"},
    {file = "readme_renderer-33.0.tar.gz", hash = "sha256:e3b53bc84bd6af054e4cc1fe3567dc1ae19f554134221043a3f8c674e22209db"},
]
requests = [
    {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
    {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},

@@ -2423,17 +2385,13 @@ rfc3986 = [

    {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"},
    {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"},
]
secretstorage = [
    {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"},
    {file = "SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"},
]
sentry-sdk = [
    {file = "sentry-sdk-1.5.11.tar.gz", hash = "sha256:6c01d9d0b65935fd275adc120194737d1df317dce811e642cbf0394d0d37a007"},
    {file = "sentry_sdk-1.5.11-py2.py3-none-any.whl", hash = "sha256:c17179183cac614e900cbd048dab03f49a48e2820182ec686c25e7ce46f8548f"},
]
service-identity = [
    {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"},

@@ -2592,8 +2550,8 @@ twine = [

    {file = "twine-3.8.0.tar.gz", hash = "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19"},
]
twisted = [
    {file = "Twisted-22.4.0-py3-none-any.whl", hash = "sha256:f9f7a91f94932477a9fc3b169d57f54f96c6e74a23d78d9ce54039a7f48928a2"},
    {file = "Twisted-22.4.0.tar.gz", hash = "sha256:a047990f57dfae1e0bd2b7df2526d4f16dcdc843774dc108b78c52f2a5f13680"},
]
twisted-iocpsupport = [
    {file = "twisted-iocpsupport-1.0.2.tar.gz", hash = "sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9"},

@@ -2643,6 +2601,10 @@ types-bleach = [

    {file = "types-bleach-4.1.4.tar.gz", hash = "sha256:2d30c2c4fb6854088ac636471352c9a51bf6c089289800d2a8060820a01cd43a"},
    {file = "types_bleach-4.1.4-py3-none-any.whl", hash = "sha256:edffe173ed6d7b6f3543036a96204a9319c3bf6c3645917b14274e43f000cc9b"},
]
types-commonmark = [
    {file = "types-commonmark-0.9.2.tar.gz", hash = "sha256:b894b67750c52fd5abc9a40a9ceb9da4652a391d75c1b480bba9cef90f19fc86"},
    {file = "types_commonmark-0.9.2-py3-none-any.whl", hash = "sha256:56f20199a1f9a2924443211a0ef97f8b15a8a956a7f4e9186be6950bf38d6d02"},
]
types-cryptography = [
    {file = "types-cryptography-3.3.15.tar.gz", hash = "sha256:a7983a75a7b88a18f88832008f0ef140b8d1097888ec1a0824ec8fb7e105273b"},
    {file = "types_cryptography-3.3.15-py3-none-any.whl", hash = "sha256:d9b0dd5465d7898d400850e7f35e5518aa93a7e23d3e11757cd81b4777089046"},

@@ -2664,8 +2626,8 @@ types-opentracing = [

    {file = "types_opentracing-2.4.7-py3-none-any.whl", hash = "sha256:861fb8103b07cf717f501dd400cb274ca9992552314d4d6c7a824b11a215e512"},
]
types-pillow = [
    {file = "types-Pillow-9.0.15.tar.gz", hash = "sha256:d2e385fe5c192e75970f18accce69f5c2a9f186f3feb578a9b91cd6fdf64211d"},
    {file = "types_Pillow-9.0.15-py3-none-any.whl", hash = "sha256:c9646595dfafdf8b63d4b1443292ead17ee0fc7b18a143e497b68e0ea2dc1eb6"},
]
types-psycopg2 = [
    {file = "types-psycopg2-2.9.9.tar.gz", hash = "sha256:4f9d4d52eeb343dc00fd5ed4f1513a8a5c18efba0a072eb82706d15cf4f20a2e"},
@@ -54,7 +54,7 @@ skip_gitignore = true

[tool.poetry]
name = "matrix-synapse"
version = "1.59.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"

@@ -100,7 +100,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"

update_synapse_database = "synapse._scripts.update_synapse_database:main"

[tool.poetry.dependencies]
python = "^3.7.1"

# Mandatory Dependencies
# ----------------------

@@ -142,8 +142,10 @@ netaddr = ">=0.7.18"

# add a lower bound to the Jinja2 dependency.
Jinja2 = ">=3.0"
bleach = ">=1.4.3"
# We use `ParamSpec` and `Concatenate`, which were added in `typing-extensions` 3.10.0.0.
# Additionally we need https://github.com/python/typing/pull/817 to allow types to be
# generic over ParamSpecs.
typing-extensions = ">=3.10.0.1"
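
As a hedged illustration of what the new floor buys (a sketch, not Synapse code): `ParamSpec` plus `Concatenate` let a decorator strip a leading argument while preserving the rest of the wrapped signature.

```python
from typing import Callable, TypeVar

from typing_extensions import Concatenate, ParamSpec

P = ParamSpec("P")
R = TypeVar("R")

def with_default_label(f: Callable[Concatenate[str, P], R]) -> Callable[P, R]:
    """Pre-bind the leading `label: str` argument; remaining params stay typed."""
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
        return f("default-label", *args, **kwargs)
    return wrapped
```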

# We enforce that we have a `cryptography` version that bundles an `openssl`
# with the latest security patches.
cryptography = ">=3.4.7"

@@ -231,10 +233,11 @@ all = [

    "jaeger-client", "opentracing",
    # jwt
    "pyjwt",
    # redis
    "txredisapi", "hiredis",
    # cache_memory
    "pympler",
    # omitted:
    # - test: it's useful to have this separate from dev deps in the olddeps job
    # - systemd: this is a system-based requirement
]

@@ -248,9 +251,10 @@ flake8-bugbear = "==21.3.2"

flake8 = "*"

# Typechecking
mypy = "*"
mypy-zope = "*"
types-bleach = ">=4.1.0"
types-commonmark = ">=0.9.2"
types-jsonschema = ">=3.2.0"
types-opentracing = ">=2.4.2"
types-Pillow = ">=8.3.4"

@@ -270,8 +274,8 @@ idna = ">=2.5"

# The following are used by the release script
click = "==8.1.0"
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
GitPython = ">=3.1.20"
commonmark = "==0.9.1"
pygithub = "==1.55"
# The following are executed as commands by the release script.

@@ -280,5 +284,5 @@ twine = "*"

towncrier = ">=18.6.0rc1"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
@@ -17,7 +17,8 @@ import subprocess

import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from types import FrameType
from typing import Collection, Optional, Sequence, Set

DISTS = (
    "debian:buster",  # oldstable: EOL 2022-08

@@ -26,6 +27,7 @@ DISTS = (

    "debian:sid",
    "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
    "ubuntu:impish",  # 21.10 (EOL 2022-07)
    "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04)
)

DESC = """\

@@ -40,15 +42,17 @@ projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

class Builder(object):
    def __init__(
        self,
        redirect_stdout: bool = False,
        docker_build_args: Optional[Sequence[str]] = None,
    ):
        self.redirect_stdout = redirect_stdout
        self._docker_build_args = tuple(docker_build_args or ())
        self.active_containers: Set[str] = set()
        self._lock = threading.Lock()
        self._failed = False

    def run_build(self, dist: str, skip_tests: bool = False) -> None:
        """Build deb for a single distribution"""

        if self._failed:

@@ -62,7 +66,7 @@ class Builder(object):

            self._failed = True
            raise

    def _inner_build(self, dist: str, skip_tests: bool = False) -> None:
        tag = dist.split(":", 1)[1]

        # Make the dir where the debs will live.

@@ -137,7 +141,7 @@ class Builder(object):

        stdout.close()
        print("Completed build of %s" % (dist,))

    def kill_containers(self) -> None:
        with self._lock:
            active = list(self.active_containers)

@@ -155,8 +159,10 @@ class Builder(object):

            self.active_containers.remove(c)


def run_builds(
    builder: Builder, dists: Collection[str], jobs: int = 1, skip_tests: bool = False
) -> None:
    def sig(signum: int, _frame: Optional[FrameType]) -> None:
        print("Caught SIGINT")
        builder.kill_containers()

@@ -43,6 +43,8 @@ fi

# Build the base Synapse image from the local checkout
docker build -t matrixdotorg/synapse -f "docker/Dockerfile" .

extra_test_args=()

# If we're using workers, modify the docker files slightly.
if [[ -n "$WORKERS" ]]; then
    # Build the workers docker image (from the base Synapse image).

@@ -52,7 +54,14 @@ if [[ -n "$WORKERS" ]]; then

    COMPLEMENT_DOCKERFILE=SynapseWorkers.Dockerfile

    # And provide some more configuration to complement.

    # It can take quite a while to spin up a worker-mode Synapse for the first
    # time (the main problem is that we start 14 python processes for each test,
    # and complement likes to do two of them in parallel).
    export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=120

    # ... and it takes longer than 10m to run the whole suite.
    extra_test_args+=("-timeout=60m")
else
    export COMPLEMENT_BASE_IMAGE=complement-synapse
    COMPLEMENT_DOCKERFILE=Dockerfile

@@ -64,4 +73,4 @@ docker build -t $COMPLEMENT_BASE_IMAGE -f "docker/complement/$COMPLEMENT_DOCKERF

# Run the tests!
echo "Images built; running complement"
cd "$COMPLEMENT_DIR"
go test -v -tags synapse_blacklist,msc2716,msc3030,faster_joins -count=1 "${extra_test_args[@]}" "$@" ./tests/...

@@ -38,7 +38,7 @@ import argparse

import base64
import json
import sys
from typing import Any, Dict, Optional, Tuple
from urllib import parse as urlparse

import requests

@@ -47,13 +47,14 @@ import signedjson.types

import srvlookup
import yaml
from requests.adapters import HTTPAdapter
from urllib3 import HTTPConnectionPool

# uncomment the following to enable debug logging of http requests
# from httplib import HTTPConnection
# HTTPConnection.debuglevel = 1


def encode_base64(input_bytes: bytes) -> str:
    """Encode bytes as a base64 string without any padding."""

    input_len = len(input_bytes)

@@ -63,7 +64,7 @@ def encode_base64(input_bytes):

    return output_string


def encode_canonical_json(value: object) -> bytes:
    return json.dumps(
        value,
        # Encode code-points outside of ASCII as UTF-8 rather than \u escapes

@@ -124,8 +125,13 @@ def request(

    authorization_headers = []

    for key, sig in signed_json["signatures"][origin_name].items():
        header = 'X-Matrix origin=%s,key="%s",sig="%s",destination="%s"' % (
            origin_name,
            key,
            sig,
            destination,
        )
        authorization_headers.append(header)
        print("Authorization: %s" % header, file=sys.stderr)

    dest = "matrix://%s%s" % (destination, path)
|
||||
@@ -134,7 +140,10 @@ def request(
|
||||
s = requests.Session()
|
||||
s.mount("matrix://", MatrixConnectionAdapter())
|
||||
|
||||
headers = {"Host": destination, "Authorization": authorization_headers[0]}
|
||||
headers: Dict[str, str] = {
|
||||
"Host": destination,
|
||||
"Authorization": authorization_headers[0],
|
||||
}
|
||||
|
||||
if method == "POST":
|
||||
headers["Content-Type"] = "application/json"
|
||||
@@ -149,7 +158,7 @@ def request(
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Signs and sends a federation request to a matrix homeserver"
|
||||
)
|
||||
@@ -207,6 +216,7 @@ def main():
|
||||
if not args.server_name or not args.signing_key:
|
||||
read_args_from_config(args)
|
||||
|
||||
assert isinstance(args.signing_key, str)
|
||||
algorithm, version, key_base64 = args.signing_key.split()
|
||||
key = signedjson.key.decode_signing_key_base64(algorithm, version, key_base64)
|
||||
|
||||
@@ -228,7 +238,7 @@ def main():
|
||||
print("")
|
||||
|
||||
|
||||
def read_args_from_config(args):
|
||||
def read_args_from_config(args: argparse.Namespace) -> None:
|
||||
with open(args.config, "r") as fh:
|
||||
config = yaml.safe_load(fh)
|
||||
|
||||
@@ -245,7 +255,7 @@ def read_args_from_config(args):
|
||||
|
||||
class MatrixConnectionAdapter(HTTPAdapter):
|
||||
@staticmethod
|
||||
def lookup(s, skip_well_known=False):
|
||||
def lookup(s: str, skip_well_known: bool = False) -> Tuple[str, int]:
|
||||
if s[-1] == "]":
|
||||
# ipv6 literal (with no port)
|
||||
return s, 8448
|
||||
@@ -271,7 +281,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
|
||||
return s, 8448
|
||||
|
||||
@staticmethod
|
||||
def get_well_known(server_name):
|
||||
def get_well_known(server_name: str) -> Optional[str]:
|
||||
uri = "https://%s/.well-known/matrix/server" % (server_name,)
|
||||
print("fetching %s" % (uri,), file=sys.stderr)
|
||||
|
||||
@@ -294,7 +304,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
|
||||
print("Invalid response from %s: %s" % (uri, e), file=sys.stderr)
|
||||
return None
|
||||
|
||||
def get_connection(self, url, proxies=None):
|
||||
def get_connection(
|
||||
self, url: str, proxies: Optional[Dict[str, str]] = None
|
||||
) -> HTTPConnectionPool:
|
||||
parsed = urlparse.urlparse(url)
|
||||
|
||||
(host, port) = self.lookup(parsed.netloc)
|
||||
|
||||
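The change above adds a `destination` field to the X-Matrix Authorization header. For illustration, here is the header the updated code would produce; the server names, key ID and signature are hypothetical, not values from the diff:

origin_name, destination = "origin.example.com", "dest.example.com"
key, sig = "ed25519:a_AbCd", "base64signature"
header = 'X-Matrix origin=%s,key="%s",sig="%s",destination="%s"' % (
    origin_name, key, sig, destination
)
# -> X-Matrix origin=origin.example.com,key="ed25519:a_AbCd",sig="base64signature",destination="dest.example.com"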
@@ -79,8 +79,20 @@ else
  # If we were not asked to lint changed files, and no paths were found as a result,
  # then lint everything!
  if [[ -z ${files+x} ]]; then
    # Lint all source code files and directories
    files=( "." )
    # CI runs each linter on the entire checkout, e.g. `black .`. So don't
    # rely on this list to *find* lint targets if that misses a file; instead,
    # use it to exclude files from linters when this can't be done by config.
    #
    # To check which files the linters examine, use:
    #     black --verbose . 2>&1 | \grep -v ignored
    #     isort --show-files .
    #     flake8 --verbose .  # This isn't a great option
    #     mypy has explicit config in mypy.ini; there is also mypy --verbose
    files=(
      "synapse" "docker" "tests"
      "scripts-dev"
      "contrib" "synmark" "stubs" ".ci"
    )
  fi
fi
@@ -16,7 +16,7 @@
can crop up, e.g. the cache descriptors.
"""

from typing import Callable, Optional
from typing import Callable, Optional, Type

from mypy.nodes import ARG_NAMED_OPT
from mypy.plugin import MethodSigContext, Plugin
@@ -94,7 +94,7 @@ def cached_function_method_signature(ctx: MethodSigContext) -> CallableType:
    return signature


def plugin(version: str):
def plugin(version: str) -> Type[SynapsePlugin]:
    # This is the entry point of the plugin, and lets us deal with the fact
    # that the mypy plugin interface is *not* stable by looking at the version
    # string.
@@ -25,19 +25,20 @@ import sys
import urllib.request
from os import path
from tempfile import TemporaryDirectory
from typing import List, Optional, Tuple
from typing import Any, List, Optional, cast

import attr
import click
import commonmark
import git
import redbaron
from click.exceptions import ClickException
from github import Github
from packaging import version


def run_until_successful(command, *args, **kwargs):
def run_until_successful(
    command: str, *args: Any, **kwargs: Any
) -> subprocess.CompletedProcess:
    while True:
        completed_process = subprocess.run(command, *args, **kwargs)
        exit_code = completed_process.returncode
@@ -51,7 +52,7 @@ def run_until_successful(command, *args, **kwargs):
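The tail of the retry loop is elided by the hunk above. A minimal self-contained sketch of the idea, assuming the loop simply reports the failure and re-runs (the real prompt/exit handling may differ):

import subprocess

def run_until_successful_sketch(command: str) -> subprocess.CompletedProcess:
    # Keep re-running the shell command until it exits zero.
    while True:
        completed_process = subprocess.run(command, shell=True)
        if completed_process.returncode == 0:
            return completed_process
        print(f"{command!r} failed with exit code {completed_process.returncode}; retrying.")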
@click.group()
def cli():
def cli() -> None:
    """An interactive script to walk through the parts of creating a release.

    Requires the dev dependencies be installed, which can be done via:
@@ -69,11 +70,12 @@ def cli():
        # ... wait for assets to build ...

        ./scripts-dev/release.py publish

        ./scripts-dev/release.py upload

        # Optional: generate some nice links for the announcement

        ./scripts-dev/release.py upload
        ./scripts-dev/release.py announce

    If the env var GH_TOKEN (or GITHUB_TOKEN) is set, or passed into the
    `tag`/`publish` command, then a new draft release will be created/published.
@@ -81,25 +83,19 @@ def cli():


@cli.command()
def prepare():
def prepare() -> None:
    """Do the initial stages of creating a release, including creating release
    branch, updating changelog and pushing to GitHub.
    """

    # Make sure we're in a git repo.
    try:
        repo = git.Repo()
    except git.InvalidGitRepositoryError:
        raise click.ClickException("Not in Synapse repo.")

    if repo.is_dirty():
        raise click.ClickException("Uncommitted changes exist.")
    repo = get_repo_and_check_clean_checkout()

    click.secho("Updating git repo...")
    repo.remote().fetch()

    # Get the current version and AST from root Synapse module.
    current_version, parsed_synapse_ast, version_node = parse_version_from_module()
    current_version = get_package_version()

    # Figure out what sort of release we're doing and calculate the new version.
    rc = click.confirm("RC", default=True)
@@ -161,22 +157,21 @@ def prepare():
        click.get_current_context().abort()

    # Switch to the release branch.
    parsed_new_version = version.parse(new_version)
    # Cast safety: parse() won't return a version.LegacyVersion from our
    # version string format.
    parsed_new_version = cast(version.Version, version.parse(new_version))

    # We assume for debian changelogs that we only do RCs or full releases.
    assert not parsed_new_version.is_devrelease
    assert not parsed_new_version.is_postrelease

    release_branch_name = (
        f"release-v{parsed_new_version.major}.{parsed_new_version.minor}"
    )
    release_branch_name = get_release_branch_name(parsed_new_version)
    release_branch = find_ref(repo, release_branch_name)
    if release_branch:
        if release_branch.is_remote():
            # If the release branch only exists on the remote we check it out
            # locally.
            repo.git.checkout(release_branch_name)
            release_branch = repo.active_branch
    else:
        # If a branch doesn't exist we create one. We ask which branch it
        # should be based off, defaulting to sensible values depending on the
@@ -198,25 +193,25 @@ def prepare():
            click.get_current_context().abort()

        # Check out the base branch and ensure it's up to date
        repo.head.reference = base_branch
        repo.head.set_reference(base_branch, "check out the base branch")
        repo.head.reset(index=True, working_tree=True)
        if not base_branch.is_remote():
            update_branch(repo)

        # Create the new release branch
        release_branch = repo.create_head(release_branch_name, commit=base_branch)
        # Type ignore will no longer be needed after GitPython 3.1.28.
        # See https://github.com/gitpython-developers/GitPython/pull/1419
        repo.create_head(release_branch_name, commit=base_branch)  # type: ignore[arg-type]

    # Switch to the release branch and ensure its up to date.
    # Switch to the release branch and ensure it's up to date.
    repo.git.checkout(release_branch_name)
    update_branch(repo)

    # Update the `__version__` variable and write it back to the file.
    version_node.value = '"' + new_version + '"'
    with open("synapse/__init__.py", "w") as f:
        f.write(parsed_synapse_ast.dumps())
    # Update the version specified in pyproject.toml.
    subprocess.check_output(["poetry", "version", new_version])

    # Generate changelogs.
    generate_and_write_changelog(current_version)
    generate_and_write_changelog(current_version, new_version)

    # Generate debian changelogs
    if parsed_new_version.pre is not None:
@@ -229,7 +224,7 @@ def prepare():
        debian_version = new_version

    run_until_successful(
        f'dch -M -v {debian_version} "New synapse release {debian_version}."',
        f'dch -M -v {debian_version} "New Synapse release {new_version}."',
        shell=True,
    )
    run_until_successful('dch -M -r -D stable ""', shell=True)
@@ -267,35 +262,43 @@ def prepare():

@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
def tag(gh_token: Optional[str]):
def tag(gh_token: Optional[str]) -> None:
    """Tags the release and generates a draft GitHub release"""

    # Make sure we're in a git repo.
    try:
        repo = git.Repo()
    except git.InvalidGitRepositoryError:
        raise click.ClickException("Not in Synapse repo.")

    if repo.is_dirty():
        raise click.ClickException("Uncommitted changes exist.")
    repo = get_repo_and_check_clean_checkout()

    click.secho("Updating git repo...")
    repo.remote().fetch()

    # Find out the version and tag name.
    current_version, _, _ = parse_version_from_module()
    current_version = get_package_version()
    tag_name = f"v{current_version}"

    # Check we haven't released this version.
    if tag_name in repo.tags:
        raise click.ClickException(f"Tag {tag_name} already exists!\n")

    # Check we're on the right release branch
    release_branch = get_release_branch_name(current_version)
    if repo.active_branch.name != release_branch:
        click.echo(
            f"Need to be on the release branch ({release_branch}) before tagging. "
            f"Currently on ({repo.active_branch.name})."
        )
        click.get_current_context().abort()

    # Get the appropriate changelogs and tag.
    changes = get_changes_for_version(current_version)

    click.echo_via_pager(changes)
    if click.confirm("Edit text?", default=False):
        changes = click.edit(changes, require_save=False)
        edited_changes = click.edit(changes, require_save=False)
        # This assert is for mypy's benefit. click's docs are a little unclear, but
        # when `require_save=False`, not saving the temp file in the editor returns
        # the original string.
        assert edited_changes is not None
        changes = edited_changes

    repo.create_tag(tag_name, message=changes, sign=True)

@@ -349,22 +352,16 @@ def tag(gh_token: Optional[str]):

@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
def publish(gh_token: str):
    """Publish release."""
def publish(gh_token: str) -> None:
    """Publish release on GitHub."""

    # Make sure we're in a git repo.
    try:
        repo = git.Repo()
    except git.InvalidGitRepositoryError:
        raise click.ClickException("Not in Synapse repo.")
    get_repo_and_check_clean_checkout()

    if repo.is_dirty():
        raise click.ClickException("Uncommitted changes exist.")

    current_version, _, _ = parse_version_from_module()
    current_version = get_package_version()
    tag_name = f"v{current_version}"

    if not click.confirm(f"Publish {tag_name}?", default=True):
    if not click.confirm(f"Publish release {tag_name} on GitHub?", default=True):
        return

    # Publish the draft release
@@ -392,12 +389,19 @@ def publish(gh_token: str):


@cli.command()
def upload():
def upload() -> None:
    """Upload release to pypi."""

    current_version, _, _ = parse_version_from_module()
    current_version = get_package_version()
    tag_name = f"v{current_version}"

    # Check we have the right tag checked out.
    repo = get_repo_and_check_clean_checkout()
    tag = repo.tag(f"refs/tags/{tag_name}")
    if repo.head.commit != tag.commit:
        click.echo(f"Tag {tag_name} ({tag.commit}) is not currently checked out!")
        click.get_current_context().abort()

    pypi_asset_names = [
        f"matrix_synapse-{current_version}-py3-none-any.whl",
        f"matrix-synapse-{current_version}.tar.gz",
@@ -420,17 +424,17 @@ def upload():


@cli.command()
def announce():
def announce() -> None:
    """Generate markdown to announce the release."""

    current_version, _, _ = parse_version_from_module()
    current_version = get_package_version()
    tag_name = f"v{current_version}"

    click.echo(
        f"""
Hi everyone. Synapse {current_version} has just been released.

[notes](https://github.com/matrix-org/synapse/releases/tag/{tag_name}) |\
[notes](https://github.com/matrix-org/synapse/releases/tag/{tag_name}) | \
[docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \
[debs](https://packages.matrix.org/debian/) | \
[pypi](https://pypi.org/project/matrix-synapse/{current_version}/)"""
@@ -454,53 +458,43 @@ Announce the release in
    )


def parse_version_from_module() -> Tuple[
    version.Version, redbaron.RedBaron, redbaron.Node
]:
    # Parse the AST and load the `__version__` node so that we can edit it
    # later.
    with open("synapse/__init__.py") as f:
        red = redbaron.RedBaron(f.read())
def get_package_version() -> version.Version:
    version_string = subprocess.check_output(["poetry", "version", "--short"]).decode(
        "utf-8"
    )
    return version.Version(version_string)

    version_node = None
    for node in red:
        if node.type != "assignment":
            continue

        if node.target.type != "name":
            continue
def get_release_branch_name(version_number: version.Version) -> str:
    return f"release-v{version_number.major}.{version_number.minor}"

        if node.target.value != "__version__":
            continue

        version_node = node
        break

    if not version_node:
        print("Failed to find '__version__' definition in synapse/__init__.py")
        sys.exit(1)

    # Parse the current version.
    current_version = version.parse(version_node.value.value.strip('"'))
    assert isinstance(current_version, version.Version)

    return current_version, red, version_node
def get_repo_and_check_clean_checkout() -> git.Repo:
    """Get the project repo and check it's not got any uncommitted changes."""
    try:
        repo = git.Repo()
    except git.InvalidGitRepositoryError:
        raise click.ClickException("Not in Synapse repo.")
    if repo.is_dirty():
        raise click.ClickException("Uncommitted changes exist.")
    return repo


def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
    """Find the branch/ref, looking first locally then in the remote."""
    if ref_name in repo.refs:
        return repo.refs[ref_name]
    if ref_name in repo.references:
        return repo.references[ref_name]
    elif ref_name in repo.remote().refs:
        return repo.remote().refs[ref_name]
    else:
        return None


def update_branch(repo: git.Repo):
def update_branch(repo: git.Repo) -> None:
    """Ensure branch is up to date if it has a remote"""
    if repo.active_branch.tracking_branch():
        repo.git.merge(repo.active_branch.tracking_branch().name)
    tracking_branch = repo.active_branch.tracking_branch()
    if tracking_branch:
        repo.git.merge(tracking_branch.name)


def get_changes_for_version(wanted_version: version.Version) -> str:
@@ -564,11 +558,15 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
    return "\n".join(version_changelog)


def generate_and_write_changelog(current_version: version.Version):
def generate_and_write_changelog(
    current_version: version.Version, new_version: str
) -> None:
    # We do this by getting a draft so that we can edit it before writing to the
    # changelog.
    result = run_until_successful(
        "python3 -m towncrier --draft", shell=True, capture_output=True
        f"python3 -m towncrier build --draft --version {new_version}",
        shell=True,
        capture_output=True,
    )
    new_changes = result.stdout.decode("utf-8")
    new_changes = new_changes.replace(
@@ -584,8 +582,8 @@ def generate_and_write_changelog(current_version: version.Version):
    f.write(existing_content)

    # Remove all the news fragments
    for f in glob.iglob("changelog.d/*.*"):
        os.remove(f)
    for filename in glob.iglob("changelog.d/*.*"):
        os.remove(filename)


if __name__ == "__main__":
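A quick illustration of the new poetry-based helpers introduced above, assuming a checkout where `poetry version --short` would print a hypothetical "1.58.0rc1":

from packaging import version

v = version.Version("1.58.0rc1")
assert v.pre is not None  # an RC, so prepare() takes the RC path for debian changelogs
print(f"release-v{v.major}.{v.minor}")  # -> "release-v1.58", as get_release_branch_name builds it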
@@ -27,7 +27,7 @@ from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.util import json_encoder


def main():
def main() -> None:
    parser = argparse.ArgumentParser(
        description="""Adds a signature to a JSON object.
@@ -1,9 +0,0 @@
[check-manifest]
ignore =
    .git-blame-ignore-revs
    contrib
    contrib/*
    docs/*
    pylint.cfg
    tox.ini
setup.py (183 lines deleted)
@@ -1,183 +0,0 @@
#!/usr/bin/env python

# Copyright 2014-2017 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2017-2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Any, Dict

from setuptools import Command, find_packages, setup

here = os.path.abspath(os.path.dirname(__file__))


# Some notes on `setup.py test`:
#
# Once upon a time we used to try to make `setup.py test` run `tox` to run the
# tests. That's a bad idea for three reasons:
#
# 1: `setup.py test` is supposed to find out whether the tests work in the
#    *current* environment, not whatever tox sets up.
# 2: Empirically, trying to install tox during the test run wasn't working ("No
#    module named virtualenv").
# 3: The tox documentation advises against it[1].
#
# Even further back in time, we used to use setuptools_trial [2]. That has its
# own set of issues: for instance, it requires installation of Twisted to build
# an sdist (because the recommended mode of usage is to add it to
# `setup_requires`). That in turn means that in order to successfully run tox
# you have to have the python header files installed for whichever version of
# python tox uses (which is python3 on recent ubuntus, for example).
#
# So, for now at least, we stick with what appears to be the convention among
# Twisted projects, and don't attempt to do anything when someone runs
# `setup.py test`; instead we direct people to run `trial` directly if they
# care.
#
# [1]: http://tox.readthedocs.io/en/2.5.0/example/basic.html#integration-with-setup-py-test-command
# [2]: https://pypi.python.org/pypi/setuptools_trial
class TestCommand(Command):
    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        print(
            """Synapse's tests cannot be run via setup.py. To run them, try:
     PYTHONPATH="." trial tests
"""
        )


def read_file(path_segments):
    """Read a file from the package. Takes a list of strings to join to
    make the path"""
    file_path = os.path.join(here, *path_segments)
    with open(file_path) as f:
        return f.read()


def exec_file(path_segments):
    """Execute a single python file to get the variables defined in it"""
    result: Dict[str, Any] = {}
    code = read_file(path_segments)
    exec(code, result)
    return result


version = exec_file(("synapse", "__init__.py"))["__version__"]
dependencies = exec_file(("synapse", "python_dependencies.py"))
long_description = read_file(("README.rst",))

REQUIREMENTS = dependencies["REQUIREMENTS"]
CONDITIONAL_REQUIREMENTS = dependencies["CONDITIONAL_REQUIREMENTS"]
ALL_OPTIONAL_REQUIREMENTS = dependencies["ALL_OPTIONAL_REQUIREMENTS"]

# Make `pip install matrix-synapse[all]` install all the optional dependencies.
CONDITIONAL_REQUIREMENTS["all"] = list(ALL_OPTIONAL_REQUIREMENTS)

# Developer dependencies should not get included in "all".
#
# We pin black so that our tests don't start failing on new releases.
CONDITIONAL_REQUIREMENTS["lint"] = [
    "isort==5.7.0",
    "black==22.3.0",
    "flake8-comprehensions",
    "flake8-bugbear==21.3.2",
    "flake8",
]

CONDITIONAL_REQUIREMENTS["mypy"] = [
    "mypy==0.931",
    "mypy-zope==0.3.5",
    "types-bleach>=4.1.0",
    "types-jsonschema>=3.2.0",
    "types-opentracing>=2.4.2",
    "types-Pillow>=8.3.4",
    "types-psycopg2>=2.9.9",
    "types-pyOpenSSL>=20.0.7",
    "types-PyYAML>=5.4.10",
    "types-requests>=2.26.0",
    "types-setuptools>=57.4.0",
]

# Dependencies which are exclusively required by unit test code. This is
# NOT a list of all modules that are necessary to run the unit tests.
# Tests assume that all optional dependencies are installed.
#
# parameterized_class decorator was introduced in parameterized 0.7.0
CONDITIONAL_REQUIREMENTS["test"] = ["parameterized>=0.7.0", "idna>=2.5"]

CONDITIONAL_REQUIREMENTS["dev"] = (
    CONDITIONAL_REQUIREMENTS["lint"]
    + CONDITIONAL_REQUIREMENTS["mypy"]
    + CONDITIONAL_REQUIREMENTS["test"]
    + [
        # The following are used by the release script
        "click==8.1.0",
        "redbaron==0.9.2",
        "GitPython==3.1.14",
        "commonmark==0.9.1",
        "pygithub==1.55",
        # The following are executed as commands by the release script.
        "twine",
        "towncrier",
    ]
)

setup(
    name="matrix-synapse",
    version=version,
    packages=find_packages(exclude=["tests", "tests.*"]),
    description="Reference homeserver for the Matrix decentralised comms protocol",
    install_requires=REQUIREMENTS,
    extras_require=CONDITIONAL_REQUIREMENTS,
    include_package_data=True,
    zip_safe=False,
    long_description=long_description,
    long_description_content_type="text/x-rst",
    python_requires="~=3.7",
    entry_points={
        "console_scripts": [
            # Application
            "synapse_homeserver = synapse.app.homeserver:main",
            "synapse_worker = synapse.app.generic_worker:main",
            "synctl = synapse._scripts.synctl:main",
            # Scripts
            "export_signing_key = synapse._scripts.export_signing_key:main",
            "generate_config = synapse._scripts.generate_config:main",
            "generate_log_config = synapse._scripts.generate_log_config:main",
            "generate_signing_key = synapse._scripts.generate_signing_key:main",
            "hash_password = synapse._scripts.hash_password:main",
            "register_new_matrix_user = synapse._scripts.register_new_matrix_user:main",
            "synapse_port_db = synapse._scripts.synapse_port_db:main",
            "synapse_review_recent_signups = synapse._scripts.review_recent_signups:main",
            "update_synapse_database = synapse._scripts.update_synapse_database:main",
        ]
    },
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Topic :: Communications :: Chat",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
    ],
    cmdclass={"test": TestCommand},
)
@@ -85,12 +85,19 @@ class SortedDict(Dict[_KT, _VT]):
    def popitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
    def peekitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
    def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ...
    @overload
    def update(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
    @overload
    def update(self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
    @overload
    def update(self, **kwargs: _VT) -> None: ...
    # Mypy now reports the first overload as an error, because typeshed widened the type
    # of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
    # https://github.com/python/typeshed/pull/6653
    # Since sorteddicts don't change the signature of `update` from that of `dict`, we
    # let the stubs for `update` inherit from the stubs for `dict`. (I suspect we could
    # do the same for many other methods.) We leave the stubs commented to better track
    # how this file has evolved from the original stubs.
    # @overload
    # def update(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
    # @overload
    # def update(self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ...
    # @overload
    # def update(self, **kwargs: _VT) -> None: ...
    def __reduce__(
        self,
    ) -> Tuple[
@@ -103,7 +110,7 @@ class SortedDict(Dict[_KT, _VT]):
        self,
        start: Optional[int] = ...,
        stop: Optional[int] = ...,
        reverse=bool,
        reverse: bool = ...,
    ) -> Iterator[_KT]: ...
    def bisect_left(self, value: _KT) -> int: ...
    def bisect_right(self, value: _KT) -> int: ...
@@ -115,9 +122,7 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
    def __getitem__(self, index: slice) -> List[_KT_co]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...

class SortedItemsView(  # type: ignore
    ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]
):
class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
    def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
    @overload
    def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...

@@ -81,7 +81,7 @@ class SortedList(MutableSequence[_T]):
        self,
        start: Optional[int] = ...,
        stop: Optional[int] = ...,
        reverse=bool,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def _islice(
        self,
@@ -153,14 +153,14 @@ class SortedKeyList(SortedList[_T]):
        maximum: Optional[int] = ...,
        inclusive: Tuple[bool, bool] = ...,
        reverse: bool = ...,
    ): ...
    ) -> Iterator[_T]: ...
    def irange_key(
        self,
        min_key: Optional[Any] = ...,
        max_key: Optional[Any] = ...,
        inclusive: Tuple[bool, bool] = ...,
        reserve: bool = ...,
    ): ...
    ) -> Iterator[_T]: ...
    def bisect_left(self, value: _T) -> int: ...
    def bisect_right(self, value: _T) -> int: ...
    def bisect(self, value: _T) -> int: ...

@@ -103,7 +103,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
        self,
        start: Optional[int] = ...,
        stop: Optional[int] = ...,
        reverse=bool,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def irange(
        self,
@@ -18,6 +18,8 @@ from typing import Any, List, Optional, Type, Union

from twisted.internet import protocol
from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IAddress
from twisted.python.failure import Failure

class RedisProtocol(protocol.Protocol):
    def publish(self, channel: str, message: bytes) -> "Deferred[None]": ...
@@ -34,11 +36,14 @@ class RedisProtocol(protocol.Protocol):
    def get(self, key: str) -> "Deferred[Any]": ...

class SubscriberProtocol(RedisProtocol):
    def __init__(self, *args, **kwargs): ...
    def __init__(self, *args: object, **kwargs: object): ...
    password: Optional[str]
    def subscribe(self, channels: Union[str, List[str]]): ...
    def connectionMade(self): ...
    def connectionLost(self, reason): ...
    def subscribe(self, channels: Union[str, List[str]]) -> "Deferred[None]": ...
    def connectionMade(self) -> None: ...
    # type-ignore: twisted.internet.protocol.Protocol provides a default argument for
    # `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's
    # actually specified in twisted.internet.interfaces.IProtocol.
    def connectionLost(self, reason: Failure) -> None: ...  # type: ignore[override]

def lazyConnection(
    host: str = ...,
@@ -74,7 +79,7 @@ class RedisFactory(protocol.ReconnectingClientFactory):
        replyTimeout: Optional[int] = None,
        convertNumbers: Optional[int] = True,
    ): ...
    def buildProtocol(self, addr) -> RedisProtocol: ...
    def buildProtocol(self, addr: IAddress) -> RedisProtocol: ...

class SubscriberFactory(RedisFactory):
    def __init__(self) -> None: ...

@@ -20,6 +20,8 @@ import json
import os
import sys

from matrix_common.versionstring import get_distribution_version_string

# Check that we're not running on an unsupported Python version.
if sys.version_info < (3, 7):
    print("Synapse requires Python 3.7 or above.")
@@ -68,7 +70,7 @@ try:
except ImportError:
    pass

__version__ = "1.57.0"
__version__ = get_distribution_version_string("matrix-synapse")

if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
    # We import here so that we don't have to install a bunch of deps when
@@ -187,7 +187,7 @@ class Auth:
        Once get_user_by_req has set up the opentracing span, this does the actual work.
        """
        try:
            ip_addr = request.getClientIP()
            ip_addr = request.getClientAddress().host
            user_agent = get_request_user_agent(request)

            access_token = self.get_access_token_from_request(request)
@@ -356,7 +356,7 @@ class Auth:
            return None, None, None

        if app_service.ip_range_whitelist:
            ip_address = IPAddress(request.getClientIP())
            ip_address = IPAddress(request.getClientAddress().host)
            if ip_address not in app_service.ip_range_whitelist:
                return None, None, None

@@ -417,7 +417,8 @@ class Auth:
        """

        if rights == "access":
            # first look in the database
            # First look in the database to see if the access token is present
            # as an opaque token.
            r = await self.store.get_user_by_access_token(token)
            if r:
                valid_until_ms = r.valid_until_ms
@@ -434,7 +435,8 @@ class Auth:

                return r

        # otherwise it needs to be a valid macaroon
        # If the token isn't found in the database, then it could still be a
        # macaroon, so we check that here.
        try:
            user_id, guest = self._parse_and_validate_macaroon(token, rights)

@@ -482,8 +484,12 @@ class Auth:
            TypeError,
            ValueError,
        ) as e:
            logger.warning("Invalid macaroon in auth: %s %s", type(e), e)
            raise InvalidClientTokenError("Invalid macaroon passed.")
            logger.warning(
                "Invalid access token in auth: %s %s.",
                type(e),
                e,
            )
            raise InvalidClientTokenError("Invalid access token passed.")

    def _parse_and_validate_macaroon(
        self, token: str, rights: str = "access"
@@ -504,10 +510,7 @@ class Auth:
        try:
            macaroon = pymacaroons.Macaroon.deserialize(token)
        except Exception:  # deserialize can throw more-or-less anything
            # doesn't look like a macaroon: treat it as an opaque token which
            # must be in the database.
            # TODO: it would be nice to get rid of this, but apparently some
            # people use access tokens which aren't macaroons
            # The access token doesn't look like a macaroon.
            raise _InvalidMacaroonException()

        try:
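The reworded comments above describe a two-step validation order: look the token up as an opaque database token first, then fall back to macaroon parsing. A condensed, self-contained sketch of that classification step, using a hypothetical helper name (the real logic lives in Auth):

import pymacaroons

def classify_token_sketch(token: str) -> str:
    try:
        pymacaroons.Macaroon.deserialize(token)
    except Exception:  # deserialize can throw more-or-less anything
        return "opaque"  # must be looked up in the database
    return "macaroon"  # validate its caveats instead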
@@ -179,8 +179,6 @@ class RelationTypes:
    REPLACE: Final = "m.replace"
    REFERENCE: Final = "m.reference"
    THREAD: Final = "m.thread"
    # TODO Remove this in Synapse >= v1.57.0.
    UNSTABLE_THREAD: Final = "io.element.thread"


class LimitBlockingTypes:
@@ -257,7 +255,5 @@ class GuestAccess:

class ReceiptTypes:
    READ: Final = "m.read"


class ReadReceiptEventFields:
    MSC2285_HIDDEN: Final = "org.matrix.msc2285.hidden"
    READ_PRIVATE: Final = "org.matrix.msc2285.read.private"
    FULLY_READ: Final = "m.fully_read"

@@ -79,6 +79,8 @@ class Codes:
    UNABLE_AUTHORISE_JOIN = "M_UNABLE_TO_AUTHORISE_JOIN"
    UNABLE_TO_GRANT_JOIN = "M_UNABLE_TO_GRANT_JOIN"

    UNREDACTED_CONTENT_DELETED = "FI.MAU.MSC2815_UNREDACTED_CONTENT_DELETED"


class CodeMessageException(RuntimeError):
    """An exception with integer code and message string attributes.
@@ -483,6 +485,22 @@ class RequestSendFailed(RuntimeError):
        self.can_retry = can_retry


class UnredactedContentDeletedError(SynapseError):
    def __init__(self, content_keep_ms: Optional[int] = None):
        super().__init__(
            404,
            "The content for that event has already been erased from the database",
            errcode=Codes.UNREDACTED_CONTENT_DELETED,
        )
        self.content_keep_ms = content_keep_ms

    def error_dict(self) -> "JsonDict":
        extra = {}
        if self.content_keep_ms is not None:
            extra = {"fi.mau.msc2815.content_keep_ms": self.content_keep_ms}
        return cs_error(self.msg, self.errcode, **extra)


def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs: Any) -> "JsonDict":
    """Utility method for constructing an error response for client-server
    interactions.
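For illustration, this is the JSON body that error_dict() above would produce; the shape follows the code, but the content_keep_ms value here is hypothetical:

# Rendering of UnredactedContentDeletedError(content_keep_ms=86400000).error_dict():
error_body = {
    "errcode": "FI.MAU.MSC2815_UNREDACTED_CONTENT_DELETED",
    "error": "The content for that event has already been erased from the database",
    "fi.mau.msc2815.content_keep_ms": 86400000,
}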
@@ -19,6 +19,7 @@ from typing import (
    TYPE_CHECKING,
    Awaitable,
    Callable,
    Collection,
    Dict,
    Iterable,
    List,
@@ -89,9 +90,7 @@ ROOM_EVENT_FILTER_SCHEMA = {
        "org.matrix.not_labels": {"type": "array", "items": {"type": "string"}},
        # MSC3440, filtering by event relations.
        "related_by_senders": {"type": "array", "items": {"type": "string"}},
        "io.element.relation_senders": {"type": "array", "items": {"type": "string"}},
        "related_by_rel_types": {"type": "array", "items": {"type": "string"}},
        "io.element.relation_types": {"type": "array", "items": {"type": "string"}},
    },
}

@@ -323,16 +322,6 @@ class Filter:
        self.related_by_senders = self.filter_json.get("related_by_senders", None)
        self.related_by_rel_types = self.filter_json.get("related_by_rel_types", None)

        # Fallback to the unstable prefix if the stable version is not given.
        if hs.config.experimental.msc3440_enabled:
            self.related_by_senders = self.related_by_senders or self.filter_json.get(
                "io.element.relation_senders", None
            )
            self.related_by_rel_types = (
                self.related_by_rel_types
                or self.filter_json.get("io.element.relation_types", None)
            )

    def filters_all_types(self) -> bool:
        return "*" in self.not_types

@@ -456,9 +445,9 @@ class Filter:
        return room_ids

    async def _check_event_relations(
        self, events: Iterable[FilterEvent]
        self, events: Collection[FilterEvent]
    ) -> List[FilterEvent]:
        # The event IDs to check, mypy doesn't understand the ifinstance check.
        # The event IDs to check, mypy doesn't understand the isinstance check.
        event_ids = [event.event_id for event in events if isinstance(event, EventBase)]  # type: ignore[attr-defined]
        event_ids_to_keep = set(
            await self._store.events_have_relations(
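With the unstable io.element.* fallbacks removed above, clients use only the stable MSC3440 keys that remain in ROOM_EVENT_FILTER_SCHEMA. An illustrative room event filter (the user ID is made up):

room_event_filter = {
    "related_by_senders": ["@alice:example.com"],  # stable key, was io.element.relation_senders
    "related_by_rel_types": ["m.thread"],          # stable key, was io.element.relation_types
}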
@@ -38,6 +38,7 @@ from typing import (

from cryptography.utils import CryptographyDeprecationWarning
from matrix_common.versionstring import get_distribution_version_string
from typing_extensions import ParamSpec

import twisted
from twisted.internet import defer, error, reactor as _reactor
@@ -48,7 +49,6 @@ from twisted.logger import LoggingFile, LogLevel
from twisted.protocols.tls import TLSMemoryBIOFactory
from twisted.python.threadpool import ThreadPool

import synapse
from synapse.api.constants import MAX_PDU_SIZE
from synapse.app import check_bind_error
from synapse.app.phone_stats_home import start_phone_stats_home
@@ -60,6 +60,7 @@ from synapse.events.spamcheck import load_legacy_spam_checkers
from synapse.events.third_party_rules import load_legacy_third_party_event_rules
from synapse.handlers.auth import load_legacy_password_auth_providers
from synapse.logging.context import PreserveLoggingContext
from synapse.logging.opentracing import init_tracer
from synapse.metrics import install_gc_manager, register_threadpool
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.metrics.jemalloc import setup_jemalloc_stats
@@ -81,11 +82,12 @@ logger = logging.getLogger(__name__)

# list of tuples of function, args list, kwargs dict
_sighup_callbacks: List[
    Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
    Tuple[Callable[..., None], Tuple[object, ...], Dict[str, object]]
] = []
P = ParamSpec("P")


def register_sighup(func: Callable[..., None], *args: Any, **kwargs: Any) -> None:
def register_sighup(func: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None:
    """
    Register a function to be called when a SIGHUP occurs.

@@ -93,7 +95,9 @@ def register_sighup(func: Callable[..., None], *args: Any, **kwargs: Any) -> Non
        func: Function to be called when sent a SIGHUP signal.
        *args, **kwargs: args and kwargs to be passed to the target function.
    """
    _sighup_callbacks.append((func, args, kwargs))
    # This type-ignore should be redundant once we use a mypy release with
    # https://github.com/python/mypy/pull/12668.
    _sighup_callbacks.append((func, args, kwargs))  # type: ignore[arg-type]


def start_worker_reactor(
@@ -214,7 +218,9 @@ def redirect_stdio_to_logs() -> None:
    print("Redirected stdout/stderr to logs")


def register_start(cb: Callable[..., Awaitable], *args: Any, **kwargs: Any) -> None:
def register_start(
    cb: Callable[P, Awaitable], *args: P.args, **kwargs: P.kwargs
) -> None:
    """Register a callback with the reactor, to be called once it is running

    This can be used to initialise parts of the system which require an asynchronous
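The ParamSpec change above makes mypy propagate the registered callback's own parameter types to the registration call. A self-contained sketch of the pattern, with hypothetical names:

from typing import Callable, Dict, List, Tuple
from typing_extensions import ParamSpec

P = ParamSpec("P")
_callbacks: List[Tuple[Callable[..., None], Tuple[object, ...], Dict[str, object]]] = []

def register(func: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None:
    # Store the callback with its arguments for later invocation.
    _callbacks.append((func, args, kwargs))

def reload_config(path: str) -> None:
    print(f"reloading {path}")

register(reload_config, "/etc/synapse/homeserver.yaml")  # type-checks
# register(reload_config, 42)  # mypy error: int is not compatible with str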
@@ -431,7 +437,7 @@ async def start(hs: "HomeServer") -> None:
    refresh_certificate(hs)

    # Start the tracer
    synapse.logging.opentracing.init_tracer(hs)  # type: ignore[attr-defined] # noqa
    init_tracer(hs)  # noqa

    # Instantiate the modules so they can register their web resources to the module API
    # before we start the listeners.

@@ -210,7 +210,7 @@ def start(config_options: List[str]) -> None:
    config.logging.no_redirect_stdio = True

    # Explicitly disable background processes
    config.server.update_user_directory = False
    config.worker.should_update_user_directory = False
    config.worker.run_background_tasks = False
    config.worker.start_pushers = False
    config.worker.pusher_shard_config.instances = []

@@ -441,38 +441,6 @@ def start(config_options: List[str]) -> None:
            "synapse.app.user_dir",
        )

    if config.worker.worker_app == "synapse.app.appservice":
        if config.appservice.notify_appservices:
            sys.stderr.write(
                "\nThe appservices must be disabled in the main synapse process"
                "\nbefore they can be run in a separate worker."
                "\nPlease add ``notify_appservices: false`` to the main config"
                "\n"
            )
            sys.exit(1)

        # Force the appservice to start since they will be disabled in the main config
        config.appservice.notify_appservices = True
    else:
        # For other worker types we force this to off.
        config.appservice.notify_appservices = False

    if config.worker.worker_app == "synapse.app.user_dir":
        if config.server.update_user_directory:
            sys.stderr.write(
                "\nThe update_user_directory must be disabled in the main synapse process"
                "\nbefore they can be run in a separate worker."
                "\nPlease add ``update_user_directory: false`` to the main config"
                "\n"
            )
            sys.exit(1)

        # Force the pushers to start since they will be disabled in the main config
        config.server.update_user_directory = True
    else:
        # For other worker types we force this to off.
        config.server.update_user_directory = False

    synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
    synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage

@@ -42,7 +42,7 @@ logger = logging.getLogger(__name__)
# user ID -> {device ID -> {algorithm -> count}}
TransactionOneTimeKeyCounts = Dict[str, Dict[str, Dict[str, int]]]

# Type for the `device_unused_fallback_keys` field in an appservice transaction
# Type for the `device_unused_fallback_key_types` field in an appservice transaction
#   user ID -> {device ID -> [algorithm]}
TransactionUnusedFallbackKeys = Dict[str, Dict[str, List[str]]]
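To make the nested shape concrete, a hypothetical value matching TransactionUnusedFallbackKeys (the user and device IDs are made up):

# One user with one device that still has an unused signed_curve25519 fallback key.
unused_fallback_keys = {
    "@alice:example.com": {
        "DEVICEID1": ["signed_curve25519"],
    },
}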
@@ -17,6 +17,7 @@ import urllib.parse
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple

from prometheus_client import Counter
from typing_extensions import TypeGuard

from synapse.api.constants import EventTypes, Membership, ThirdPartyEntityKind
from synapse.api.errors import CodeMessageException
@@ -66,7 +67,7 @@ def _is_valid_3pe_metadata(info: JsonDict) -> bool:
    return True


def _is_valid_3pe_result(r: JsonDict, field: str) -> bool:
def _is_valid_3pe_result(r: object, field: str) -> TypeGuard[JsonDict]:
    if not isinstance(r, dict):
        return False

@@ -278,7 +279,7 @@ class ApplicationServiceApi(SimpleHttpClient):
            ] = one_time_key_counts
            if unused_fallback_keys:
                body[
                    "org.matrix.msc3202.device_unused_fallback_keys"
                    "org.matrix.msc3202.device_unused_fallback_key_types"
                ] = unused_fallback_keys
            if device_list_summary:
                body["org.matrix.msc3202.device_lists"] = {
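The TypeGuard return type above lets mypy narrow `r` from `object` to JsonDict once the check passes. A minimal, simplified demonstration of the pattern (the real JsonDict alias lives in synapse.types, and the real check also validates the given field):

from typing import Any, Dict
from typing_extensions import TypeGuard

JsonDict = Dict[str, Any]

def is_json_dict(r: object) -> TypeGuard[JsonDict]:
    return isinstance(r, dict)

def handle(r: object) -> None:
    if is_json_dict(r):
        # mypy now treats r as JsonDict here, so dict access type-checks.
        print(r.get("protocol"))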
@@ -33,7 +33,6 @@ class AppServiceConfig(Config):

    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        self.app_service_config_files = config.get("app_service_config_files", [])
        self.notify_appservices = config.get("notify_appservices", True)
        self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)

    def generate_config_section(cls, **kwargs: Any) -> str:
@@ -56,7 +55,8 @@ def load_appservices(
) -> List[ApplicationService]:
    """Returns a list of Application Services from the config files."""
    if not isinstance(config_files, list):
        logger.warning("Expected %s to be a list of AS config files.", config_files)
        # type-ignore: this function gets arbitrary json value; we do use this path.
        logger.warning("Expected %s to be a list of AS config files.", config_files)  # type: ignore[unreachable]
        return []

    # Dicts of value -> filename
@@ -26,16 +26,13 @@ class ExperimentalConfig(Config):
    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        experimental = config.get("experimental_features") or {}

        # MSC3440 (thread relation)
        self.msc3440_enabled: bool = experimental.get("msc3440_enabled", False)

        # MSC3026 (busy presence state)
        self.msc3026_enabled: bool = experimental.get("msc3026_enabled", False)

        # MSC2716 (importing historical messages)
        self.msc2716_enabled: bool = experimental.get("msc2716_enabled", False)

        # MSC2285 (hidden read receipts)
        # MSC2285 (private read receipts)
        self.msc2285_enabled: bool = experimental.get("msc2285_enabled", False)

        # MSC3244 (room version capabilities)
@@ -77,7 +74,13 @@
        self.msc3720_enabled: bool = experimental.get("msc3720_enabled", False)

        # The deprecated groups feature.
        self.groups_enabled: bool = experimental.get("groups_enabled", True)
        self.groups_enabled: bool = experimental.get("groups_enabled", False)

        # MSC2654: Unread counts
        self.msc2654_enabled: bool = experimental.get("msc2654_enabled", False)

        # MSC2815 (allow room moderators to view redacted event content)
        self.msc2815_enabled: bool = experimental.get("msc2815_enabled", False)

        # MSC3786 (Add a default push rule to ignore m.room.server_acl events)
        self.msc3786_enabled: bool = experimental.get("msc3786_enabled", False)

@@ -46,7 +46,7 @@ class FederationConfig(Config):
        )

        self.allow_device_name_lookup_over_federation = config.get(
            "allow_device_name_lookup_over_federation", True
            "allow_device_name_lookup_over_federation", False
        )

    def generate_config_section(self, **kwargs: Any) -> str:
@@ -81,11 +81,11 @@ class FederationConfig(Config):
        #
        #allow_profile_lookup_over_federation: false

        # Uncomment to disable device display name lookup over federation. By default, the
        # Federation API allows other homeservers to obtain device display names of any user
        # on this homeserver. Defaults to 'true'.
        # Uncomment to allow device display name lookup over federation. By default, the
        # Federation API prevents other homeservers from obtaining the display names of
        # user devices on this homeserver. Defaults to 'false'.
        #
        #allow_device_name_lookup_over_federation: false
        #allow_device_name_lookup_over_federation: true
        """

@@ -110,13 +110,6 @@ loggers:
        # information such as access tokens.
        level: INFO

    twisted:
        # We send the twisted logging directly to the file handler,
        # to work around https://github.com/matrix-org/synapse/issues/3471
        # when using "buffer" logger. Use "console" to log to stderr instead.
        handlers: [file]
        propagate: false

root:
    level: INFO

@@ -43,6 +43,9 @@ class RegistrationConfig(Config):
        self.registration_requires_token = config.get(
            "registration_requires_token", False
        )
        self.enable_registration_token_3pid_bypass = config.get(
            "enable_registration_token_3pid_bypass", False
        )
        self.registration_shared_secret = config.get("registration_shared_secret")

        self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
@@ -309,6 +312,12 @@ class RegistrationConfig(Config):
        #
        #registration_requires_token: true

        # Allow users to submit a token during registration to bypass any required 3pid
        # steps configured in `registrations_require_3pid`.
        # Defaults to false, requiring that registration tokens (if enabled) complete a 3pid flow.
        #
        #enable_registration_token_3pid_bypass: false

        # If set, allows registration of standard or admin accounts by anyone who
        # has the shared secret, even if registration is otherwise disabled.
        #
@@ -186,7 +186,7 @@ KNOWN_RESOURCES = {
class HttpResourceConfig:
    names: List[str] = attr.ib(
        factory=list,
        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),  # type: ignore
        validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),
    )
    compress: bool = attr.ib(
        default=False,
@@ -231,9 +231,7 @@ class ManholeConfig:
class LimitRemoteRoomsConfig:
    enabled: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
    complexity: Union[float, int] = attr.ib(
        validator=attr.validators.instance_of(
            (float, int)  # type: ignore[arg-type] # noqa
        ),
        validator=attr.validators.instance_of((float, int)),  # noqa
        default=1.0,
    )
    complexity_error: str = attr.ib(
@@ -321,10 +319,6 @@ class ServerConfig(Config):
            self.presence_router_config,
        ) = load_module(presence_router_config, ("presence", "presence_router"))

        # Whether to update the user directory or not. This should be set to
        # false only if we are updating the user directory in a worker
        self.update_user_directory = config.get("update_user_directory", True)

        # whether to enable the media repository endpoints. This should be set
        # to false if the media repository is running as a separate endpoint;
        # doing so ensures that we will not run cache cleanup jobs on the
@@ -415,6 +409,7 @@ class ServerConfig(Config):
        )

        self.mau_trial_days = config.get("mau_trial_days", 0)
        self.mau_appservice_trial_days = config.get("mau_appservice_trial_days", {})
        self.mau_limit_alerting = config.get("mau_limit_alerting", True)

        # How long to keep redacted events in the database in unredacted form
@@ -680,14 +675,6 @@ class ServerConfig(Config):
            config.get("use_account_validity_in_account_status") or False
        )

        # This is a temporary option that enables fully using the new
        # `device_lists_changes_in_room` without the backwards compat code. This
        # is primarily for testing. If enabled the server should *not* be
        # downgraded, as it may lead to missing device list updates.
        self.use_new_device_lists_changes_in_room = (
            config.get("use_new_device_lists_changes_in_room") or False
        )

        self.rooms_to_exclude_from_sync: List[str] = (
            config.get("exclude_rooms_from_sync") or []
        )
@@ -1115,6 +1102,11 @@ class ServerConfig(Config):
        # sign up in a short space of time never to return after their initial
        # session.
        #
        # The option `mau_appservice_trial_days` is similar to `mau_trial_days`, but
        # applies a different trial number if the user was registered by an appservice.
        # A value of 0 means no trial days are applied. Appservices not listed in this
        # dictionary use the value of `mau_trial_days` instead.
        #
        # 'mau_limit_alerting' is a means of limiting client side alerting
        # should the mau limit be reached. This is useful for small instances
        # where the admin has 5 mau seats (say) for 5 specific people and no
@@ -1125,6 +1117,8 @@ class ServerConfig(Config):
        #max_mau_value: 50
        #mau_trial_days: 2
        #mau_limit_alerting: false
        #mau_appservice_trial_days:
        #  "appservice-id": 1

        # If enabled, the metrics for the number of monthly active users will
        # be populated, however no one will be limited. If limit_usage_by_mau
@@ -14,7 +14,8 @@
# limitations under the License.

import argparse
from typing import Any, List, Union
import logging
from typing import Any, Dict, List, Union

import attr

@@ -42,6 +43,13 @@ synapse process before they can be run in a separate worker.
Please add ``start_pushers: false`` to the main config
"""

_DEPRECATED_WORKER_DUTY_OPTION_USED = """
The '%s' configuration option is deprecated and will be removed in a future
Synapse version. Please use ``%s: name_of_worker`` instead.
"""

logger = logging.getLogger(__name__)


def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]:
    """Helper for allowing parsing a string or list of strings to a config

@@ -296,6 +304,112 @@ class WorkerConfig(Config):
            self.worker_name is None and background_tasks_instance == "master"
        ) or self.worker_name == background_tasks_instance

        self.should_notify_appservices = self._should_this_worker_perform_duty(
            config,
            legacy_master_option_name="notify_appservices",
            legacy_worker_app_name="synapse.app.appservice",
            new_option_name="notify_appservices_from_worker",
        )

        self.should_update_user_directory = self._should_this_worker_perform_duty(
            config,
            legacy_master_option_name="update_user_directory",
            legacy_worker_app_name="synapse.app.user_dir",
            new_option_name="update_user_directory_from_worker",
        )

    def _should_this_worker_perform_duty(
        self,
        config: Dict[str, Any],
        legacy_master_option_name: str,
        legacy_worker_app_name: str,
        new_option_name: str,
    ) -> bool:
        """
        Figures out whether this worker should perform a certain duty.

        This function is temporary and is only to deal with the complexity
        of allowing old, transitional and new configurations all at once.

        Contradictions between the legacy and new part of a transitional configuration
        will lead to a ConfigError.

        Parameters:
            config: The config dictionary
            legacy_master_option_name: The name of a legacy option, whose value is boolean,
                specifying whether it's the master that should handle a certain duty.
                e.g. "notify_appservices"
            legacy_worker_app_name: The name of a legacy Synapse worker application
                that would traditionally perform this duty.
                e.g. "synapse.app.appservice"
            new_option_name: The name of the new option, whose value is the name of a
                designated worker to perform the duty.
                e.g. "notify_appservices_from_worker"
        """

        # None means 'unspecified'; True means 'run here' and False means
        # 'don't run here'.
        new_option_should_run_here = None
        if new_option_name in config:
            designated_worker = config[new_option_name] or "master"
            new_option_should_run_here = (
                designated_worker == "master" and self.worker_name is None
            ) or designated_worker == self.worker_name

        legacy_option_should_run_here = None
        if legacy_master_option_name in config:
            run_on_master = bool(config[legacy_master_option_name])

            legacy_option_should_run_here = (
                self.worker_name is None and run_on_master
            ) or (self.worker_app == legacy_worker_app_name and not run_on_master)

            # Suggest using the new option instead.
            logger.warning(
                _DEPRECATED_WORKER_DUTY_OPTION_USED,
                legacy_master_option_name,
                new_option_name,
            )

        if self.worker_app == legacy_worker_app_name and config.get(
            legacy_master_option_name, True
        ):
            # As an extra bit of complication, we need to check that the
            # specialised worker is only used if the legacy config says the
            # master isn't performing the duties.
            raise ConfigError(
                f"Cannot use deprecated worker app type '{legacy_worker_app_name}' whilst deprecated option '{legacy_master_option_name}' is not set to false.\n"
                f"Consider setting `worker_app: synapse.app.generic_worker` and using the '{new_option_name}' option instead.\n"
                f"The '{new_option_name}' option replaces '{legacy_master_option_name}'."
            )

        if new_option_should_run_here is None and legacy_option_should_run_here is None:
            # Neither option specified; the fallback behaviour is to run on the main process
            return self.worker_name is None

        if (
            new_option_should_run_here is not None
            and legacy_option_should_run_here is not None
        ):
            # Both options specified; ensure they match!
            if new_option_should_run_here != legacy_option_should_run_here:
                update_worker_type = (
                    " and set worker_app: synapse.app.generic_worker"
                    if self.worker_app == legacy_worker_app_name
                    else ""
                )
                # If the values conflict, we suggest the admin removes the legacy option
                # for simplicity.
                raise ConfigError(
                    f"Conflicting configuration options: {legacy_master_option_name} (legacy), {new_option_name} (new).\n"
                    f"Suggestion: remove {legacy_master_option_name}{update_worker_type}.\n"
                )

        # We've already validated that these aren't conflicting; now just see if
        # either is True.
        # (By this point, these are either the same value or only one is not None.)
        return bool(new_option_should_run_here or legacy_option_should_run_here)

    def generate_config_section(self, **kwargs: Any) -> str:
        return """\
        ## Workers ##

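To make the transitional semantics concrete, here is a reduced sketch of the resolution rule for one duty. It is illustrative only; the real logic, including the legacy-option handling and conflict errors, is `_should_this_worker_perform_duty` above:

from typing import Optional

config = {"notify_appservices_from_worker": "worker1"}

def should_notify_appservices(worker_name: Optional[str]) -> bool:
    # An absent or falsy value designates the main process ("master").
    designated = config.get("notify_appservices_from_worker") or "master"
    return (
        designated == "master" and worker_name is None
    ) or designated == worker_name

assert should_notify_appservices("worker1") is True   # the designated worker
assert should_notify_appservices(None) is False       # the main process
assert should_notify_appservices("worker2") is False  # any other worker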
@@ -213,10 +213,17 @@ class _EventInternalMetadata:
        return self.outlier

    def is_out_of_band_membership(self) -> bool:
        """Whether this is an out of band membership, like an invite or an invite
        rejection. This is needed as those events are marked as outliers, but
        they still need to be processed as if they're new events (e.g. updating
        invite state in the database, relaying to clients, etc).
        """Whether this event is an out-of-band membership.

        OOB memberships are a special case of outlier events: they are membership events
        for federated rooms that we aren't full members of. Examples include invites
        received over federation, and rejections for such invites.

        The concept of an OOB membership is needed because these events need to be
        processed as if they're new regular events (e.g. updating membership state in
        the database, relaying to clients via /sync, etc) despite being outliers.

        See also https://matrix-org.github.io/synapse/develop/development/room-dag-concepts.html#out-of-band-membership-events.

        (Added in synapse 0.99.0, so may be unreliable for events received before that)
        """

@@ -22,11 +22,16 @@ from typing import (
    List,
    Optional,
    Set,
    TypeVar,
    Union,
)

from typing_extensions import ParamSpec

from twisted.internet.defer import CancelledError

from synapse.api.presence import UserPresenceState
from synapse.util.async_helpers import maybe_awaitable
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable

if TYPE_CHECKING:
    from synapse.server import HomeServer

@@ -40,6 +45,10 @@ GET_INTERESTED_USERS_CALLBACK = Callable[[str], Awaitable[Union[Set[str], str]]]
logger = logging.getLogger(__name__)


P = ParamSpec("P")
R = TypeVar("R")


def load_legacy_presence_router(hs: "HomeServer") -> None:
    """Wrapper that loads a presence router module configured using the old
    configuration, and registers the hooks they implement.

@@ -63,13 +72,15 @@ def load_legacy_presence_router(hs: "HomeServer") -> None:

    # All methods that the module provides should be async, but this wasn't enforced
    # in the old module system, so we wrap them if needed
    def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]:
    def async_wrapper(
        f: Optional[Callable[P, R]]
    ) -> Optional[Callable[P, Awaitable[R]]]:
        # f might be None if the callback isn't implemented by the module. In this
        # case we don't want to register a callback at all so we return None.
        if f is None:
            return None

        def run(*args: Any, **kwargs: Any) -> Awaitable:
        def run(*args: P.args, **kwargs: P.kwargs) -> Awaitable[R]:
            # Assertion required because mypy can't prove we won't change `f`
            # back to `None`. See
            # https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions

@@ -80,7 +91,7 @@ def load_legacy_presence_router(hs: "HomeServer") -> None:
        return run

    # Register the hooks through the module API.
    hooks = {
    hooks: Dict[str, Optional[Callable[..., Any]]] = {
        hook: async_wrapper(getattr(presence_router, hook, None))
        for hook in presence_router_methods
    }

@@ -147,7 +158,11 @@ class PresenceRouter:
        # run all the callbacks for get_users_for_states and combine the results
        for callback in self._get_users_for_states_callbacks:
            try:
                result = await callback(state_updates)
                # Note: result is an object here, because we don't trust modules to
                # return the types they're supposed to.
                result: object = await delay_cancellation(callback(state_updates))
            except CancelledError:
                raise
            except Exception as e:
                logger.warning("Failed to run module API callback %s: %s", callback, e)
                continue

@@ -199,7 +214,9 @@ class PresenceRouter:
        # run all the callbacks for get_interested_users and combine the results
        for callback in self._get_interested_users_callbacks:
            try:
                result = await callback(user_id)
                result = await delay_cancellation(callback(user_id))
            except CancelledError:
                raise
            except Exception as e:
                logger.warning("Failed to run module API callback %s: %s", callback, e)
                continue

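The `ParamSpec` change above preserves the wrapped callback's signature for type checkers. A self-contained sketch of the same pattern, with `maybe_awaitable` reduced to a trivial stand-in (the real helper also handles values that are already awaitable; `typing_extensions` is required as in the diff):

from typing import Awaitable, Callable, Optional, TypeVar

from typing_extensions import ParamSpec

P = ParamSpec("P")
R = TypeVar("R")

async def maybe_awaitable(value: R) -> R:
    # Simplified stand-in for synapse.util.async_helpers.maybe_awaitable.
    return value

def async_wrapper(
    f: Optional[Callable[P, R]]
) -> Optional[Callable[P, Awaitable[R]]]:
    if f is None:
        return None

    def run(*args: P.args, **kwargs: P.kwargs) -> Awaitable[R]:
        # P.args/P.kwargs tie run()'s parameters to f's, so call sites of
        # the wrapped hook are checked against the original signature.
        return maybe_awaitable(f(*args, **kwargs))

    return run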
@@ -15,12 +15,10 @@ from typing import TYPE_CHECKING, List, Optional, Tuple, Union

import attr
from frozendict import frozendict

from twisted.internet.defer import Deferred
from typing_extensions import Literal

from synapse.appservice import ApplicationService
from synapse.events import EventBase
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.types import JsonDict, StateMap

if TYPE_CHECKING:

@@ -60,6 +58,9 @@ class EventContext:
        If ``state_group`` is None (ie, the event is an outlier),
        ``state_group_before_event`` will always also be ``None``.

    state_delta_due_to_event: If `state_group` and `state_group_before_event` are not None
        then this is the delta of the state between the two groups.

    prev_group: If it is known, ``state_group``'s prev_group. Note that this being
        None does not necessarily mean that ``state_group`` does not have
        a prev_group!

@@ -78,73 +79,47 @@ class EventContext:
    app_service: If this event is being sent by a (local) application service, that
        app service.

    _current_state_ids: The room state map, including this event - ie, the state
        in ``state_group``.

        (type, state_key) -> event_id

        For an outlier, this is {}

        Note that this is a private attribute: it should be accessed via
        ``get_current_state_ids``. _AsyncEventContext impl calculates this
        on-demand: it will be None until that happens.

    _prev_state_ids: The room state map, excluding this event - ie, the state
        in ``state_group_before_event``. For a non-state
        event, this will be the same as _current_state_events.

        Note that it is a completely different thing to prev_group!

        (type, state_key) -> event_id

        For an outlier, this is {}

        As with _current_state_ids, this is a private attribute. It should be
        accessed via get_prev_state_ids.

    partial_state: if True, we may be storing this event with a temporary,
        incomplete state.
    """

    rejected: Union[bool, str] = False
    _storage: "Storage"
    rejected: Union[Literal[False], str] = False
    _state_group: Optional[int] = None
    state_group_before_event: Optional[int] = None
    _state_delta_due_to_event: Optional[StateMap[str]] = None
    prev_group: Optional[int] = None
    delta_ids: Optional[StateMap[str]] = None
    app_service: Optional[ApplicationService] = None

    _current_state_ids: Optional[StateMap[str]] = None
    _prev_state_ids: Optional[StateMap[str]] = None

    partial_state: bool = False

    @staticmethod
    def with_state(
        storage: "Storage",
        state_group: Optional[int],
        state_group_before_event: Optional[int],
        current_state_ids: Optional[StateMap[str]],
        prev_state_ids: Optional[StateMap[str]],
        state_delta_due_to_event: Optional[StateMap[str]],
        partial_state: bool,
        prev_group: Optional[int] = None,
        delta_ids: Optional[StateMap[str]] = None,
    ) -> "EventContext":
        return EventContext(
            current_state_ids=current_state_ids,
            prev_state_ids=prev_state_ids,
            storage=storage,
            state_group=state_group,
            state_group_before_event=state_group_before_event,
            state_delta_due_to_event=state_delta_due_to_event,
            prev_group=prev_group,
            delta_ids=delta_ids,
            partial_state=partial_state,
        )

    @staticmethod
    def for_outlier() -> "EventContext":
    def for_outlier(
        storage: "Storage",
    ) -> "EventContext":
        """Return an EventContext instance suitable for persisting an outlier event"""
        return EventContext(
            current_state_ids={},
            prev_state_ids={},
        )
        return EventContext(storage=storage)

    async def serialize(self, event: EventBase, store: "DataStore") -> JsonDict:
        """Converts self to a type that can be serialized as JSON, and then

@@ -157,24 +132,14 @@ class EventContext:
            The serialized event.
        """

        # We don't serialize the full state dicts, instead they get pulled out
        # of the DB on the other side. However, the other side can't figure out
        # the prev_state_ids, so if we're a state event we include the event
        # id that we replaced in the state.
        if event.is_state():
            prev_state_ids = await self.get_prev_state_ids()
            prev_state_id = prev_state_ids.get((event.type, event.state_key))
        else:
            prev_state_id = None

        return {
            "prev_state_id": prev_state_id,
            "event_type": event.type,
            "event_state_key": event.get_state_key(),
            "state_group": self._state_group,
            "state_group_before_event": self.state_group_before_event,
            "rejected": self.rejected,
            "prev_group": self.prev_group,
            "state_delta_due_to_event": _encode_state_dict(
                self._state_delta_due_to_event
            ),
            "delta_ids": _encode_state_dict(self.delta_ids),
            "app_service_id": self.app_service.id if self.app_service else None,
            "partial_state": self.partial_state,

@@ -192,16 +157,16 @@ class EventContext:
        Returns:
            The event context.
        """
        context = _AsyncEventContextImpl(
        context = EventContext(
            # We use the state_group and prev_state_id stuff to pull the
            # current_state_ids out of the DB and construct prev_state_ids.
            storage=storage,
            prev_state_id=input["prev_state_id"],
            event_type=input["event_type"],
            event_state_key=input["event_state_key"],
            state_group=input["state_group"],
            state_group_before_event=input["state_group_before_event"],
            prev_group=input["prev_group"],
            state_delta_due_to_event=_decode_state_dict(
                input["state_delta_due_to_event"]
            ),
            delta_ids=_decode_state_dict(input["delta_ids"]),
            rejected=input["rejected"],
            partial_state=input.get("partial_state", False),

@@ -249,8 +214,15 @@ class EventContext:
        if self.rejected:
            raise RuntimeError("Attempt to access state_ids of rejected event")

        await self._ensure_fetched()
        return self._current_state_ids
        assert self._state_delta_due_to_event is not None

        prev_state_ids = await self.get_prev_state_ids()

        if self._state_delta_due_to_event:
            prev_state_ids = dict(prev_state_ids)
            prev_state_ids.update(self._state_delta_due_to_event)

        return prev_state_ids

    async def get_prev_state_ids(self) -> StateMap[str]:
        """

@@ -265,94 +237,10 @@ class EventContext:
            Maps a (type, state_key) to the event ID of the state event matching
            this tuple.
        """
        await self._ensure_fetched()
        # There *should* be previous state IDs now.
        assert self._prev_state_ids is not None
        return self._prev_state_ids

    def get_cached_current_state_ids(self) -> Optional[StateMap[str]]:
        """Gets the current state IDs if we have them already cached.

        It is an error to access this for a rejected event, since rejected state should
        not make it into the room state. This method will raise an exception if
        ``rejected`` is set.

        Returns:
            Returns None if we haven't cached the state or if state_group is None
            (which happens when the associated event is an outlier).

            Otherwise, returns the the current state IDs.
        """
        if self.rejected:
            raise RuntimeError("Attempt to access state_ids of rejected event")

        return self._current_state_ids

    async def _ensure_fetched(self) -> None:
        return None


@attr.s(slots=True)
class _AsyncEventContextImpl(EventContext):
    """
    An implementation of EventContext which fetches _current_state_ids and
    _prev_state_ids from the database on demand.

    Attributes:

        _storage

        _fetching_state_deferred: Resolves when *_state_ids have been calculated.
            None if we haven't started calculating yet

        _event_type: The type of the event the context is associated with.

        _event_state_key: The state_key of the event the context is associated with.

        _prev_state_id: If the event associated with the context is a state event,
            then `_prev_state_id` is the event_id of the state that was replaced.
    """

    # This needs to have a default as we're inheriting
    _storage: "Storage" = attr.ib(default=None)
    _prev_state_id: Optional[str] = attr.ib(default=None)
    _event_type: str = attr.ib(default=None)
    _event_state_key: Optional[str] = attr.ib(default=None)
    _fetching_state_deferred: Optional["Deferred[None]"] = attr.ib(default=None)

    async def _ensure_fetched(self) -> None:
        if not self._fetching_state_deferred:
            self._fetching_state_deferred = run_in_background(self._fill_out_state)

        await make_deferred_yieldable(self._fetching_state_deferred)

    async def _fill_out_state(self) -> None:
        """Called to populate the _current_state_ids and _prev_state_ids
        attributes by loading from the database.
        """
        if self.state_group is None:
            # No state group means the event is an outlier. Usually the state_ids dicts are also
            # pre-set to empty dicts, but they get reset when the context is serialized, so set
            # them to empty dicts again here.
            self._current_state_ids = {}
            self._prev_state_ids = {}
            return

        current_state_ids = await self._storage.state.get_state_ids_for_group(
            self.state_group
        assert self.state_group_before_event is not None
        return await self._storage.state.get_state_ids_for_group(
            self.state_group_before_event
        )
        # Set this separately so mypy knows current_state_ids is not None.
        self._current_state_ids = current_state_ids
        if self._event_state_key is not None:
            self._prev_state_ids = dict(current_state_ids)

            key = (self._event_type, self._event_state_key)
            if self._prev_state_id:
                self._prev_state_ids[key] = self._prev_state_id
            else:
                self._prev_state_ids.pop(key, None)
        else:
            self._prev_state_ids = current_state_ids


def _encode_state_dict(

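The refactored `get_current_state_ids` above derives the post-event state on demand instead of caching it: it is the pre-event state with the event's own delta applied. A sketch with invented event IDs:

# (type, state_key) -> event_id, as in StateMap[str]
prev_state_ids = {
    ("m.room.create", ""): "$create",
    ("m.room.member", "@alice:example.org"): "$alice_join",
}
state_delta_due_to_event = {("m.room.member", "@bob:example.org"): "$bob_join"}

# Copy before mutating, exactly as the method does, so the cached
# pre-event mapping is not modified in place.
current_state_ids = dict(prev_state_ids)
current_state_ids.update(state_delta_due_to_event)

assert current_state_ids[("m.room.member", "@bob:example.org")] == "$bob_join"
assert ("m.room.member", "@bob:example.org") not in prev_state_ids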
@@ -27,11 +27,12 @@ from typing import (
    Union,
)

from synapse.api.errors import Codes
from synapse.rest.media.v1._base import FileInfo
from synapse.rest.media.v1.media_storage import ReadableFileWrapper
from synapse.spam_checker_api import RegistrationBehaviour
from synapse.spam_checker_api import ALLOW, Decision, RegistrationBehaviour
from synapse.types import RoomAlias, UserProfile
from synapse.util.async_helpers import maybe_awaitable
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable

if TYPE_CHECKING:
    import synapse.events

@@ -39,17 +40,34 @@ if TYPE_CHECKING:

logger = logging.getLogger(__name__)


DEPRECATED_BOOL = bool

CHECK_EVENT_FOR_SPAM_CALLBACK = Callable[
    ["synapse.events.EventBase"],
    Awaitable[Union[bool, str]],
    Awaitable[Union[ALLOW, Codes, str, DEPRECATED_BOOL]],
]
USER_MAY_JOIN_ROOM_CALLBACK = Callable[
    [str, str, bool], Awaitable[Union[ALLOW, Codes, DEPRECATED_BOOL]]
]
USER_MAY_INVITE_CALLBACK = Callable[
    [str, str, str], Awaitable[Union[ALLOW, Codes, DEPRECATED_BOOL]]
]
USER_MAY_SEND_3PID_INVITE_CALLBACK = Callable[
    [str, str, str, str], Awaitable[Union[ALLOW, Codes, DEPRECATED_BOOL]]
]
USER_MAY_CREATE_ROOM_CALLBACK = Callable[
    [str], Awaitable[Union[ALLOW, Codes, DEPRECATED_BOOL]]
]
USER_MAY_CREATE_ROOM_ALIAS_CALLBACK = Callable[
    [str, RoomAlias], Awaitable[Union[ALLOW, Codes, DEPRECATED_BOOL]]
]
USER_MAY_PUBLISH_ROOM_CALLBACK = Callable[
    [str, str], Awaitable[Union[ALLOW, Codes, DEPRECATED_BOOL]]
]
CHECK_USERNAME_FOR_SPAM_CALLBACK = Callable[
    [UserProfile], Awaitable[Union[ALLOW, Codes, DEPRECATED_BOOL]]
]
USER_MAY_JOIN_ROOM_CALLBACK = Callable[[str, str, bool], Awaitable[bool]]
USER_MAY_INVITE_CALLBACK = Callable[[str, str, str], Awaitable[bool]]
USER_MAY_SEND_3PID_INVITE_CALLBACK = Callable[[str, str, str, str], Awaitable[bool]]
USER_MAY_CREATE_ROOM_CALLBACK = Callable[[str], Awaitable[bool]]
USER_MAY_CREATE_ROOM_ALIAS_CALLBACK = Callable[[str, RoomAlias], Awaitable[bool]]
USER_MAY_PUBLISH_ROOM_CALLBACK = Callable[[str, str], Awaitable[bool]]
CHECK_USERNAME_FOR_SPAM_CALLBACK = Callable[[UserProfile], Awaitable[bool]]
LEGACY_CHECK_REGISTRATION_FOR_SPAM_CALLBACK = Callable[
    [
        Optional[dict],

@@ -65,11 +83,11 @@ CHECK_REGISTRATION_FOR_SPAM_CALLBACK = Callable[
        Collection[Tuple[str, str]],
        Optional[str],
    ],
    Awaitable[RegistrationBehaviour],
    Awaitable[Union[RegistrationBehaviour, Codes]],
]
CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK = Callable[
    [ReadableFileWrapper, FileInfo],
    Awaitable[bool],
    Awaitable[Union[ALLOW, Codes, DEPRECATED_BOOL]],
]


@@ -240,7 +258,7 @@ class SpamChecker:

    async def check_event_for_spam(
        self, event: "synapse.events.EventBase"
    ) -> Union[bool, str]:
    ) -> Union[ALLOW, Codes, str]:
        """Checks if a given event is considered "spammy" by this server.

        If the server considers an event spammy, then it will be rejected if

@@ -251,19 +269,29 @@ class SpamChecker:
            event: the event to be checked

        Returns:
            True or a string if the event is spammy. If a string is returned it
            will be used as the error message returned to the user.
            - on `ALLOW`, the event is considered good (non-spammy) and should
              be let through. Other spamcheck filters may still reject it.
            - on `Codes`, the event is considered spammy and is rejected with a specific
              error message/code.
            - on `str`, the event is considered spammy and the string is used as error
              message.
        """
        for callback in self._check_event_for_spam_callbacks:
            res: Union[bool, str] = await callback(event)
            if res:
            res: Union[ALLOW, Codes, str, DEPRECATED_BOOL] = await delay_cancellation(
                callback(event)
            )
            if res is False or res is ALLOW:
                continue
            elif res is True:
                return Codes.FORBIDDEN
            else:
                return res

        return False
        return ALLOW

    async def user_may_join_room(
        self, user_id: str, room_id: str, is_invited: bool
    ) -> bool:
    ) -> Decision:
        """Checks if a given users is allowed to join a room.
        Not called when a user creates a room.

@@ -273,42 +301,54 @@ class SpamChecker:
            is_invited: Whether the user is invited into the room

        Returns:
            Whether the user may join the room
            - on `ALLOW`, the action is permitted.
            - on `Codes`, the action is rejected with a specific error message/code.
        """
        for callback in self._user_may_join_room_callbacks:
            if await callback(user_id, room_id, is_invited) is False:
                return False
            may_join_room = await delay_cancellation(
                callback(user_id, room_id, is_invited)
            )
            if may_join_room is True or may_join_room is ALLOW:
                continue
            elif may_join_room is False:
                return Codes.FORBIDDEN
            else:
                return may_join_room

        return True
        return ALLOW

    async def user_may_invite(
        self, inviter_userid: str, invitee_userid: str, room_id: str
    ) -> bool:
    ) -> Decision:
        """Checks if a given user may send an invite

        If this method returns false, the invite will be rejected.

        Args:
            inviter_userid: The user ID of the sender of the invitation
            invitee_userid: The user ID targeted in the invitation
            room_id: The room ID

        Returns:
            True if the user may send an invite, otherwise False
            - on `ALLOW`, the action is permitted.
            - on `Codes`, the action is rejected with a specific error message/code.
        """
        for callback in self._user_may_invite_callbacks:
            if await callback(inviter_userid, invitee_userid, room_id) is False:
                return False
            may_invite = await delay_cancellation(
                callback(inviter_userid, invitee_userid, room_id)
            )
            if may_invite is True or may_invite is ALLOW:
                continue
            elif may_invite is False:
                return Codes.FORBIDDEN
            else:
                return may_invite

        return True
        return ALLOW

    async def user_may_send_3pid_invite(
        self, inviter_userid: str, medium: str, address: str, room_id: str
    ) -> bool:
    ) -> Decision:
        """Checks if a given user may invite a given threepid into the room

        If this method returns false, the threepid invite will be rejected.

        Note that if the threepid is already associated with a Matrix user ID, Synapse
        will call user_may_invite with said user ID instead.

@@ -319,70 +359,94 @@ class SpamChecker:
            room_id: The room ID

        Returns:
            True if the user may send the invite, otherwise False
            - on `ALLOW`, the action is permitted.
            - on `Codes`, the action is rejected with a specific error message/code.
        """
        for callback in self._user_may_send_3pid_invite_callbacks:
            if await callback(inviter_userid, medium, address, room_id) is False:
                return False
            may_send_3pid_invite = await delay_cancellation(
                callback(inviter_userid, medium, address, room_id)
            )
            if may_send_3pid_invite is True or may_send_3pid_invite is ALLOW:
                continue
            elif may_send_3pid_invite is False:
                return Codes.FORBIDDEN
            else:
                return may_send_3pid_invite

        return True
        return ALLOW

    async def user_may_create_room(self, userid: str) -> bool:
    async def user_may_create_room(self, userid: str) -> Decision:
        """Checks if a given user may create a room

        If this method returns false, the creation request will be rejected.

        Args:
            userid: The ID of the user attempting to create a room

        Returns:
            True if the user may create a room, otherwise False
            - on `ALLOW`, the action is permitted.
            - on `Codes`, the action is rejected with a specific error message/code.
        """
        for callback in self._user_may_create_room_callbacks:
            if await callback(userid) is False:
                return False
            may_create_room = await delay_cancellation(callback(userid))
            if may_create_room is True or may_create_room is ALLOW:
                continue
            elif may_create_room is False:
                return Codes.FORBIDDEN
            else:
                return may_create_room

        return True
        return ALLOW

    async def user_may_create_room_alias(
        self, userid: str, room_alias: RoomAlias
    ) -> bool:
    ) -> Decision:
        """Checks if a given user may create a room alias

        If this method returns false, the association request will be rejected.

        Args:
            userid: The ID of the user attempting to create a room alias
            room_alias: The alias to be created

        Returns:
            True if the user may create a room alias, otherwise False
            - on `ALLOW`, the action is permitted.
            - on `Codes`, the action is rejected with a specific error message/code.
        """
        for callback in self._user_may_create_room_alias_callbacks:
            if await callback(userid, room_alias) is False:
                return False
            may_create_room_alias = await delay_cancellation(
                callback(userid, room_alias)
            )
            if may_create_room_alias is True or may_create_room_alias is ALLOW:
                continue
            elif may_create_room_alias is False:
                return Codes.FORBIDDEN
            else:
                return may_create_room_alias

        return True
        return ALLOW

    async def user_may_publish_room(self, userid: str, room_id: str) -> bool:
    async def user_may_publish_room(
        self, userid: str, room_id: str
    ) -> Union[ALLOW, Codes, DEPRECATED_BOOL]:
        """Checks if a given user may publish a room to the directory

        If this method returns false, the publish request will be rejected.

        Args:
            userid: The user ID attempting to publish the room
            room_id: The ID of the room that would be published

        Returns:
            True if the user may publish the room, otherwise False
            - on `ALLOW`, the action is permitted.
            - on `Codes`, the action is rejected with a specific error message/code.
        """
        for callback in self._user_may_publish_room_callbacks:
            if await callback(userid, room_id) is False:
                return False
            may_publish_room = await delay_cancellation(callback(userid, room_id))
            if may_publish_room is True or may_publish_room is ALLOW:
                continue
            elif may_publish_room is False:
                return Codes.FORBIDDEN
            else:
                return may_publish_room

        return True
        return ALLOW

    async def check_username_for_spam(self, user_profile: UserProfile) -> bool:
    async def check_username_for_spam(self, user_profile: UserProfile) -> Decision:
        """Checks if a user ID or display name are considered "spammy" by this server.

        If the server considers a username spammy, then it will not be included in

@@ -395,15 +459,21 @@ class SpamChecker:
            * avatar_url

        Returns:
            True if the user is spammy.
            - on `ALLOW`, the action is permitted.
            - on `Codes`, the action is rejected with a specific error message/code.
        """
        for callback in self._check_username_for_spam_callbacks:
            # Make a copy of the user profile object to ensure the spam checker cannot
            # modify it.
            if await callback(user_profile.copy()):
                return True
            is_spam = await delay_cancellation(callback(user_profile.copy()))
            if is_spam is False or is_spam is ALLOW:
                continue
            elif is_spam is True:
                return Codes.FORBIDDEN
            else:
                return is_spam

        return False
        return ALLOW

    async def check_registration_for_spam(
        self,

@@ -428,9 +498,11 @@ class SpamChecker:
        """

        for callback in self._check_registration_for_spam_callbacks:
            behaviour = await (
            behaviour = await delay_cancellation(
                callback(email_threepid, username, request_info, auth_provider_id)
            )
            if isinstance(behaviour, Codes):
                return behaviour
            assert isinstance(behaviour, RegistrationBehaviour)
            if behaviour != RegistrationBehaviour.ALLOW:
                return behaviour

@@ -439,7 +511,7 @@ class SpamChecker:

    async def check_media_file_for_spam(
        self, file_wrapper: ReadableFileWrapper, file_info: FileInfo
    ) -> bool:
    ) -> Decision:
        """Checks if a piece of newly uploaded media should be blocked.

        This will be called for local uploads, downloads of remote media, each

@@ -461,19 +533,22 @@ class SpamChecker:

                return False


        Args:
            file: An object that allows reading the contents of the media.
            file_info: Metadata about the file.

        Returns:
            True if the media should be blocked or False if it should be
            allowed.
            - on `ALLOW`, the action is permitted.
            - on `Codes`, the action is rejected with a specific error message/code.
        """

        for callback in self._check_media_file_for_spam_callbacks:
            spam = await callback(file_wrapper, file_info)
            if spam:
                return True
            is_spam = await delay_cancellation(callback(file_wrapper, file_info))
            if is_spam is False or is_spam is ALLOW:
                continue
            elif is_spam is True:
                return Codes.FORBIDDEN
            else:
                return is_spam

        return False
        return ALLOW

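Under the new convention a module callback returns `ALLOW` to pass, a `Codes` value (or, for `check_event_for_spam`, an error string) to reject, while plain booleans remain accepted but deprecated. A sketch of a callback written against this API, using the `ALLOW` and `Decision` names imported above (the blocklist is invented):

from synapse.api.errors import Codes
from synapse.spam_checker_api import ALLOW, Decision

BLOCKED_USERS = {"@spammer:example.org"}  # hypothetical blocklist

async def user_may_create_room(userid: str) -> Decision:
    # Reject room creation for blocked users; everyone else is allowed,
    # though later callbacks may still reject them.
    if userid in BLOCKED_USERS:
        return Codes.FORBIDDEN
    return ALLOW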
@@ -14,12 +14,14 @@
import logging
from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional, Tuple

from twisted.internet.defer import CancelledError

from synapse.api.errors import ModuleFailedException, SynapseError
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.storage.roommember import ProfileInfo
from synapse.types import Requester, StateMap
from synapse.util.async_helpers import maybe_awaitable
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable

if TYPE_CHECKING:
    from synapse.server import HomeServer

@@ -263,7 +265,11 @@ class ThirdPartyEventRules:

        for callback in self._check_event_allowed_callbacks:
            try:
                res, replacement_data = await callback(event, state_events)
                res, replacement_data = await delay_cancellation(
                    callback(event, state_events)
                )
            except CancelledError:
                raise
            except SynapseError as e:
                # FIXME: Being able to throw SynapseErrors is relied upon by
                # some modules. PR #10386 accidentally broke this ability.

@@ -333,8 +339,13 @@ class ThirdPartyEventRules:

        for callback in self._check_threepid_can_be_invited_callbacks:
            try:
                if await callback(medium, address, state_events) is False:
                threepid_can_be_invited = await delay_cancellation(
                    callback(medium, address, state_events)
                )
                if threepid_can_be_invited is False:
                    return False
            except CancelledError:
                raise
            except Exception as e:
                logger.warning("Failed to run module API callback %s: %s", callback, e)

@@ -361,8 +372,13 @@ class ThirdPartyEventRules:

        for callback in self._check_visibility_can_be_modified_callbacks:
            try:
                if await callback(room_id, state_events, new_visibility) is False:
                visibility_can_be_modified = await delay_cancellation(
                    callback(room_id, state_events, new_visibility)
                )
                if visibility_can_be_modified is False:
                    return False
            except CancelledError:
                raise
            except Exception as e:
                logger.warning("Failed to run module API callback %s: %s", callback, e)

@@ -400,8 +416,11 @@ class ThirdPartyEventRules:
        """
        for callback in self._check_can_shutdown_room_callbacks:
            try:
                if await callback(user_id, room_id) is False:
                can_shutdown_room = await delay_cancellation(callback(user_id, room_id))
                if can_shutdown_room is False:
                    return False
            except CancelledError:
                raise
            except Exception as e:
                logger.exception(
                    "Failed to run module API callback %s: %s", callback, e

@@ -422,8 +441,13 @@ class ThirdPartyEventRules:
        """
        for callback in self._check_can_deactivate_user_callbacks:
            try:
                if await callback(user_id, by_admin) is False:
                can_deactivate_user = await delay_cancellation(
                    callback(user_id, by_admin)
                )
                if can_deactivate_user is False:
                    return False
            except CancelledError:
                raise
            except Exception as e:
                logger.exception(
                    "Failed to run module API callback %s: %s", callback, e

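Every call site above follows the same shape: wrap the callback in `delay_cancellation`, re-raise `CancelledError` so cancellation of the calling request still propagates, and log-and-skip any other failure. Reduced to a sketch (the generic `run_callbacks` helper is illustrative, not a Synapse API):

import logging

from twisted.internet.defer import CancelledError

from synapse.util.async_helpers import delay_cancellation

logger = logging.getLogger(__name__)

async def run_callbacks(callbacks, *args) -> bool:
    for callback in callbacks:
        try:
            # delay_cancellation lets the callback run to completion even if
            # the caller is cancelled, re-raising the cancellation afterwards.
            result = await delay_cancellation(callback(*args))
        except CancelledError:
            raise  # propagate cancellation instead of swallowing it
        except Exception as e:
            logger.warning("Failed to run module API callback %s: %s", callback, e)
            continue
        if result is False:
            return False
    return True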
@@ -22,12 +22,12 @@ from typing import (
    Iterable,
    List,
    Mapping,
    MutableMapping,
    Optional,
    Union,
)

import attr
from frozendict import frozendict

from synapse.api.constants import EventContentFields, EventTypes, RelationTypes
from synapse.api.errors import Codes, SynapseError

@@ -39,7 +39,6 @@ from . import EventBase

if TYPE_CHECKING:
    from synapse.handlers.relations import BundledAggregations
    from synapse.server import HomeServer


# Split strings on "." but not "\." This uses a negative lookbehind assertion for '\'

@@ -205,7 +204,9 @@ def _copy_field(src: JsonDict, dst: JsonDict, field: List[str]) -> None:
    key_to_move = field.pop(-1)
    sub_dict = src
    for sub_field in field:  # e.g. sub_field => "content"
        if sub_field in sub_dict and type(sub_dict[sub_field]) in [dict, frozendict]:
        if sub_field in sub_dict and isinstance(
            sub_dict[sub_field], collections.abc.Mapping
        ):
            sub_dict = sub_dict[sub_field]
        else:
            return

@@ -396,9 +397,6 @@ class EventClientSerializer:
    clients.
    """

    def __init__(self, hs: "HomeServer"):
        self._msc3440_enabled = hs.config.experimental.msc3440_enabled

    def serialize_event(
        self,
        event: Union[JsonDict, EventBase],

@@ -406,6 +404,7 @@ class EventClientSerializer:
        *,
        config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG,
        bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None,
        apply_edits: bool = True,
    ) -> JsonDict:
        """Serializes a single event.

@@ -413,10 +412,10 @@ class EventClientSerializer:
            event: The event being serialized.
            time_now: The current time in milliseconds
            config: Event serialization config
            bundle_aggregations: Whether to include the bundled aggregations for this
                event. Only applies to non-state events. (State events never include
                bundled aggregations.)

            bundle_aggregations: A map from event_id to the aggregations to be bundled
                into the event.
            apply_edits: Whether the content of the event should be modified to reflect
                any replacement in `bundle_aggregations[<event_id>].replace`.
        Returns:
            The serialized event
        """

@@ -428,14 +427,14 @@ class EventClientSerializer:

        # Check if there are any bundled aggregations to include with the event.
        if bundle_aggregations:
            event_aggregations = bundle_aggregations.get(event.event_id)
            if event_aggregations:
            if event.event_id in bundle_aggregations:
                self._inject_bundled_aggregations(
                    event,
                    time_now,
                    config,
                    bundle_aggregations[event.event_id],
                    bundle_aggregations,
                    serialized_event,
                    apply_edits=apply_edits,
                )

        return serialized_event

@@ -472,31 +471,49 @@ class EventClientSerializer:
        event: EventBase,
        time_now: int,
        config: SerializeEventConfig,
        aggregations: "BundledAggregations",
        bundled_aggregations: Dict[str, "BundledAggregations"],
        serialized_event: JsonDict,
        apply_edits: bool,
    ) -> None:
        """Potentially injects bundled aggregations into the unsigned portion of the serialized event.

        Args:
            event: The event being serialized.
            time_now: The current time in milliseconds
            aggregations: The bundled aggregation to serialize.
            serialized_event: The serialized event which may be modified.
            config: Event serialization config
            bundled_aggregations: Bundled aggregations to be injected.
                A map from event_id to aggregation data. Must contain at least an
                entry for `event`.

                While serializing the bundled aggregations this map may be searched
                again for additional events in a recursive manner.
            serialized_event: The serialized event which may be modified.
            apply_edits: Whether the content of the event should be modified to reflect
                any replacement in `aggregations.replace`.
        """

        # We have already checked that aggregations exist for this event.
        event_aggregations = bundled_aggregations[event.event_id]

        # The JSON dictionary to be added under the unsigned property of the event
        # being serialized.
        serialized_aggregations = {}

        if aggregations.annotations:
            serialized_aggregations[RelationTypes.ANNOTATION] = aggregations.annotations
        if event_aggregations.annotations:
            serialized_aggregations[
                RelationTypes.ANNOTATION
            ] = event_aggregations.annotations

        if aggregations.references:
            serialized_aggregations[RelationTypes.REFERENCE] = aggregations.references
        if event_aggregations.references:
            serialized_aggregations[
                RelationTypes.REFERENCE
            ] = event_aggregations.references

        if aggregations.replace:
            # If there is an edit, apply it to the event.
            edit = aggregations.replace
            self._apply_edit(event, serialized_event, edit)
        if event_aggregations.replace:
            # If there is an edit, optionally apply it to the event.
            edit = event_aggregations.replace
            if apply_edits:
                self._apply_edit(event, serialized_event, edit)

            # Include information about it in the relations dict.
            serialized_aggregations[RelationTypes.REPLACE] = {

@@ -505,19 +522,16 @@ class EventClientSerializer:
                "sender": edit.sender,
            }

        # If this event is the start of a thread, include a summary of the replies.
        if aggregations.thread:
            thread = aggregations.thread
        # Include any threaded replies to this event.
        if event_aggregations.thread:
            thread = event_aggregations.thread

            # Don't bundle aggregations as this could recurse forever.
            serialized_latest_event = serialize_event(
                thread.latest_event, time_now, config=config
            serialized_latest_event = self.serialize_event(
                thread.latest_event,
                time_now,
                config=config,
                bundle_aggregations=bundled_aggregations,
            )
            # Manually apply an edit, if one exists.
            if thread.latest_edit:
                self._apply_edit(
                    thread.latest_event, serialized_latest_event, thread.latest_edit
                )

            thread_summary = {
                "latest_event": serialized_latest_event,

@@ -525,8 +539,6 @@ class EventClientSerializer:
                "current_user_participated": thread.current_user_participated,
            }
            serialized_aggregations[RelationTypes.THREAD] = thread_summary
            if self._msc3440_enabled:
                serialized_aggregations[RelationTypes.UNSTABLE_THREAD] = thread_summary

        # Include the bundled aggregations in the event.
        if serialized_aggregations:

@@ -569,10 +581,20 @@
]


def copy_power_levels_contents(
    old_power_levels: Mapping[str, Union[int, Mapping[str, int]]]
_PowerLevel = Union[str, int]


def copy_and_fixup_power_levels_contents(
    old_power_levels: Mapping[str, Union[_PowerLevel, Mapping[str, _PowerLevel]]]
) -> Dict[str, Union[int, Dict[str, int]]]:
    """Copy the content of a power_levels event, unfreezing frozendicts along the way
    """Copy the content of a power_levels event, unfreezing frozendicts along the way.

    We accept as input power level values which are strings, provided they represent an
    integer, e.g. `"100"` instead of 100. Such strings are converted to integers
    in the returned dictionary (hence "fixup" in the function name).

    Note that future room versions will outlaw such stringy power levels (see
    https://github.com/matrix-org/matrix-spec/issues/853).

    Raises:
        TypeError if the input does not look like a valid power levels event content

@@ -581,29 +603,47 @@ def copy_power_levels_contents(
        raise TypeError("Not a valid power-levels content: %r" % (old_power_levels,))

    power_levels: Dict[str, Union[int, Dict[str, int]]] = {}

    for k, v in old_power_levels.items():

        if isinstance(v, int):
            power_levels[k] = v
            continue

        if isinstance(v, collections.abc.Mapping):
            h: Dict[str, int] = {}
            power_levels[k] = h
            for k1, v1 in v.items():
                # we should only have one level of nesting
                if not isinstance(v1, int):
                    raise TypeError(
                        "Invalid power_levels value for %s.%s: %r" % (k, k1, v1)
                    )
                h[k1] = v1
            continue
                _copy_power_level_value_as_integer(v1, h, k1)

        raise TypeError("Invalid power_levels value for %s: %r" % (k, v))
        else:
            _copy_power_level_value_as_integer(v, power_levels, k)

    return power_levels


def _copy_power_level_value_as_integer(
    old_value: object,
    power_levels: MutableMapping[str, Any],
    key: str,
) -> None:
    """Set `power_levels[key]` to the integer represented by `old_value`.

    :raises TypeError: if `old_value` is not an integer, nor a base-10 string
    representation of an integer.
    """
    if isinstance(old_value, int):
        power_levels[key] = old_value
        return

    if isinstance(old_value, str):
        try:
            parsed_value = int(old_value, base=10)
        except ValueError:
            # Fall through to the final TypeError.
            pass
        else:
            power_levels[key] = parsed_value
            return

    raise TypeError(f"Invalid power_levels value for {key}: {old_value}")


def validate_canonicaljson(value: Any) -> None:
    """
    Ensure that the JSON object is valid according to the rules of canonical JSON.

@@ -623,7 +663,7 @@ def validate_canonicaljson(value: Any) -> None:
        # Note that Infinity, -Infinity, and NaN are also considered floats.
        raise SynapseError(400, "Bad JSON value: float", Codes.BAD_JSON)

    elif isinstance(value, (dict, frozendict)):
    elif isinstance(value, collections.abc.Mapping):
        for v in value.values():
            validate_canonicaljson(v)

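The "fixup" behaviour introduced above for power levels can be exercised directly. A sketch, assuming the function lives in `synapse.events.utils` as in current Synapse (the input content is invented):

from synapse.events.utils import copy_and_fixup_power_levels_contents

# Stringy power levels are coerced to integers, one level of nesting deep.
fixed = copy_and_fixup_power_levels_contents(
    {"ban": "50", "users": {"@alice:example.org": "100"}}
)
assert fixed == {"ban": 50, "users": {"@alice:example.org": 100}}

# Anything that is neither an int nor a base-10 integer string raises TypeError.
try:
    copy_and_fixup_power_levels_contents({"ban": "fifty"})
except TypeError:
    pass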
Some files were not shown because too many files have changed in this diff.