Compare commits: release-v1 ... v1.46.0 (253 commits)
| SHA1 |
|---|
| 2d44ee6868 |
| df84ad602b |
| 576921c66a |
| b3e843be88 |
| e0ef8fe58d |
| b615fc35d6 |
| f3a4be8700 |
| 72626b78ef |
| 2dbef6c10a |
| 60ad9460c4 |
| 400f391f71 |
| 34b0222c2b |
| cc75a6b1b2 |
| 7004f43da1 |
| d52c58dfa3 |
| 8c8e36af0d |
| 63cbdd8af0 |
| c1510c97b5 |
| 4387b791e0 |
| da957a60e8 |
| 85a09f8b8b |
| 2b82ec425f |
| b9ce53e878 |
| b0f03aeb6a |
| ba00e20234 |
| 2d91b6256e |
| 6408372234 |
| 0f9adc99ad |
| 09eff1b3db |
| ef7fe09778 |
| 57501d9194 |
| 62db603fa0 |
| 0930e9ae12 |
| 2c61a318cc |
| ee2cee5f52 |
| 106d99b8cd |
| 78d5896d19 |
| 9b016a0fb4 |
| 522489fbcd |
| df95d3aec2 |
| 0dd0c40329 |
| 5e0e683541 |
| a6c318735d |
| 95813ff43c |
| a21f8c4b41 |
| 8b1185347a |
| 191396f4ba |
| f3efa0036b |
| 0170774b19 |
| d85bc9a4a7 |
| 3ab55d43bd |
| cc33d9eee2 |
| a5d2ea3d08 |
| 73743b8ad1 |
| e8f24b6c35 |
| 7d70582eb0 |
| 37b845dabc |
| e09be0c87a |
| 5573133348 |
| 6a67f3786a |
| 013e0f9cae |
| daf498e099 |
| efd0074ab7 |
| e2f0b49b3f |
| 1609ccf8fe |
| 50d8601581 |
| b3698f945c |
| b1c1a34f46 |
| 4d761d24ba |
| 87c3a6dcc0 |
| 99a4e5222d |
| 35d6b914eb |
| 404444260a |
| 317e9e415c |
| b59f3281d5 |
| b3e9b00fb2 |
| 1f9d0b8a7a |
| cdd308845b |
| 732bbf6737 |
| b83e822556 |
| 2a2b189130 |
| 8711e15734 |
| 988de0afb0 |
| 5dcacdf6d1 |
| 9abc5f2a05 |
| 84f5d83257 |
| 8eaffe013c |
| 1db9282dfa |
| 77ea03086c |
| 333d6f4e84 |
| 5c35074d85 |
| 36224e056a |
| a18c568516 |
| a5871f53ed |
| 8afa48f7f6 |
| f6b62bdc4d |
| b8b905c4ea |
| 9e13cd98af |
| 6b18eb4430 |
| b01e953291 |
| 60af28c5dd |
| 8c5255b664 |
| 406f7bfa17 |
| e0f11ae4a5 |
| 5e29d417fc |
| 3828dd819b |
| 4c838112dc |
| b742cb2e4a |
| a7d22c36db |
| 1b112840d2 |
| 593eeac19e |
| d51a340019 |
| 9f23ff78da |
| c576598a68 |
| 51a5da74cc |
| 797ee7812d |
| 670a8d9a1e |
| eb9ddc8c2e |
| 49a683d871 |
| bb228f3523 |
| 0b4d5ce5e3 |
| e79ee48313 |
| 7301019d48 |
| e0bf34dada |
| 96fe77c254 |
| 86af6b2f0e |
| 52aefd5086 |
| f563676c09 |
| e564bdd127 |
| 4e51621064 |
| f4b1a9a527 |
| 829f2a82b0 |
| b0460936c8 |
| 370bca32e6 |
| 38b7db5885 |
| c80878d22a |
| f8d0f72b27 |
| 6744273f0b |
| 4f00432ce1 |
| 392863fbf1 |
| 2faac70e63 |
| b2c5e79291 |
| 3a5b0cbe7a |
| 787af4a106 |
| d099535deb |
| cb88ed912b |
| 6f6e956338 |
| 7036a7a60a |
| 660c8c1415 |
| eda8c88b84 |
| 30f0240401 |
| 730b40dd5e |
| 2d2c6a41fe |
| f7b034a24b |
| a0f48ee89d |
| d1cbad388f |
| a071144a5c |
| 32072dcdac |
| e46ac85d67 |
| 7e440520c9 |
| 9e5a429c8b |
| d1bf5f7c9d |
| 7d84d2523a |
| 44dee1fe8c |
| 145cb6d08e |
| 29364145b2 |
| 3412f5c8d8 |
| c4bf48ee6f |
| a03ed5e6ae |
| 3aefc7b66d |
| 428174f902 |
| a19aa8b162 |
| 176aa55fd5 |
| e32b9f44ee |
| 94b620a5ed |
| 8cef1ab2ac |
| 13032b6603 |
| 1b9ce5e8a6 |
| 67815cc3db |
| 5279b9161b |
| 2be0fde3d6 |
| 9fd057b8c5 |
| 62800a8fe3 |
| 0f007fe009 |
| 8aaa4b7b5d |
| 2622b28c5c |
| 37bb93d181 |
| eb2c7e51c4 |
| 2b9d174791 |
| bc69d49362 |
| c3ccad7785 |
| 3c50192d3f |
| a8bbf08576 |
| 707d5e4e48 |
| d37841787a |
| f7768f62cb |
| 6c83c27107 |
| d138187045 |
| b10257e879 |
| ea01d4c2de |
| 0420d4e6a5 |
| bb7fdd821b |
| 85551b7a85 |
| 261c9763c4 |
| 50022cff96 |
| fa74536384 |
| 7f3352743e |
| e704cc2a48 |
| 90d9fc7505 |
| a7304adc7d |
| 47854c71e9 |
| a10988983a |
| dcfd864970 |
| e584534403 |
| aa2c027792 |
| 26f2bfedbf |
| f78b68a96b |
| 03db6701d5 |
| 8f2a52766b |
| 6fc8be9a1b |
| 9391de3f37 |
| 52913d56a5 |
| 724aef9a87 |
| 80828eda06 |
| 4ecf51812e |
| a2d7195e01 |
| 51e2db3598 |
| 4054dfa409 |
| b25a494779 |
| ebd8baf61f |
| ba7a91aea5 |
| 2843058a8b |
| 5fca3c8ae6 |
| ee557b5375 |
| 706b0e41a1 |
| 60453315bd |
| 6a751ff5e0 |
| f455b0e420 |
| b3590614da |
| 437961744c |
| 6b6bb81b23 |
| b4c1af8cea |
| bfb4b858a9 |
| 3eba047d38 |
| b93259082c |
| 8c7a531e27 |
| 145c006ef7 |
| 1c555527b3 |
| 8eb7cb2e0d |
| 14b8c0476f |
| 51e1b96d04 |
| b996782df5 |
| 319b8b6bef |
**.ci/scripts/test_export_data_command.sh** (new executable file, 57 lines)
```diff
@@ -0,0 +1,57 @@
+#!/usr/bin/env bash
+
+# Test for the export-data admin command against sqlite and postgres
+
+set -xe
+cd `dirname $0`/../..
+
+echo "--- Install dependencies"
+
+# Install dependencies for this test.
+pip install psycopg2
+
+# Install Synapse itself. This won't update any libraries.
+pip install -e .
+
+echo "--- Generate the signing key"
+
+# Generate the server's signing key.
+python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
+
+echo "--- Prepare test database"
+
+# Make sure the SQLite3 database is using the latest schema and has no pending background update.
+scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+
+# Run the export-data command on the sqlite test database
+python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
+    --output-directory /tmp/export_data
+
+# Test that the output directory exists and contains the rooms directory
+dir="/tmp/export_data/rooms"
+if [ -d "$dir" ]; then
+    echo "Command successful, this test passes"
+else
+    echo "No output directories found, the command fails against a sqlite database."
+    exit 1
+fi
+
+# Create the PostgreSQL database.
+.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+
+# Port the SQLite database to postgres so we can check the command works against postgres
+echo "+++ Port SQLite3 database to postgres"
+scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+
+# Run the export-data command on the postgres database
+python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
+    --output-directory /tmp/export_data2
+
+# Test that the output directory exists and contains the rooms directory
+dir2="/tmp/export_data2/rooms"
+if [ -d "$dir2" ]; then
+    echo "Command successful, this test passes"
+else
+    echo "No output directories found, the command fails against a postgres database."
+    exit 1
+fi
```
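The script above drives Synapse's `export-data` admin command twice, once per database engine. Stripped of the CI plumbing, the command it exercises has this general shape (a sketch: the config path, user ID, and output directory below are placeholders, not values taken from this diff):

```bash
# Sketch of the export-data admin command exercised by the CI script.
# homeserver.yaml, the user ID, and the output path are placeholders.
python -m synapse.app.admin_cmd -c homeserver.yaml \
    export-data @someone:example.com --output-directory /tmp/export_data

# On success the command writes a rooms/ directory under the output path,
# which is exactly what the test asserts on.
ls /tmp/export_data/rooms
```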
```diff
@@ -25,7 +25,7 @@ python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
 echo "--- Prepare test database"

 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts-dev/update_database --database-config .ci/sqlite-config.yaml
+scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

 # Create the PostgreSQL database.
 .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
@@ -46,7 +46,7 @@ echo "--- Prepare empty SQLite database"
 # we do this by deleting the sqlite db, and then doing the same again.
 rm .ci/test_db.db

-scripts-dev/update_database --database-config .ci/sqlite-config.yaml
+scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

 # re-create the PostgreSQL database.
 .ci/scripts/postgres_exec.py \
```
**.github/CODEOWNERS** (vendored, new file, 2 lines)
```diff
@@ -0,0 +1,2 @@
+# Automatically request reviews from the synapse-core team when a pull request comes in.
+* @matrix-org/synapse-core
```
**.github/workflows/docs.yaml** (vendored, 1 line changed)
```diff
@@ -61,6 +61,5 @@ jobs:
         uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
-          keep_files: true
           publish_dir: ./book
           destination_dir: ./${{ steps.vars.outputs.branch-version }}
```
**.github/workflows/tests.yml** (vendored, 59 lines changed)
```diff
@@ -76,22 +76,25 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.6", "3.7", "3.8", "3.9"]
+        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
         database: ["sqlite"]
+        toxenv: ["py"]
         include:
           # Newest Python without optional deps
-          - python-version: "3.9"
-            toxenv: "py-noextras,combine"
+          - python-version: "3.10"
+            toxenv: "py-noextras"

           # Oldest Python with PostgreSQL
           - python-version: "3.6"
             database: "postgres"
             postgres-version: "9.6"
+            toxenv: "py"

-          # Newest Python with PostgreSQL
-          - python-version: "3.9"
+          # Newest Python with newest PostgreSQL
+          - python-version: "3.10"
             database: "postgres"
-            postgres-version: "13"
+            postgres-version: "14"
+            toxenv: "py"

     steps:
       - uses: actions/checkout@v2
@@ -111,7 +114,7 @@ jobs:
         if: ${{ matrix.postgres-version }}
         timeout-minutes: 2
         run: until pg_isready -h localhost; do sleep 1; done
-      - run: tox -e py,combine
+      - run: tox -e ${{ matrix.toxenv }}
        env:
          TRIAL_FLAGS: "--jobs=2"
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
@@ -119,6 +122,8 @@ jobs:
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
       - name: Dump logs
+        # Logs are most useful when the command fails, always include them.
+        if: ${{ always() }}
         # Note: Dumps to workflow logs instead of using actions/upload-artifact
         #       This keeps logs colocated with failing jobs
         #       It also ignores find's exit code; this is a best effort affair
@@ -143,6 +148,8 @@ jobs:
        env:
          TRIAL_FLAGS: "--jobs=2"
       - name: Dump logs
+        # Logs are most useful when the command fails, always include them.
+        if: ${{ always() }}
         # Note: Dumps to workflow logs instead of using actions/upload-artifact
         #       This keeps logs colocated with failing jobs
         #       It also ignores find's exit code; this is a best effort affair
@@ -169,10 +176,12 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}
       - run: pip install tox
-      - run: tox -e py,combine
+      - run: tox -e py
        env:
          TRIAL_FLAGS: "--jobs=2"
       - name: Dump logs
+        # Logs are most useful when the command fails, always include them.
+        if: ${{ always() }}
         # Note: Dumps to workflow logs instead of using actions/upload-artifact
         #       This keeps logs colocated with failing jobs
         #       It also ignores find's exit code; this is a best effort affair
@@ -192,6 +201,7 @@ jobs:
     volumes:
       - ${{ github.workspace }}:/src
     env:
       SYTEST_BRANCH: ${{ github.head_ref }}
       POSTGRES: ${{ matrix.postgres && 1}}
+      MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
       WORKERS: ${{ matrix.workers && 1 }}
@@ -243,6 +253,35 @@ jobs:
           /logs/results.tap
           /logs/**/*.log*

+  export-data:
+    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
+    needs: [linting-done, portdb]
+    runs-on: ubuntu-latest
+    env:
+      TOP: ${{ github.workspace }}
+
+    services:
+      postgres:
+        image: postgres
+        ports:
+          - 5432:5432
+        env:
+          POSTGRES_PASSWORD: "postgres"
+          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+
+    steps:
+      - uses: actions/checkout@v2
+      - run: sudo apt-get -qq install xmlsec1
+      - uses: actions/setup-python@v2
+        with:
+          python-version: "3.9"
+      - run: .ci/scripts/test_export_data_command.sh
+
   portdb:
     if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
     needs: linting-done
@@ -255,8 +294,8 @@ jobs:
         - python-version: "3.6"
           postgres-version: "9.6"

-        - python-version: "3.9"
-          postgres-version: "13"
+        - python-version: "3.10"
+          postgres-version: "14"

     services:
       postgres:
```
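With the new `toxenv` matrix axis, each CI entry boils down to a single parameterised `tox` invocation, so the same environments can be reproduced locally. A sketch, assuming a checkout of the repository and a working Python toolchain:

```bash
# Run the same tox environments CI selects via ${{ matrix.toxenv }}.
pip install tox
tox -e py           # the default unit-test environment
tox -e py-noextras  # the run without optional dependencies
```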
**.github/workflows/twisted_trunk.yml** (vendored, 2 lines changed)
```diff
@@ -33,6 +33,8 @@ jobs:
          TRIAL_FLAGS: "--jobs=2"

       - name: Dump logs
+        # Logs are most useful when the command fails, always include them.
+        if: ${{ always() }}
         # Note: Dumps to workflow logs instead of using actions/upload-artifact
         #       This keeps logs colocated with failing jobs
         #       It also ignores find's exit code; this is a best effort affair
```
**.gitignore** (vendored, 1 line changed)
```diff
@@ -40,6 +40,7 @@ __pycache__/
 /.coverage*
 /.mypy_cache/
 /.tox
+/.tox-pg-container
 /build/
 /coverage.*
 /dist/
```
**CHANGES.md** (324 lines changed)
`@@ -1,3 +1,327 @@`

Synapse 1.46.0 (2021-11-02)
===========================

The cause of the [performance regression affecting Synapse 1.44](https://github.com/matrix-org/synapse/issues/11049) has been identified and fixed. ([\#11177](https://github.com/matrix-org/synapse/issues/11177))

Bugfixes
--------

- Fix a bug introduced in v1.46.0rc1 where URL previews of some XML documents would fail. ([\#11196](https://github.com/matrix-org/synapse/issues/11196))


Synapse 1.46.0rc1 (2021-10-27)
==============================

Features
--------

- Add support for Ubuntu 21.10 "Impish Indri". ([\#11024](https://github.com/matrix-org/synapse/issues/11024))
- Port the Password Auth Providers module interface to the new generic interface. ([\#10548](https://github.com/matrix-org/synapse/issues/10548), [\#11180](https://github.com/matrix-org/synapse/issues/11180))
- Experimental support for the thread relation defined in [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440). ([\#11088](https://github.com/matrix-org/synapse/issues/11088), [\#11181](https://github.com/matrix-org/synapse/issues/11181), [\#11192](https://github.com/matrix-org/synapse/issues/11192))
- Users admin API can now also modify user type in addition to allowing it to be set on user creation. ([\#11174](https://github.com/matrix-org/synapse/issues/11174))


Bugfixes
--------

- Newly-created public rooms are now only assigned an alias if the room's creation has not been blocked by permission settings. Contributed by @AndrewFerr. ([\#10930](https://github.com/matrix-org/synapse/issues/10930))
- Fix a long-standing bug which meant that events received over federation were sometimes incorrectly accepted into the room state. ([\#11001](https://github.com/matrix-org/synapse/issues/11001), [\#11009](https://github.com/matrix-org/synapse/issues/11009), [\#11012](https://github.com/matrix-org/synapse/issues/11012))
- Fix 500 error on `/messages` when the server accumulates more than 5 backwards extremities at a given depth for a room. ([\#11027](https://github.com/matrix-org/synapse/issues/11027))
- Fix a bug where setting a user's `external_id` via the admin API returns 500 and deletes user's existing external mappings if that external ID is already mapped. ([\#11051](https://github.com/matrix-org/synapse/issues/11051))
- Fix a long-standing bug where users excluded from the user directory were added into the directory if they belonged to a room which became public or private. ([\#11075](https://github.com/matrix-org/synapse/issues/11075))
- Fix a long-standing bug when attempting to preview URLs which are in the `windows-1252` character encoding. ([\#11077](https://github.com/matrix-org/synapse/issues/11077), [\#11089](https://github.com/matrix-org/synapse/issues/11089))
- Fix broken export-data admin command and add test script checking the command to CI. ([\#11078](https://github.com/matrix-org/synapse/issues/11078))
- Show an error when a timestamp in seconds is provided to the `/purge_media_cache` Admin API. ([\#11101](https://github.com/matrix-org/synapse/issues/11101))
- Fix local users who left all their rooms being removed from the user directory, even if the `search_all_users` config option was enabled. ([\#11103](https://github.com/matrix-org/synapse/issues/11103))
- Fix a bug which caused the module API's `get_user_ip_and_agents` function to always fail on workers. `get_user_ip_and_agents` was introduced in 1.44.0 and did not function correctly on worker processes at the time. ([\#11112](https://github.com/matrix-org/synapse/issues/11112))
- Identity server connection is no longer ignoring `ip_range_whitelist`. ([\#11120](https://github.com/matrix-org/synapse/issues/11120))
- Fix a bug introduced in Synapse 1.45.0 breaking the configuration file parsing script. ([\#11145](https://github.com/matrix-org/synapse/issues/11145))
- Fix a performance regression introduced in 1.44.0 which could cause client requests to time out when making large numbers of outbound requests. ([\#11177](https://github.com/matrix-org/synapse/issues/11177), [\#11190](https://github.com/matrix-org/synapse/issues/11190))
- Resolve and share `state_groups` for all [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) historical events in batch. ([\#10975](https://github.com/matrix-org/synapse/issues/10975))


Improved Documentation
----------------------

- Fix broken links relating to module API deprecation in the upgrade notes. ([\#11069](https://github.com/matrix-org/synapse/issues/11069))
- Add more information about what happens when a user is deactivated. ([\#11083](https://github.com/matrix-org/synapse/issues/11083))
- Clarify that the sample log config can be copied from the documentation without issue. ([\#11092](https://github.com/matrix-org/synapse/issues/11092))
- Update the admin API documentation with an updated list of the characters allowed in registration tokens. ([\#11093](https://github.com/matrix-org/synapse/issues/11093))
- Document Synapse's behaviour when dealing with multiple modules registering the same callbacks and/or handlers for the same HTTP endpoints. ([\#11096](https://github.com/matrix-org/synapse/issues/11096))
- Fix instances of `[example]{.title-ref}` in the upgrade documentation as a result of prior RST to Markdown conversion. ([\#11118](https://github.com/matrix-org/synapse/issues/11118))
- Document the version of Synapse each module callback was introduced in. ([\#11132](https://github.com/matrix-org/synapse/issues/11132))
- Document the version of Synapse that introduced each module API method. ([\#11183](https://github.com/matrix-org/synapse/issues/11183))


Internal Changes
----------------

- Fix spurious warnings about losing the logging context on the `ReplicationCommandHandler` when losing the replication connection. ([\#10984](https://github.com/matrix-org/synapse/issues/10984))
- Include rejected status when we log events. ([\#11008](https://github.com/matrix-org/synapse/issues/11008))
- Add some extra logging to the event persistence code. ([\#11014](https://github.com/matrix-org/synapse/issues/11014))
- Rearrange the internal workings of the incremental user directory updates. ([\#11035](https://github.com/matrix-org/synapse/issues/11035))
- Fix a long-standing bug where users excluded from the directory could still be added to the `users_who_share_private_rooms` table after a regular user joins a private room. ([\#11143](https://github.com/matrix-org/synapse/issues/11143))
- Add and improve type hints. ([\#10972](https://github.com/matrix-org/synapse/issues/10972), [\#11055](https://github.com/matrix-org/synapse/issues/11055), [\#11066](https://github.com/matrix-org/synapse/issues/11066), [\#11076](https://github.com/matrix-org/synapse/issues/11076), [\#11095](https://github.com/matrix-org/synapse/issues/11095), [\#11109](https://github.com/matrix-org/synapse/issues/11109), [\#11121](https://github.com/matrix-org/synapse/issues/11121), [\#11146](https://github.com/matrix-org/synapse/issues/11146))
- Mark the Synapse package as containing type annotations and fix export declarations so that Synapse pluggable modules may be type checked against Synapse. ([\#11054](https://github.com/matrix-org/synapse/issues/11054))
- Remove dead code from `MediaFilePaths`. ([\#11056](https://github.com/matrix-org/synapse/issues/11056))
- Be more lenient when parsing oEmbed response versions. ([\#11065](https://github.com/matrix-org/synapse/issues/11065))
- Create a separate module for the retention configuration. ([\#11070](https://github.com/matrix-org/synapse/issues/11070))
- Clean up some of the federation event authentication code for clarity. ([\#11115](https://github.com/matrix-org/synapse/issues/11115), [\#11116](https://github.com/matrix-org/synapse/issues/11116), [\#11122](https://github.com/matrix-org/synapse/issues/11122))
- Add docstrings and comments to the application service ephemeral event sending code. ([\#11138](https://github.com/matrix-org/synapse/issues/11138))
- Update the `sign_json` script to support inline configuration of the signing key. ([\#11139](https://github.com/matrix-org/synapse/issues/11139))
- Fix broken link in the docker image README. ([\#11144](https://github.com/matrix-org/synapse/issues/11144))
- Always dump logs from unit tests during CI runs. ([\#11068](https://github.com/matrix-org/synapse/issues/11068))
- Add tests for `MediaFilePaths` class. ([\#11057](https://github.com/matrix-org/synapse/issues/11057))
- Simplify the user admin API tests. ([\#11048](https://github.com/matrix-org/synapse/issues/11048))
- Add a test for the workaround introduced in [\#11042](https://github.com/matrix-org/synapse/pull/11042) concerning the behaviour of third-party rule modules and `SynapseError`s. ([\#11071](https://github.com/matrix-org/synapse/issues/11071))


Synapse 1.45.1 (2021-10-20)
===========================

Bugfixes
--------

- Revert change to counting of deactivated users towards the monthly active users limit, introduced in 1.45.0rc1. ([\#11127](https://github.com/matrix-org/synapse/issues/11127))


Synapse 1.45.0 (2021-10-19)
===========================

No functional changes since Synapse 1.45.0rc2.

Known Issues
------------

- A suspected [performance regression](https://github.com/matrix-org/synapse/issues/11049) which was first reported after the release of 1.44.0 remains unresolved.

  We have not been able to identify a probable cause. Affected users report that setting up a federation sender worker appears to alleviate symptoms of the regression.

Improved Documentation
----------------------

- Reword changelog to clarify concerns about a suspected performance regression in 1.44.0. ([\#11117](https://github.com/matrix-org/synapse/issues/11117))


Synapse 1.45.0rc2 (2021-10-14)
==============================

This release candidate [fixes](https://github.com/matrix-org/synapse/issues/11053) a user directory [bug](https://github.com/matrix-org/synapse/issues/11025) present in 1.45.0rc1.

Known Issues
------------

- A suspected [performance regression](https://github.com/matrix-org/synapse/issues/11049) which was first reported after the release of 1.44.0 remains unresolved.

  We have not been able to identify a probable cause. Affected users report that setting up a federation sender worker appears to alleviate symptoms of the regression.

Bugfixes
--------

- Fix a long-standing bug when using multiple event persister workers where events were not correctly sent down `/sync` due to a race. ([\#11045](https://github.com/matrix-org/synapse/issues/11045))
- Fix a bug introduced in Synapse 1.45.0rc1 where the user directory would stop updating if it processed an event from a user not in the `users` table. ([\#11053](https://github.com/matrix-org/synapse/issues/11053))
- Fix a bug introduced in Synapse 1.44.0 when logging errors during oEmbed processing. ([\#11061](https://github.com/matrix-org/synapse/issues/11061))


Internal Changes
----------------

- Add an 'approximate difference' method to `StateFilter`. ([\#10825](https://github.com/matrix-org/synapse/issues/10825))
- Fix inconsistent behavior of `get_last_client_by_ip` when reporting data that has not been stored in the database yet. ([\#10970](https://github.com/matrix-org/synapse/issues/10970))
- Fix a bug introduced in Synapse 1.21.0 that causes opentracing and Prometheus metrics for replication requests to be measured incorrectly. ([\#10996](https://github.com/matrix-org/synapse/issues/10996))
- Ensure that cache config tests do not share state. ([\#11036](https://github.com/matrix-org/synapse/issues/11036))


Synapse 1.45.0rc1 (2021-10-12)
==============================

**Note:** Media storage provider modules that read from Synapse's configuration need changes as of this version; see the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#upgrading-to-v1450) for more information.

Known Issues
------------

- We are investigating [a performance issue](https://github.com/matrix-org/synapse/issues/11049) which was reported after the release of 1.44.0.
- We are aware of [a bug](https://github.com/matrix-org/synapse/issues/11025) with the user directory when using application services. A second release candidate is expected which will resolve this.

Features
--------

- Add [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069) support to `/account/whoami`. ([\#9655](https://github.com/matrix-org/synapse/issues/9655))
- Support autodiscovery of oEmbed previews. ([\#10822](https://github.com/matrix-org/synapse/issues/10822))
- Add a `user_may_send_3pid_invite` spam checker callback for modules to allow or deny 3PID invites. ([\#10894](https://github.com/matrix-org/synapse/issues/10894))
- Add a spam checker callback to allow or deny room joins. ([\#10910](https://github.com/matrix-org/synapse/issues/10910))
- Include an `update_synapse_database` script in the distribution. Contributed by @Fizzadar at Beeper. ([\#10954](https://github.com/matrix-org/synapse/issues/10954))
- Include exception information in JSON logging output. Contributed by @Fizzadar at Beeper. ([\#11028](https://github.com/matrix-org/synapse/issues/11028))


Bugfixes
--------

- Fix a minor bug in the response to `/_matrix/client/r0/voip/turnServer`. Contributed by @lukaslihotzki. ([\#10922](https://github.com/matrix-org/synapse/issues/10922))
- Fix a bug where empty `yyyy-mm-dd/` directories would be left behind in the media store's `url_cache_thumbnails/` directory. ([\#10924](https://github.com/matrix-org/synapse/issues/10924))
- Fix a bug introduced in Synapse v1.40.0 where the signature checks for room version 8 and 9 could be applied to earlier room versions in some situations. ([\#10927](https://github.com/matrix-org/synapse/issues/10927))
- Fix a long-standing bug wherein deactivated users still count towards the monthly active users limit. ([\#10947](https://github.com/matrix-org/synapse/issues/10947))
- Fix a long-standing bug which meant that events received over federation were sometimes incorrectly accepted into the room state. ([\#10956](https://github.com/matrix-org/synapse/issues/10956))
- Fix a long-standing bug where rebuilding the user directory wouldn't exclude support and deactivated users. ([\#10960](https://github.com/matrix-org/synapse/issues/10960))
- Fix [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send` endpoint rejecting subsequent batches with unknown batch ID error in existing room versions from the room creator. ([\#10962](https://github.com/matrix-org/synapse/issues/10962))
- Fix a bug that could leak local users' per-room nicknames and avatars when the user directory is rebuilt. ([\#10981](https://github.com/matrix-org/synapse/issues/10981))
- Fix a long-standing bug where the remainder of a batch of user directory changes would be silently dropped if the server left a room early in the batch. ([\#10982](https://github.com/matrix-org/synapse/issues/10982))
- Correct a bugfix introduced in Synapse v1.44.0 that would catch the wrong error if a connection is lost before a response could be written to it. ([\#10995](https://github.com/matrix-org/synapse/issues/10995))
- Fix a long-standing bug where local users' per-room nicknames/avatars were visible to anyone who could see you in the user directory. ([\#11002](https://github.com/matrix-org/synapse/issues/11002))
- Fix a long-standing bug where a user's per-room nickname/avatar would overwrite their profile in the user directory when a room was made public. ([\#11003](https://github.com/matrix-org/synapse/issues/11003))
- Work around a regression, introduced in Synapse v1.39.0, that caused `SynapseError`s raised by the experimental third-party rules module callback `check_event_allowed` to be ignored. ([\#11042](https://github.com/matrix-org/synapse/issues/11042))
- Fix a bug in [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) insertion events in rooms that could cause cross-talk/conflicts between batches. ([\#10877](https://github.com/matrix-org/synapse/issues/10877))


Improved Documentation
----------------------

- Change wording ("reference homeserver") in Synapse repository documentation. Contributed by @maxkratz. ([\#10971](https://github.com/matrix-org/synapse/issues/10971))
- Fix a dead URL in development documentation (SAML) and change wording from "Riot" to "Element". Contributed by @maxkratz. ([\#10973](https://github.com/matrix-org/synapse/issues/10973))
- Add additional content to the Welcome and Overview page of the documentation. ([\#10990](https://github.com/matrix-org/synapse/issues/10990))
- Update links to MSCs in documentation. Contributed by @dklimpel. ([\#10991](https://github.com/matrix-org/synapse/issues/10991))


Internal Changes
----------------

- Improve type hinting in `synapse.util`. ([\#10888](https://github.com/matrix-org/synapse/issues/10888))
- Add further type hints to `synapse.storage.util`. ([\#10892](https://github.com/matrix-org/synapse/issues/10892))
- Fix type hints to be compatible with an upcoming change to Twisted. ([\#10895](https://github.com/matrix-org/synapse/issues/10895))
- Update utility code to handle C implementations of frozendict. ([\#10902](https://github.com/matrix-org/synapse/issues/10902))
- Drop old functionality which maintained database compatibility with Synapse versions before v1.31. ([\#10903](https://github.com/matrix-org/synapse/issues/10903))
- Clean-up configuration helper classes for the `ServerConfig` class. ([\#10915](https://github.com/matrix-org/synapse/issues/10915))
- Use direct references to config flags. ([\#10916](https://github.com/matrix-org/synapse/issues/10916), [\#10959](https://github.com/matrix-org/synapse/issues/10959), [\#10985](https://github.com/matrix-org/synapse/issues/10985))
- Clean up some of the federation event authentication code for clarity. ([\#10926](https://github.com/matrix-org/synapse/issues/10926), [\#10940](https://github.com/matrix-org/synapse/issues/10940), [\#10986](https://github.com/matrix-org/synapse/issues/10986), [\#10987](https://github.com/matrix-org/synapse/issues/10987), [\#10988](https://github.com/matrix-org/synapse/issues/10988), [\#11010](https://github.com/matrix-org/synapse/issues/11010), [\#11011](https://github.com/matrix-org/synapse/issues/11011))
- Refactor various parts of the codebase to use `RoomVersion` objects instead of room version identifier strings. ([\#10934](https://github.com/matrix-org/synapse/issues/10934))
- Refactor user directory tests in preparation for upcoming changes. ([\#10935](https://github.com/matrix-org/synapse/issues/10935))
- Include the event id in the logcontext when handling PDUs received over federation. ([\#10936](https://github.com/matrix-org/synapse/issues/10936))
- Fix logged errors in unit tests. ([\#10939](https://github.com/matrix-org/synapse/issues/10939))
- Fix a broken test to ensure that consent configuration works during registration. ([\#10945](https://github.com/matrix-org/synapse/issues/10945))
- Add type hints to filtering classes. ([\#10958](https://github.com/matrix-org/synapse/issues/10958))
- Add type-hint to `HomeserverTestcase.setup_test_homeserver`. ([\#10961](https://github.com/matrix-org/synapse/issues/10961))
- Fix the test utility function `create_room_as` so that `is_public=True` will explicitly set the `visibility` parameter of room creation requests to `public`. Contributed by @AndrewFerr. ([\#10963](https://github.com/matrix-org/synapse/issues/10963))
- Make the release script more robust and transparent. ([\#10966](https://github.com/matrix-org/synapse/issues/10966))
- Refactor [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send` mega function into smaller handler functions. ([\#10974](https://github.com/matrix-org/synapse/issues/10974))
- Log stack traces when a missing opentracing span is detected. ([\#10983](https://github.com/matrix-org/synapse/issues/10983))
- Update GHA config to run tests against Python 3.10 and PostgreSQL 14. ([\#10992](https://github.com/matrix-org/synapse/issues/10992))
- Fix a long-standing bug where `ReadWriteLock`s could drop logging contexts on exit. ([\#10993](https://github.com/matrix-org/synapse/issues/10993))
- Add a `CODEOWNERS` file to automatically request reviews from the `@matrix-org/synapse-core` team on new pull requests. ([\#10994](https://github.com/matrix-org/synapse/issues/10994))
- Add further type hints to `synapse.state`. ([\#11004](https://github.com/matrix-org/synapse/issues/11004))
- Remove the deprecated `BaseHandler` object. ([\#11005](https://github.com/matrix-org/synapse/issues/11005))
- Bump mypy version for CI to 0.910, and pull in new type stubs for dependencies. ([\#11006](https://github.com/matrix-org/synapse/issues/11006))
- Fix CI to run the unit tests without optional deps. ([\#11017](https://github.com/matrix-org/synapse/issues/11017))
- Ensure that cache config tests do not share state. ([\#11019](https://github.com/matrix-org/synapse/issues/11019))
- Add additional type hints to `synapse.server_notices`. ([\#11021](https://github.com/matrix-org/synapse/issues/11021))
- Add additional type hints for `synapse.push`. ([\#11023](https://github.com/matrix-org/synapse/issues/11023))
- When installing the optional developer dependencies, also include the dependencies needed for type-checking and unit testing. ([\#11034](https://github.com/matrix-org/synapse/issues/11034))
- Remove unnecessary list comprehension from `synapse_port_db` to satisfy code style requirements. ([\#11043](https://github.com/matrix-org/synapse/issues/11043))


Synapse 1.44.0 (2021-10-05)
===========================

No significant changes since 1.44.0rc3.


Synapse 1.44.0rc3 (2021-10-04)
==============================

Bugfixes
--------

- Fix a bug introduced in Synapse v1.40.0 where changing a user's display name or avatar in a restricted room would cause an authentication error. ([\#10933](https://github.com/matrix-org/synapse/issues/10933))
- Fix `/admin/whois/{user_id}` endpoint, which was broken in v1.44.0rc1. ([\#10968](https://github.com/matrix-org/synapse/issues/10968))


Synapse 1.44.0rc2 (2021-09-30)
==============================

Bugfixes
--------

- Fix a bug introduced in v1.44.0rc1 which caused the experimental [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send` endpoint to return a 500 error. ([\#10938](https://github.com/matrix-org/synapse/issues/10938))
- Fix a bug introduced in v1.44.0rc1 which prevented sending presence events to application services. ([\#10944](https://github.com/matrix-org/synapse/issues/10944))


Improved Documentation
----------------------

- Minor updates to the installation instructions. ([\#10919](https://github.com/matrix-org/synapse/issues/10919))


Synapse 1.44.0rc1 (2021-09-29)
==============================

Features
--------

- Only allow the [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send?chunk_id=xxx` endpoint to connect to an already existing insertion event. ([\#10776](https://github.com/matrix-org/synapse/issues/10776))
- Improve oEmbed URL previews by processing the author name, photo, and video information. ([\#10814](https://github.com/matrix-org/synapse/issues/10814), [\#10819](https://github.com/matrix-org/synapse/issues/10819))
- Speed up responding with large JSON objects to requests. ([\#10868](https://github.com/matrix-org/synapse/issues/10868), [\#10905](https://github.com/matrix-org/synapse/issues/10905))
- Add a `user_may_create_room_with_invites` spam checker callback to allow modules to allow or deny a room creation request based on the invites and/or 3PID invites it includes. ([\#10898](https://github.com/matrix-org/synapse/issues/10898))


Bugfixes
--------

- Fix a long-standing bug that caused an `AssertionError` when purging history in certain rooms. Contributed by @Kokokokoka. ([\#10690](https://github.com/matrix-org/synapse/issues/10690))
- Fix a long-standing bug which caused deactivated users that were later reactivated to be missing from the user directory. ([\#10782](https://github.com/matrix-org/synapse/issues/10782))
- Fix a long-standing bug that caused unbanning a user by sending a membership event to fail. Contributed by @aaronraimist. ([\#10807](https://github.com/matrix-org/synapse/issues/10807))
- Fix a long-standing bug where logging contexts would go missing when federation requests time out. ([\#10810](https://github.com/matrix-org/synapse/issues/10810))
- Fix a long-standing bug causing an error in the deprecated `/initialSync` endpoint when using the undocumented `from` and `to` parameters. ([\#10827](https://github.com/matrix-org/synapse/issues/10827))
- Fix a bug causing the `remove_stale_pushers` background job to repeatedly fail and log errors. This bug affected Synapse servers that had been upgraded from version 1.28 or older and are using SQLite. ([\#10843](https://github.com/matrix-org/synapse/issues/10843))
- Fix a long-standing bug in Unicode support of the room search admin API breaking search for rooms with non-ASCII characters. ([\#10859](https://github.com/matrix-org/synapse/issues/10859))
- Fix a bug introduced in Synapse 1.37.0 which caused `knock` membership events which we sent to remote servers to be incorrectly stored in the local database. ([\#10873](https://github.com/matrix-org/synapse/issues/10873))
- Fix invalidating one-time key count cache after claiming keys. The bug was introduced in Synapse v1.41.0. Contributed by Tulir at Beeper. ([\#10875](https://github.com/matrix-org/synapse/issues/10875))
- Fix a long-standing bug causing application service users to be subject to MAU blocking if the MAU limit had been reached, even if configured not to be blocked. ([\#10881](https://github.com/matrix-org/synapse/issues/10881))
- Fix a long-standing bug which could cause events pulled over federation to be incorrectly rejected. ([\#10907](https://github.com/matrix-org/synapse/issues/10907))
- Fix a long-standing bug causing URL cache files to be stored in storage providers. Server admins may safely delete the `url_cache/` and `url_cache_thumbnails/` directories from any configured storage providers to reclaim space. ([\#10911](https://github.com/matrix-org/synapse/issues/10911))
- Fix a long-standing bug leading to race conditions when creating media store and config directories. ([\#10913](https://github.com/matrix-org/synapse/issues/10913))


Improved Documentation
----------------------

- Fix some crashes in the Module API example code, by adding JSON encoding/decoding. ([\#10845](https://github.com/matrix-org/synapse/issues/10845))
- Add developer documentation about experimental configuration flags. ([\#10865](https://github.com/matrix-org/synapse/issues/10865))
- Properly remove deleted files from GitHub pages when generating the documentation. ([\#10869](https://github.com/matrix-org/synapse/issues/10869))


Internal Changes
----------------

- Fix GitHub Actions config so we can run sytest on synapse from parallel branches. ([\#10659](https://github.com/matrix-org/synapse/issues/10659))
- Split out [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) meta events to their own fields in the `/batch_send` response. ([\#10777](https://github.com/matrix-org/synapse/issues/10777))
- Add missing type hints to REST servlets. ([\#10785](https://github.com/matrix-org/synapse/issues/10785), [\#10817](https://github.com/matrix-org/synapse/issues/10817))
- Simplify the internal logic which maintains the user directory database tables. ([\#10796](https://github.com/matrix-org/synapse/issues/10796))
- Use direct references to config flags. ([\#10812](https://github.com/matrix-org/synapse/issues/10812), [\#10885](https://github.com/matrix-org/synapse/issues/10885), [\#10893](https://github.com/matrix-org/synapse/issues/10893), [\#10897](https://github.com/matrix-org/synapse/issues/10897))
- Specify the type of token in generic "Invalid token" error messages. ([\#10815](https://github.com/matrix-org/synapse/issues/10815))
- Make `StateFilter` frozen so it is hashable. ([\#10816](https://github.com/matrix-org/synapse/issues/10816))
- Fix a long-standing bug where an `m.room.message` event containing a null byte would cause an internal server error. ([\#10820](https://github.com/matrix-org/synapse/issues/10820))
- Add type hints to the state database. ([\#10823](https://github.com/matrix-org/synapse/issues/10823))
- Opt out of cache expiry for `get_users_who_share_room_with_user`, to hopefully improve `/sync` performance when you haven't synced recently. ([\#10826](https://github.com/matrix-org/synapse/issues/10826))
- Track cache eviction rates more finely in Prometheus's monitoring. ([\#10829](https://github.com/matrix-org/synapse/issues/10829))
- Add missing type hints to `synapse.handlers`. ([\#10831](https://github.com/matrix-org/synapse/issues/10831), [\#10856](https://github.com/matrix-org/synapse/issues/10856))
- Extend the Module API to let plug-ins check whether an ID is local and to access IP + User Agent data. ([\#10833](https://github.com/matrix-org/synapse/issues/10833))
- Factor out PNG image data to a constant to be used in several tests. ([\#10834](https://github.com/matrix-org/synapse/issues/10834))
- Add a test to ensure state events sent by modules get persisted correctly. ([\#10835](https://github.com/matrix-org/synapse/issues/10835))
- Rename [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) fields and event types from `chunk` to `batch` to match the `/batch_send` endpoint. ([\#10838](https://github.com/matrix-org/synapse/issues/10838))
- Rename [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send` query parameter from `?prev_event` to more obvious usage with `?prev_event_id`. ([\#10839](https://github.com/matrix-org/synapse/issues/10839))
- Add type hints to `synapse.http.site`. ([\#10867](https://github.com/matrix-org/synapse/issues/10867))
- Include outlier status when we log V2 or V3 events. ([\#10879](https://github.com/matrix-org/synapse/issues/10879))
- Break down Grafana's cache expiry time series based on reason for eviction, c.f. [\#10829](https://github.com/matrix-org/synapse/issues/10829). ([\#10880](https://github.com/matrix-org/synapse/issues/10880))
- Clean up some of the federation event authentication code for clarity. ([\#10883](https://github.com/matrix-org/synapse/issues/10883), [\#10884](https://github.com/matrix-org/synapse/issues/10884), [\#10896](https://github.com/matrix-org/synapse/issues/10896), [\#10901](https://github.com/matrix-org/synapse/issues/10901))
- Allow the `.` and `~` characters when creating registration tokens as per the change to [MSC3231](https://github.com/matrix-org/matrix-doc/pull/3231). ([\#10887](https://github.com/matrix-org/synapse/issues/10887))
- Clean up some unnecessary parentheses in places around the codebase. ([\#10889](https://github.com/matrix-org/synapse/issues/10889))
- Improve type hinting in the user directory code. ([\#10891](https://github.com/matrix-org/synapse/issues/10891))
- Update development testing script `test_postgresql.sh` to use a supported Python version and make re-runs quicker. ([\#10906](https://github.com/matrix-org/synapse/issues/10906))
- Document and summarize changes in schema version `61` – `64`. ([\#10917](https://github.com/matrix-org/synapse/issues/10917))
- Update release script to sign the newly created git tags. ([\#10925](https://github.com/matrix-org/synapse/issues/10925))
- Fix Debian builds due to `dh-virtualenv` no longer being able to build their docs. ([\#10931](https://github.com/matrix-org/synapse/issues/10931))


Synapse 1.43.0 (2021-09-21)
===========================
```diff
@@ -8,6 +8,7 @@ include demo/demo.tls.dh
 include demo/*.py
 include demo/*.sh

+include synapse/py.typed
 recursive-include synapse/storage *.sql
 recursive-include synapse/storage *.sql.postgres
 recursive-include synapse/storage *.sql.sqlite
```
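Shipping `synapse/py.typed` marks the package as typed under PEP 561, which is what lets type checkers consume the annotations mentioned in the changelog entry above (#11054). A minimal sketch of the consumer side, where the module name is hypothetical:

```bash
# With a py.typed marker shipped, mypy type-checks code that imports
# synapse instead of treating it as an untyped package.
pip install matrix-synapse mypy
mypy my_synapse_module.py  # my_synapse_module.py is a hypothetical plug-in
```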
**README.rst** (11 lines changed)
```diff
@@ -55,11 +55,8 @@ solutions. The hope is for Matrix to act as the building blocks for a new
 generation of fully open and interoperable messaging and VoIP apps for the
 internet.

-Synapse is a reference "homeserver" implementation of Matrix from the core
-development team at matrix.org, written in Python/Twisted. It is intended to
-showcase the concept of Matrix and let folks see the spec in the context of a
-codebase and let you run your own homeserver and generally help bootstrap the
-ecosystem.
+Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
+team, written in Python 3/Twisted.

 In Matrix, every user runs one or more Matrix clients, which connect through to
 a Matrix homeserver. The homeserver stores all their personal chat history and
@@ -288,7 +285,7 @@ Quick start

 Before setting up a development environment for synapse, make sure you have the
 system dependencies (such as the python header files) installed - see
-`Installing from source <https://matrix-org.github.io/synapse/latest/setup/installation.html#installing-from-source>`_.
+`Platform-specific prerequisites <https://matrix-org.github.io/synapse/latest/setup/installation.html#platform-specific-prerequisites>`_.

 To check out a synapse for development, clone the git repo into a working
 directory of your choice::
@@ -301,7 +298,7 @@ to install using pip and a virtualenv::

     python3 -m venv ./env
     source ./env/bin/activate
-    pip install -e ".[all,test]"
+    pip install -e ".[all,dev]"

 This will run a process of downloading and installing all the needed
 dependencies into a virtual env. If any dependencies fail to install,
```
**book.toml** (12 lines changed)
```diff
@@ -34,14 +34,6 @@ additional-css = [
     "docs/website_files/table-of-contents.css",
     "docs/website_files/remove-nav-buttons.css",
     "docs/website_files/indent-section-headers.css",
-    "docs/website_files/version-picker.css",
 ]
-additional-js = [
-    "docs/website_files/table-of-contents.js",
-    "docs/website_files/version-picker.js",
-    "docs/website_files/version.js",
-]
+additional-js = ["docs/website_files/table-of-contents.js"]
 theme = "docs/website_files/theme"
-
-[preprocessor.schema_versions]
-command = "./scripts-dev/schema_versions.py"
```

```diff
@@ -6785,7 +6785,7 @@
         "expr": "rate(synapse_util_caches_cache:evicted_size{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
         "format": "time_series",
         "intervalFactor": 1,
-        "legendFormat": "{{name}} {{job}}-{{index}}",
+        "legendFormat": "{{name}} ({{reason}}) {{job}}-{{index}}",
         "refId": "A"
       }
     ],
@@ -10888,5 +10888,5 @@
   "timezone": "",
   "title": "Synapse",
   "uid": "000000012",
-  "version": 99
+  "version": 100
 }
```
**debian/changelog** (vendored, 68 lines changed)
```diff
@@ -1,3 +1,71 @@
+matrix-synapse-py3 (1.46.0) stable; urgency=medium
+
+  [ Richard van der Hoff ]
+  * Compress debs with xz, to fix incompatibility of impish debs with reprepro.
+
+  [ Synapse Packaging team ]
+  * New synapse release 1.46.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Nov 2021 13:22:53 +0000
+
+matrix-synapse-py3 (1.46.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.46.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Oct 2021 14:04:04 +0100
+
+matrix-synapse-py3 (1.45.1) stable; urgency=medium
+
+  * New synapse release 1.45.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 20 Oct 2021 11:58:27 +0100
+
+matrix-synapse-py3 (1.45.0) stable; urgency=medium
+
+  * New synapse release 1.45.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Oct 2021 11:18:53 +0100
+
+matrix-synapse-py3 (1.45.0~rc2) stable; urgency=medium
+
+  * New synapse release 1.45.0~rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 14 Oct 2021 10:58:24 +0100
+
+matrix-synapse-py3 (1.45.0~rc1) stable; urgency=medium
+
+  [ Nick @ Beeper ]
+  * Include an `update_synapse_database` script in the distribution.
+
+  [ Synapse Packaging team ]
+  * New synapse release 1.45.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Oct 2021 10:46:27 +0100
+
+matrix-synapse-py3 (1.44.0) stable; urgency=medium
+
+  * New synapse release 1.44.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Oct 2021 13:43:57 +0100
+
+matrix-synapse-py3 (1.44.0~rc3) stable; urgency=medium
+
+  * New synapse release 1.44.0~rc3.
+
+ -- Synapse Packaging team <packages@matrix.org>  Mon, 04 Oct 2021 14:57:22 +0100
+
+matrix-synapse-py3 (1.44.0~rc2) stable; urgency=medium
+
+  * New synapse release 1.44.0~rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 30 Sep 2021 12:39:10 +0100
+
+matrix-synapse-py3 (1.44.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.44.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Sep 2021 13:41:28 +0100
+
 matrix-synapse-py3 (1.43.0) stable; urgency=medium

   * New synapse release 1.43.0.
```
**debian/matrix-synapse-py3.links** (vendored, 1 line changed)
```diff
@@ -3,3 +3,4 @@ opt/venvs/matrix-synapse/bin/register_new_matrix_user usr/bin/register_new_matri
 opt/venvs/matrix-synapse/bin/synapse_port_db usr/bin/synapse_port_db
 opt/venvs/matrix-synapse/bin/synapse_review_recent_signups usr/bin/synapse_review_recent_signups
 opt/venvs/matrix-synapse/bin/synctl usr/bin/synctl
+opt/venvs/matrix-synapse/bin/update_synapse_database usr/bin/update_synapse_database
```
**debian/rules** (vendored, 6 lines changed)
```diff
@@ -51,5 +51,11 @@ override_dh_shlibdeps:
 override_dh_virtualenv:
 	./debian/build_virtualenv

+override_dh_builddeb:
+	# force the compression to xzip, to stop dpkg-deb on impish defaulting to zstd
+	# (which requires reprepro 5.3.0-1.3, which is currently only in 'experimental' in Debian:
+	# https://metadata.ftp-master.debian.org/changelogs/main/r/reprepro/reprepro_5.3.0-1.3_changelog)
+	dh_builddeb -- -Zxz
+
 %:
 	dh $@ --with python-virtualenv
```
```diff
@@ -47,8 +47,9 @@ RUN apt-get update -qq -o Acquire::Languages=none \
     && cd /dh-virtualenv \
     && env DEBIAN_FRONTEND=noninteractive mk-build-deps -ri -t "apt-get -y --no-install-recommends"

-# build it
-RUN cd /dh-virtualenv && dpkg-buildpackage -us -uc -b
+# Build it. Note that building the docs doesn't work due to differences in
+# Sphinx APIs across versions/distros.
+RUN cd /dh-virtualenv && DEB_BUILD_OPTIONS=nodoc dpkg-buildpackage -us -uc -b

 ###
 ### Stage 1
```
```diff
@@ -1,6 +1,6 @@
 # Use the Sytest image that comes with a lot of the build dependencies
 # pre-installed
-FROM matrixdotorg/sytest:latest
+FROM matrixdotorg/sytest:bionic

 # The Sytest image doesn't come with python, so install that
 RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip
@@ -8,5 +8,23 @@ RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip
 # We need tox to run the tests in run_pg_tests.sh
 RUN python3 -m pip install tox

-ADD run_pg_tests.sh /pg_tests.sh
-ENTRYPOINT /pg_tests.sh
+# Initialise the db
+RUN su -c '/usr/lib/postgresql/10/bin/initdb -D /var/lib/postgresql/data -E "UTF-8" --lc-collate="C.UTF-8" --lc-ctype="C.UTF-8" --username=postgres' postgres
+
+# Add a user with our UID and GID so that files get created on the host owned
+# by us, not root.
+ARG UID
+ARG GID
+RUN groupadd --gid $GID user
+RUN useradd --uid $UID --gid $GID --groups sudo --no-create-home user
+
+# Ensure we can start postgres by sudo-ing as the postgres user.
+RUN apt-get update && apt-get -qq install -y sudo
+RUN echo "user ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
+
+ADD run_pg_tests.sh /run_pg_tests.sh
+# Use the "exec form" of ENTRYPOINT (https://docs.docker.com/engine/reference/builder/#entrypoint)
+# so that we can `docker run` this container and pass arguments to pg_tests.sh
+ENTRYPOINT ["/run_pg_tests.sh"]
+
+USER user
```

@@ -226,4 +226,5 @@ healthcheck:
 ## Using jemalloc
 
 Jemalloc is embedded in the image and will be used instead of the default allocator.
-You can read about jemalloc by reading the Synapse [README](../README.rst).
+You can read about jemalloc by reading the Synapse
+[README](https://github.com/matrix-org/synapse/blob/HEAD/README.rst#help-synapse-is-slow-and-eats-all-my-ram-cpu).

@@ -10,11 +10,10 @@ set -e
 # Set PGUSER so Synapse's tests know what user to connect to the database with
 export PGUSER=postgres
 
-# Initialise & start the database
-su -c '/usr/lib/postgresql/9.6/bin/initdb -D /var/lib/postgresql/data -E "UTF-8" --lc-collate="en_US.UTF-8" --lc-ctype="en_US.UTF-8" --username=postgres' postgres
-su -c '/usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres
+# Start the database
+sudo -u postgres /usr/lib/postgresql/10/bin/pg_ctl -w -D /var/lib/postgresql/data start
 
 # Run the tests
 cd /src
 export TRIAL_FLAGS="-j 4"
-tox --workdir=/tmp -e py35-postgres
+tox --workdir=./.tox-pg-container -e py36-postgres "$@"

@@ -3,7 +3,7 @@
 ## Historical Note
 This document was originally written to guide server admins through the upgrade
 path towards Synapse 1.0. Specifically,
-[MSC1711](https://github.com/matrix-org/matrix-doc/blob/master/proposals/1711-x509-for-federation.md)
+[MSC1711](https://github.com/matrix-org/matrix-doc/blob/main/proposals/1711-x509-for-federation.md)
 required that all servers present valid TLS certificates on their federation
 API. Admins were encouraged to achieve compliance from version 0.99.0 (released
 in February 2019) ahead of version 1.0 (released June 2019) enforcing the
@@ -282,7 +282,7 @@ coffin of the Perspectives project (which was already pretty dead). So, the
 Spec Core Team decided that a better approach would be to mandate valid TLS
 certificates for federation alongside the rest of the Web. More details can be
 found in
-[MSC1711](https://github.com/matrix-org/matrix-doc/blob/master/proposals/1711-x509-for-federation.md#background-the-failure-of-the-perspectives-approach).
+[MSC1711](https://github.com/matrix-org/matrix-doc/blob/main/proposals/1711-x509-for-federation.md#background-the-failure-of-the-perspectives-approach).
 
 This results in a breaking change, which is disruptive, but absolutely critical
 for the security model. However, the existence of Let's Encrypt as a trivial

@@ -6,9 +6,9 @@ Please update any links to point to the new website instead.
 ## About
 
 This directory currently holds a series of markdown files documenting how to install, use
-and develop Synapse, the reference Matrix homeserver. The documentation is readable directly
-from this repository, but it is recommended to instead browse through the
-[website](https://matrix-org.github.io/synapse) for easier discoverability.
+and develop Synapse. The documentation is readable directly from this repository, but it is
+recommended to instead browse through the [website](https://matrix-org.github.io/synapse) for
+easier discoverability.
 
 ## Adding to the documentation

@@ -43,6 +43,7 @@
 - [Third-party rules callbacks](modules/third_party_rules_callbacks.md)
 - [Presence router callbacks](modules/presence_router_callbacks.md)
 - [Account validity callbacks](modules/account_validity_callbacks.md)
+- [Password auth provider callbacks](modules/password_auth_provider_callbacks.md)
 - [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
 - [Workers](workers.md)
 - [Using `synctl` with Workers](synctl_workers.md)
@@ -74,6 +75,7 @@
 - [Testing]()
 - [OpenTracing](opentracing.md)
 - [Database Schemas](development/database_schema.md)
+- [Experimental features](development/experimental_features.md)
 - [Synapse Architecture]()
 - [Log Contexts](log_contexts.md)
 - [Replication](replication.md)

@@ -257,9 +257,9 @@ POST /_synapse/admin/v1/media/<server_name>/delete?before_ts=<before_ts>

URL Parameters

* `server_name`: string - The name of your local server (e.g `matrix.org`).
-* `before_ts`: string representing a positive integer - Unix timestamp in ms.
+* `before_ts`: string representing a positive integer - Unix timestamp in milliseconds.
  Files that were last used before this timestamp will be deleted. It is the timestamp of
-  last access and not the timestamp creation.
+  last access, not the timestamp when the file was created.
* `size_gt`: Optional - string representing a positive integer - Size of the media in bytes.
  Files that are larger will be deleted. Defaults to `0`.
* `keep_profiles`: Optional - string representing a boolean - Switch to also delete files
@@ -302,7 +302,7 @@ POST /_synapse/admin/v1/purge_media_cache?before_ts=<unix_timestamp_in_ms>

URL Parameters

-* `unix_timestamp_in_ms`: string representing a positive integer - Unix timestamp in ms.
+* `unix_timestamp_in_ms`: string representing a positive integer - Unix timestamp in milliseconds.
  All cached media that was last accessed before this timestamp will be removed.
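
For illustration, a hedged Python sketch of calling this endpoint; the homeserver URL and admin access token are placeholders you must supply, and `requests` is a third-party HTTP client:

```python
import time

import requests

BASE_URL = "https://matrix.example.com"  # placeholder homeserver URL
ADMIN_TOKEN = "<admin_access_token>"     # placeholder admin access token

# Purge all cached remote media that has not been accessed for 30 days.
before_ts = int(time.time() * 1000) - 30 * 24 * 60 * 60 * 1000

resp = requests.post(
    f"{BASE_URL}/_synapse/admin/v1/purge_media_cache",
    params={"before_ts": str(before_ts)},
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
resp.raise_for_status()
print(resp.json())
```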

Response:

@@ -50,7 +50,8 @@ It returns a JSON body like the following:
        "auth_provider": "<provider2>",
        "external_id": "<user_id_provider_2>"
      }
-    ]
+    ],
+    "user_type": null
}
```
@@ -97,7 +98,8 @@ with a body of:
    ],
    "avatar_url": "<avatar_url>",
    "admin": false,
-    "deactivated": false
+    "deactivated": false,
+    "user_type": null
}
```
@@ -135,6 +137,9 @@ Body parameters:
  unchanged on existing accounts and set to `false` for new accounts.
  A user cannot be erased by deactivating with this API. For details on
  deactivating users see [Deactivate Account](#deactivate-account).
- `user_type` - string or null, optional. If provided, the user type will be
  adjusted. If `null` is given, the user type will be cleared. Other
  allowed options are: `bot` and `support`.

If the user already exists then optional parameters default to the current value.
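
As an illustration, a hedged Python sketch of creating or modifying a user with this API (the homeserver URL, admin access token and user ID are placeholders; `requests` is a third-party HTTP client):

```python
import requests

BASE_URL = "https://matrix.example.com"  # placeholder homeserver URL
ADMIN_TOKEN = "<admin_access_token>"     # placeholder admin access token
USER_ID = "@alice:example.com"           # placeholder fully-qualified user ID

resp = requests.put(
    f"{BASE_URL}/_synapse/admin/v2/users/{USER_ID}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={
        "displayname": "Alice",
        "admin": False,
        # Mark the account as a bot; send None (JSON null) to clear the type.
        "user_type": "bot",
    },
)
resp.raise_for_status()
```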

@@ -341,6 +346,7 @@ The following actions are performed when deactivating a user:
- Remove all 3PIDs from the homeserver
- Delete all devices and E2EE keys
- Delete all access tokens
- Delete all pushers
- Delete the password hash
- Removal from all rooms the user is a member of
- Remove the user from the user directory
@@ -354,6 +360,15 @@ is set to `true`:
- Remove the user's avatar URL
- Mark the user as erased

The following actions are **NOT** performed. The list may be incomplete.

- Remove mappings of SSO IDs
- [Delete media uploaded](#delete-media-uploaded-by-a-user) by the user (including avatar images)
- Delete sent and received messages
- Delete E2E cross-signing keys
- Remove the user's creation (registration) timestamp
- [Remove rate limit overrides](#override-ratelimiting-for-users)
- Remove from monthly active users

## Reset password

@@ -50,7 +50,7 @@ setup a *virtualenv*, as follows:
cd path/where/you/have/cloned/the/repository
python3 -m venv ./env
source ./env/bin/activate
-pip install -e ".[all,lint,mypy,test]"
+pip install -e ".[all,dev]"
pip install tox
```
@@ -63,7 +63,7 @@ TBD

# 5. Get in touch.

-Join our developer community on Matrix: #synapse-dev:matrix.org !
+Join our developer community on Matrix: [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org)!


# 6. Pick an issue.

@@ -170,6 +170,53 @@ To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`:
SYNAPSE_TEST_LOG_LEVEL=DEBUG trial tests
```

### Running tests under PostgreSQL

Invoking `trial` as above will use an in-memory SQLite database. This is great for
quick development and testing. However, we recommend using a PostgreSQL database
in production (and indeed, we have some code paths specific to each database).
This means that we need to run our unit tests against PostgreSQL too. Our CI does
this automatically for pull requests and release candidates, but it's sometimes
useful to reproduce this locally.

To do so, [configure Postgres](../postgres.md) and run `trial` with the
following environment variables matching your configuration:

- `SYNAPSE_POSTGRES` to anything nonempty
- `SYNAPSE_POSTGRES_HOST`
- `SYNAPSE_POSTGRES_USER`
- `SYNAPSE_POSTGRES_PASSWORD`

For example:

```shell
export SYNAPSE_POSTGRES=1
export SYNAPSE_POSTGRES_HOST=localhost
export SYNAPSE_POSTGRES_USER=postgres
export SYNAPSE_POSTGRES_PASSWORD=mydevenvpassword
trial
```

#### Prebuilt container

Since configuring PostgreSQL can be fiddly, we can make use of a pre-made
Docker container to set up PostgreSQL and run our tests for us. To do so, run

```shell
scripts-dev/test_postgresql.sh
```

Any extra arguments to the script will be passed to `tox` and then to `trial`,
so we can run a specific test in this container with e.g.

```shell
scripts-dev/test_postgresql.sh tests.replication.test_sharded_event_persister.EventPersisterShardTestCase
```

The container creates a folder in your Synapse checkout called
`.tox-pg-container` and uses this as a tox environment. The output of any
`trial` runs goes into `_trial_temp` in your synapse source directory, the same
as running `trial` directly on your host machine.

## Run the integration tests ([Sytest](https://github.com/matrix-org/sytest)).

docs/development/experimental_features.md (new file, 37 lines)

@@ -0,0 +1,37 @@
# Implementing experimental features in Synapse

It can be desirable to implement "experimental" features which are disabled by
default and must be explicitly enabled via the Synapse configuration. This is
applicable for features which:

* Are unstable in the Matrix spec (e.g. those defined by an MSC that has not yet been merged).
* Developers are not confident in their use by general Synapse administrators/users
  (e.g. a feature is incomplete, buggy, performs poorly, or needs further testing).

Note that this only really applies to features which are expected to be desirable
to a broad audience. The [module infrastructure](../modules/index.md) should
instead be investigated for non-standard features.

Guarding experimental features behind configuration flags should help with some
of the following scenarios:

* Ensure that clients do not assume that unstable features exist (failing
  gracefully if they do not).
* Unstable features do not become de-facto standards and can be removed
  aggressively (since only those who have opted in will be affected).
* Ease finding the implementation of unstable features in Synapse (for future
  removal or stabilization).
* Ease testing a feature (or removal of a feature) by enabling or disabling it
  without code changes. It also becomes possible to ask for wider testing, if desired.

Experimental configuration flags should be disabled by default (requiring Synapse
administrators to explicitly opt in), although there are situations where it makes
sense (from a product point of view) to enable features by default. This is
expected and not an issue.

It is not a requirement for experimental features to be behind a configuration flag,
but one should be used if unsure.

New experimental configuration flags should be added under the `experimental`
configuration key (see the `synapse.config.experimental` file) and either explain
(briefly) what is being enabled, or include the MSC number.
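
As a minimal, self-contained sketch of that pattern (this is not Synapse's actual config code, and the `msc9999_enabled` flag is hypothetical):

```python
from typing import Any, Dict


class ExperimentalConfig:
    """Reads an experimental-features section, defaulting every flag to off."""

    def read_config(self, config: Dict[str, Any]) -> None:
        experimental = config.get("experimental_features") or {}
        # MSC9999 is a placeholder; real flags name the MSC they implement.
        self.msc9999_enabled: bool = bool(experimental.get("msc9999_enabled", False))


parsed_yaml = {"experimental_features": {"msc9999_enabled": True}}
cfg = ExperimentalConfig()
cfg.read_config(parsed_yaml)
print(cfg.msc9999_enabled)  # True
```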

@@ -1,10 +1,9 @@
# How to test SAML as a developer without a server

-https://capriza.github.io/samling/samling.html (https://github.com/capriza/samling) is a great
-resource for being able to tinker with the SAML options within Synapse without needing to
-deploy and configure a complicated software stack.
+https://fujifish.github.io/samling/samling.html (https://github.com/fujifish/samling) is a great resource for being able to tinker with the
+SAML options within Synapse without needing to deploy and configure a complicated software stack.

-To make Synapse (and therefore Riot) use it:
+To make Synapse (and therefore Element) use it:

1. Use the samling.html URL above or deploy your own and visit the IdP Metadata tab.
2. Copy the XML to your clipboard.
@@ -26,9 +25,9 @@ To make Synapse (and therefore Riot) use it:
   the dependencies are installed and ready to go.
7. Restart Synapse.

-Then in Riot:
+Then in Element:

-1. Visit the login page with a Riot pointing at your homeserver.
+1. Visit the login page and point Element towards your homeserver using the `public_baseurl` above.
2. Click the Single Sign-On button.
3. On the samling page, enter a Name Identifier and add a SAML Attribute for `uid=your_localpart`.
   The response must also be signed.

@@ -25,16 +25,14 @@ When Synapse is asked to preview a URL it does the following:
3. Kicks off a background process to generate a preview:
   1. Checks the database cache by URL and timestamp and returns the result if it
      has not expired and was successful (a 2xx return code).
-   2. Checks if the URL matches an oEmbed pattern. If it does, fetch the oEmbed
-      response. If this is an image, replace the URL to fetch and continue. If
-      it is HTML content, use the HTML as the document and continue.
-   3. If it doesn't match an oEmbed pattern, downloads the URL and stores it
-      into a file via the media storage provider and saves the local media
-      metadata.
-   5. If the media is an image:
+   2. Checks if the URL matches an [oEmbed](https://oembed.com/) pattern. If it
+      does, update the URL to download.
+   3. Downloads the URL and stores it into a file via the media storage provider
+      and saves the local media metadata.
+   4. If the media is an image:
      1. Generates thumbnails.
      2. Generates an Open Graph response based on image properties.
-   6. If the media is HTML:
+   5. If the media is HTML:
      1. Decodes the HTML via the stored file.
      2. Generates an Open Graph response from the HTML.
      3. If an image exists in the Open Graph response:
@@ -42,6 +40,13 @@ When Synapse is asked to preview a URL it does the following:
         provider and saves the local media metadata.
      2. Generates thumbnails.
      3. Updates the Open Graph response based on image properties.
+   6. If the media is JSON and an oEmbed URL was found:
+      1. Convert the oEmbed response to an Open Graph response.
+      2. If a thumbnail or image is in the oEmbed response:
+         1. Downloads the URL and stores it into a file via the media storage
+            provider and saves the local media metadata.
+         2. Generates thumbnails.
+         3. Updates the Open Graph response based on image properties.
   7. Stores the result in the database cache.
4. Returns the result.

@@ -9,6 +9,8 @@ The available account validity callbacks are:

### `is_user_expired`

_First introduced in Synapse v1.39.0_

```python
async def is_user_expired(user: str) -> Optional[bool]
```
@@ -22,8 +24,15 @@ If the module returns `True`, the current request will be denied with the error
`ORG_MATRIX_EXPIRED_ACCOUNT` and the HTTP status code 403. Note that this doesn't
invalidate the user's access token.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `None`, Synapse falls through to the next one. The value of the first
callback that does not return `None` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `on_user_registration`

_First introduced in Synapse v1.39.0_

```python
async def on_user_registration(user: str) -> None
```
@@ -31,3 +40,5 @@ async def on_user_registration(user: str) -> None

Called after successfully registering a user, in case the module needs to perform extra
operations to keep track of them (e.g. add them to a database table). The user is
represented by their Matrix user ID.

If multiple modules implement this callback, Synapse runs them all in order.
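
For illustration, a minimal (hypothetical) module registering both callbacks might look like the sketch below; the in-memory sets stand in for a real database:

```python
from typing import Optional

from synapse.module_api import ModuleApi


class ExampleAccountValidity:
    def __init__(self, config: dict, api: ModuleApi):
        # Hypothetical stores standing in for a real backend.
        self.expired_users = set(config.get("expired_users", []))
        self.known_users = set()

        api.register_account_validity_callbacks(
            is_user_expired=self.is_user_expired,
            on_user_registration=self.on_user_registration,
        )

    async def is_user_expired(self, user: str) -> Optional[bool]:
        if user in self.expired_users:
            return True
        # Return None so that other modules (or the default) decide.
        return None

    async def on_user_registration(self, user: str) -> None:
        # Record the new user; a real module might insert a database row.
        self.known_users.add(user)
```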

@@ -2,6 +2,11 @@

Synapse supports extending its functionality by configuring external modules.

**Note**: When using third-party modules, you effectively allow someone else to run
custom code on your Synapse homeserver. Server admins are encouraged to verify the
provenance of the modules they use on their homeserver and make sure the modules aren't
running malicious code on their instance.

## Using modules

To use a module on Synapse, add it to the `modules` section of the configuration file:
@@ -18,17 +23,31 @@ modules:

Each module is defined by a path to a Python class as well as a configuration. This
information for a given module should be available in the module's own documentation.

-**Note**: When using third-party modules, you effectively allow someone else to run
-custom code on your Synapse homeserver. Server admins are encouraged to verify the
-provenance of the modules they use on their homeserver and make sure the modules aren't
-running malicious code on their instance.
## Using multiple modules

-Also note that we are currently in the process of migrating module interfaces to this
-system. While some interfaces might be compatible with it, others still require
-configuring modules in another part of Synapse's configuration file.
The order in which modules are listed in this section is important. When processing an
action that can be handled by several modules, Synapse will always prioritise the module
that appears first (i.e. is the highest in the list). This means:

* If several modules register the same callback, the callback registered by the module
  that appears first is used.
* If several modules try to register a handler for the same HTTP path, only the handler
  registered by the module that appears first is used. Handlers registered by the other
  module(s) are ignored and Synapse will log a warning message about them.

Note that Synapse doesn't allow multiple modules implementing authentication checkers via
the password auth provider feature for the same login type with different fields. If this
happens, Synapse will refuse to start.

## Current status

We are currently in the process of migrating module interfaces to this system. While some
interfaces might be compatible with it, others still require configuring modules in
another part of Synapse's configuration file.

Currently, only the following pre-existing interfaces are compatible with this new system:

* spam checker
* third-party rules
* presence router
* password auth providers

docs/modules/password_auth_provider_callbacks.md (new file, 176 lines)

@@ -0,0 +1,176 @@
# Password auth provider callbacks

Password auth providers offer a way for server administrators to integrate
their Synapse installation with an external authentication system. The callbacks can be
registered by using the Module API's `register_password_auth_provider_callbacks` method.

## Callbacks

### `auth_checkers`

_First introduced in Synapse v1.46.0_

```
auth_checkers: Dict[Tuple[str, Tuple], Callable]
```

A dict mapping from tuples of a login type identifier (such as `m.login.password`) and a
tuple of field names (such as `("password", "secret_thing")`) to authentication checking
callbacks, which should be of the following form:

```python
async def check_auth(
    user: str,
    login_type: str,
    login_dict: "synapse.module_api.JsonDict",
) -> Optional[
    Tuple[
        str,
        Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
    ]
]
```

The login type and field names should be provided by the user in the
request to the `/login` API. [The Matrix specification](https://matrix.org/docs/spec/client_server/latest#authentication-types)
defines some types, however user-defined ones are also allowed.

The callback is passed the `user` field provided by the client (which might not be in
`@username:server` form), the login type, and a dictionary of login secrets passed by
the client.

If the authentication is successful, the module must return the user's Matrix ID (e.g.
`@alice:example.com`) and optionally a callback to be called with the response to the
`/login` request. If the module doesn't wish to return a callback, it must return `None`
instead.

If the authentication is unsuccessful, the module must return `None`.

If multiple modules register an auth checker for the same login type but with different
fields, Synapse will refuse to start.

If multiple modules register an auth checker for the same login type with the same fields,
then the callbacks will be executed in order, until one returns a Matrix User ID (and
optionally a callback). In that case, the return value of that callback will be accepted
and subsequent callbacks will not be fired. If every callback returns `None`, then the
authentication fails.

### `check_3pid_auth`

_First introduced in Synapse v1.46.0_

```python
async def check_3pid_auth(
    medium: str,
    address: str,
    password: str,
) -> Optional[
    Tuple[
        str,
        Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
    ]
]
```

Called when a user attempts to register or log in with a third party identifier,
such as email. It is passed the medium (e.g. `email`), an address (e.g. `jdoe@example.com`)
and the user's password.

If the authentication is successful, the module must return the user's Matrix ID (e.g.
`@alice:example.com`) and optionally a callback to be called with the response to the `/login` request.
If the module doesn't wish to return a callback, it must return `None` instead.

If the authentication is unsuccessful, the module must return `None`.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `None`, Synapse falls through to the next one. The value of the first
callback that does not return `None` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback. If every callback returns `None`,
the authentication is denied.
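
For illustration, a minimal (hypothetical) module registering this callback might look like the sketch below; the hard-coded table stands in for a real authentication backend:

```python
from typing import Awaitable, Callable, Optional, Tuple

import synapse
from synapse import module_api


class My3pidAuthProvider:
    def __init__(self, config: dict, api: module_api):
        self.api = api

        # Hypothetical backend: maps (medium, address) to (password, localpart).
        self.backend = {
            ("email", "jdoe@example.com"): ("secret", "jdoe"),
        }

        api.register_password_auth_provider_callbacks(
            check_3pid_auth=self.check_3pid_auth,
        )

    async def check_3pid_auth(
        self,
        medium: str,
        address: str,
        password: str,
    ) -> Optional[
        Tuple[
            str,
            Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
        ]
    ]:
        entry = self.backend.get((medium, address))
        if entry is None or entry[0] != password:
            # Unknown 3PID or wrong password: fall through to other callbacks.
            return None

        # No post-login callback is needed, hence None as the second element.
        return self.api.get_qualified_user_id(entry[1]), None
```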

### `on_logged_out`

_First introduced in Synapse v1.46.0_

```python
async def on_logged_out(
    user_id: str,
    device_id: Optional[str],
    access_token: str
) -> None
```

Called during a logout request for a user. It is passed the qualified user ID, the ID of the
deactivated device (if any: access tokens are occasionally created without an associated
device ID), and the (now deactivated) access token.

If multiple modules implement this callback, Synapse runs them all in order.

## Example

The example module below implements authentication checkers for two different login types:
- `my.login_type`
  - Expects a `my_field` field to be sent to `/login`
  - Is checked by the method: `self.check_my_login`
- `m.login.password` (defined in [the spec](https://matrix.org/docs/spec/client_server/latest#password-based))
  - Expects a `password` field to be sent to `/login`
  - Is checked by the method: `self.check_pass`

```python
from typing import Awaitable, Callable, Optional, Tuple

import synapse
from synapse import module_api


class MyAuthProvider:
    def __init__(self, config: dict, api: module_api):

        self.api = api

        self.credentials = {
            "bob": "building",
            "@scoop:matrix.org": "digging",
        }

        api.register_password_auth_provider_callbacks(
            auth_checkers={
                ("my.login_type", ("my_field",)): self.check_my_login,
                ("m.login.password", ("password",)): self.check_pass,
            },
        )

    async def check_my_login(
        self,
        username: str,
        login_type: str,
        login_dict: "synapse.module_api.JsonDict",
    ) -> Optional[
        Tuple[
            str,
            Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
        ]
    ]:
        if login_type != "my.login_type":
            return None

        if self.credentials.get(username) == login_dict.get("my_field"):
            # Return the fully-qualified user ID and no post-login callback.
            return self.api.get_qualified_user_id(username), None
        return None

    async def check_pass(
        self,
        username: str,
        login_type: str,
        login_dict: "synapse.module_api.JsonDict",
    ) -> Optional[
        Tuple[
            str,
            Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
        ]
    ]:
        if login_type != "m.login.password":
            return None

        if self.credentials.get(username) == login_dict.get("password"):
            # Return the fully-qualified user ID and no post-login callback.
            return self.api.get_qualified_user_id(username), None
        return None
```

@@ -12,6 +12,9 @@ should register this resource in its `__init__` method using the `register_web_r
method from the `ModuleApi` class (see [this section](writing_a_module.html#registering-a-web-resource) for
more info).

There is no longer a `get_db_schema_files` callback provided for password auth provider modules. Any
changes to the database should now be made by the module using the module API class.

The module's author should also update any example in the module's configuration to only
use the new `modules` section in Synapse's configuration file (see [this section](index.html#using-modules)
for more info).

@@ -10,6 +10,8 @@ The available presence router callbacks are:

### `get_users_for_states`

_First introduced in Synapse v1.42.0_

```python
async def get_users_for_states(
    state_updates: Iterable["synapse.api.UserPresenceState"],
@@ -24,8 +26,14 @@ must return a dictionary that maps from Matrix user IDs (which can be local or r

Synapse will then attempt to send the specified presence updates to each user when possible.

If multiple modules implement this callback, Synapse merges all the dictionaries returned
by the callbacks. If multiple callbacks return a dictionary containing the same key,
Synapse concatenates the sets associated with this key from each dictionary.

### `get_interested_users`

_First introduced in Synapse v1.42.0_

```python
async def get_interested_users(
    user_id: str
@@ -44,6 +52,12 @@ query. The returned users can be local or remote.

Alternatively the callback can return `synapse.module_api.PRESENCE_ALL_USERS`
to indicate that the user should receive updates from all known users.

If multiple modules implement this callback, they will be considered in order. Synapse
calls each callback one by one, and uses a concatenation of all the `set`s returned by the
callbacks. If one callback returns `synapse.module_api.PRESENCE_ALL_USERS`, Synapse uses
this value instead. If this happens, Synapse does not call any of the subsequent
implementations of this callback.

## Example

The example below is a module that implements both presence router callbacks, and ensures

@@ -10,6 +10,8 @@ The available spam checker callbacks are:

### `check_event_for_spam`

_First introduced in Synapse v1.37.0_

```python
async def check_event_for_spam(event: "synapse.events.EventBase") -> Union[bool, str]
```
@@ -19,8 +21,37 @@ either a `bool` to indicate whether the event must be rejected because of spam,
to indicate the event must be rejected because of spam and to give a rejection reason to
forward to clients.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `False`, Synapse falls through to the next one. The value of the first
callback that does not return `False` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.
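
For illustration, a minimal (hypothetical) module registering this callback and rejecting events that contain a configured banned word might look like this sketch:

```python
from typing import Union

import synapse.events
from synapse.module_api import ModuleApi


class BannedWordChecker:
    def __init__(self, config: dict, api: ModuleApi):
        self.banned_word = config.get("banned_word", "buy now")
        api.register_spam_checker_callbacks(
            check_event_for_spam=self.check_event_for_spam,
        )

    async def check_event_for_spam(
        self, event: "synapse.events.EventBase"
    ) -> Union[bool, str]:
        body = event.content.get("body", "")
        if self.banned_word in body:
            # Returning a string rejects the event and gives clients a reason.
            return "Event contained a banned word"
        # Not spam: let any other registered callbacks run.
        return False
```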

### `user_may_join_room`

_First introduced in Synapse v1.37.0_

```python
async def user_may_join_room(user: str, room: str, is_invited: bool) -> bool
```

Called when a user is trying to join a room. The module must return a `bool` to indicate
whether the user can join the room. The user is represented by their Matrix user ID (e.g.
`@alice:example.com`) and the room is represented by its Matrix ID (e.g.
`!room:example.com`). The module is also given a boolean to indicate whether the user
currently has a pending invite in the room.

This callback isn't called if the join is performed by a server administrator, or in the
context of a room creation.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `user_may_invite`

_First introduced in Synapse v1.37.0_

```python
async def user_may_invite(inviter: str, invitee: str, room_id: str) -> bool
```
@@ -29,8 +60,57 @@ Called when processing an invitation. The module must return a `bool` indicating
the inviter can invite the invitee to the given room. Both inviter and invitee are
represented by their Matrix user ID (e.g. `@alice:example.com`).

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `user_may_send_3pid_invite`

_First introduced in Synapse v1.45.0_

```python
async def user_may_send_3pid_invite(
    inviter: str,
    medium: str,
    address: str,
    room_id: str,
) -> bool
```

Called when processing an invitation using a third-party identifier (also called a 3PID,
e.g. an email address or a phone number). The module must return a `bool` indicating
whether the inviter can invite the invitee to the given room.

The inviter is represented by their Matrix user ID (e.g. `@alice:example.com`), and the
invitee is represented by its medium (e.g. "email") and its address
(e.g. `alice@example.com`). See [the Matrix specification](https://matrix.org/docs/spec/appendices#pid-types)
for more information regarding third-party identifiers.

For example, a call to this callback to send an invitation to the email address
`alice@example.com` would look like this:

```python
await user_may_send_3pid_invite(
    "@bob:example.com",        # The inviter's user ID
    "email",                   # The medium of the 3PID to invite
    "alice@example.com",       # The address of the 3PID to invite
    "!some_room:example.com",  # The ID of the room to send the invite into
)
```

**Note**: If the third-party identifier is already associated with a matrix user ID,
[`user_may_invite`](#user_may_invite) will be used instead.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `user_may_create_room`

_First introduced in Synapse v1.37.0_

```python
async def user_may_create_room(user: str) -> bool
```
@@ -38,8 +118,51 @@ async def user_may_create_room(user: str) -> bool

Called when processing a room creation request. The module must return a `bool` indicating
whether the given user (represented by their Matrix user ID) is allowed to create a room.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `user_may_create_room_with_invites`

_First introduced in Synapse v1.44.0_

```python
async def user_may_create_room_with_invites(
    user: str,
    invites: List[str],
    threepid_invites: List[Dict[str, str]],
) -> bool
```

Called when processing a room creation request (right after `user_may_create_room`).
The module is given the Matrix user ID of the user trying to create a room, as well as a
list of Matrix users to invite and a list of third-party identifiers (3PID, e.g. email
addresses) to invite.

An invited Matrix user is represented by their Matrix user ID, and an invited 3PID is
represented by a dict that includes the 3PID medium (e.g. "email") through its
`medium` key and its address (e.g. "alice@example.com") through its `address` key.

See [the Matrix specification](https://matrix.org/docs/spec/appendices#pid-types) for more
information regarding third-party identifiers.

If no invite and/or 3PID invite were specified in the room creation request, the
corresponding list(s) will be empty.

**Note**: This callback is not called when a room is cloned (e.g. during a room upgrade)
since no invites are sent when cloning a room. To cover this case, modules also need to
implement `user_may_create_room`.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `user_may_create_room_alias`

_First introduced in Synapse v1.37.0_

```python
async def user_may_create_room_alias(user: str, room_alias: "synapse.types.RoomAlias") -> bool
```
@@ -48,8 +171,15 @@ Called when trying to associate an alias with an existing room. The module must
`bool` indicating whether the given user (represented by their Matrix user ID) is allowed
to set the given alias.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `user_may_publish_room`

_First introduced in Synapse v1.37.0_

```python
async def user_may_publish_room(user: str, room_id: str) -> bool
```
@@ -58,8 +188,15 @@ Called when trying to publish a room to the homeserver's public rooms directory.
module must return a `bool` indicating whether the given user (represented by their
Matrix user ID) is allowed to publish the given room.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `check_username_for_spam`

_First introduced in Synapse v1.37.0_

```python
async def check_username_for_spam(user_profile: Dict[str, str]) -> bool
```
@@ -75,8 +212,15 @@ is represented as a dictionary with the following keys:

The module is given a copy of the original dictionary, so modifying it from within the
module cannot modify a user's profile when included in user directory search results.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `False`, Synapse falls through to the next one. The value of the first
callback that does not return `False` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `check_registration_for_spam`

_First introduced in Synapse v1.37.0_

```python
async def check_registration_for_spam(
    email_threepid: Optional[dict],
@@ -100,8 +244,16 @@ The arguments passed to this callback are:

  used during the registration process.
* `auth_provider_id`: The identifier of the SSO authentication provider, if any.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `RegistrationBehaviour.ALLOW`, Synapse falls through to the next one.
The value of the first callback that does not return `RegistrationBehaviour.ALLOW` will
be used. If this happens, Synapse will not call any of the subsequent implementations of
this callback.

### `check_media_file_for_spam`

_First introduced in Synapse v1.37.0_

```python
async def check_media_file_for_spam(
    file_wrapper: "synapse.rest.media.v1.media_storage.ReadableFileWrapper",
@@ -112,6 +264,11 @@ async def check_media_file_for_spam(

Called when storing a local or remote file. The module must return a boolean indicating
whether the given file can be stored in the homeserver's media store.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `False`, Synapse falls through to the next one. The value of the first
callback that does not return `False` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

## Example

The example below is a module that implements the spam checker callback
@@ -136,9 +293,9 @@ class IsUserEvilResource(Resource):
        self.evil_users = config.get("evil_users") or []

    def render_GET(self, request: Request):
-        user = request.args.get(b"user")[0]
+        user = request.args.get(b"user")[0].decode()
        request.setHeader(b"Content-Type", b"application/json")
-        return json.dumps({"evil": user in self.evil_users})
+        return json.dumps({"evil": user in self.evil_users}).encode()


class ListSpamChecker:

@@ -10,6 +10,8 @@ The available third party rules callbacks are:

### `check_event_allowed`

_First introduced in Synapse v1.39.0_

```python
async def check_event_allowed(
    event: "synapse.events.EventBase",
@@ -44,8 +46,15 @@ dictionary, and modify the returned dictionary accordingly.

Note that replacing the event only works for events sent by local users, not for events
received over federation.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.
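
As a sketch only (the full signature is truncated in the excerpt above; this assumes the documented two-argument form returning a `(bool, Optional[dict])` tuple, and the module itself is hypothetical):

```python
from typing import Any, Dict, Optional, Tuple

import synapse.events
from synapse.module_api import ModuleApi


class NoTopicChanges:
    """Hypothetical module that rejects changes to room topics."""

    def __init__(self, config: dict, api: ModuleApi):
        api.register_third_party_rules_callbacks(
            check_event_allowed=self.check_event_allowed,
        )

    async def check_event_allowed(
        self,
        event: "synapse.events.EventBase",
        state_events: Dict[Any, Any],  # current state of the room
    ) -> Tuple[bool, Optional[dict]]:
        if event.type == "m.room.topic":
            # First element: whether the event is allowed. The second element
            # may be a replacement event dict; None leaves the event as-is.
            return False, None
        return True, None
```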

### `on_create_room`

_First introduced in Synapse v1.39.0_

```python
async def on_create_room(
    requester: "synapse.types.Requester",
@@ -63,8 +72,16 @@ the request is a server admin.

Modules can modify the `request_content` (by e.g. adding events to its `initial_state`),
or deny the room's creation by raising a `module_api.errors.SynapseError`.

If multiple modules implement this callback, they will be considered in order. If a
callback returns without raising an exception, Synapse falls through to the next one. The
room creation will be forbidden as soon as one of the callbacks raises an exception. If
this happens, Synapse will not call any of the subsequent implementations of this
callback.

### `check_threepid_can_be_invited`

_First introduced in Synapse v1.39.0_

```python
async def check_threepid_can_be_invited(
    medium: str,
@@ -76,8 +93,15 @@ async def check_threepid_can_be_invited(

Called when processing an invite via a third-party identifier (i.e. email or phone number).
The module must return a boolean indicating whether the invite can go through.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `check_visibility_can_be_modified`

_First introduced in Synapse v1.39.0_

```python
async def check_visibility_can_be_modified(
    room_id: str,
@@ -90,6 +114,11 @@ Called when changing the visibility of a room in the local public room directory

visibility is a string that's either "public" or "private". The module must return a
boolean indicating whether the change can go through.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

## Example

The example below is a module that implements the third-party rules callback

@@ -12,6 +12,21 @@ configuration associated with the module in Synapse's configuration file.

See the documentation for the `ModuleApi` class
[here](https://github.com/matrix-org/synapse/blob/master/synapse/module_api/__init__.py).

## When Synapse runs with several modules configured

If Synapse is running with other modules configured, the order in which each module
appears within the `modules` section of the Synapse configuration file might restrict
what it can or cannot register. See [this section](index.html#using-multiple-modules)
for more information.

On top of the rules listed in the link above, if a callback returns a value that should
cause the current operation to fail (e.g. if a callback checking an event returns with a
value that should cause the event to be denied), Synapse will fail the operation and
ignore any subsequent callbacks that should have been run after this one.

The documentation for each callback mentions how Synapse behaves when
multiple modules implement it.

## Handling the module's configuration

A module can implement the following static method:

@@ -1,3 +1,9 @@
<h2 style="color:red">
This page of the Synapse documentation is now deprecated. For up to date
documentation on setting up or writing a password auth provider module, please see
<a href="modules.md">this page</a>.
</h2>

# Password auth provider modules

Password auth providers offer a way for server administrators to

@@ -472,6 +472,48 @@ limit_remote_rooms:
#
#user_ips_max_age: 14d

# Inhibits the /requestToken endpoints from returning an error that might leak
# information about whether an e-mail address is in use or not on this
# homeserver.
# Note that for some endpoints the error situation is the e-mail already being
# used, and for others the error is entering the e-mail being unused.
# If this option is enabled, instead of returning an error, these endpoints will
# act as if no error happened and return a fake session ID ('sid') to clients.
#
#request_token_inhibit_3pid_errors: true

# A list of domains that the domain portion of 'next_link' parameters
# must match.
#
# This parameter is optionally provided by clients while requesting
# validation of an email or phone number, and maps to a link that
# users will be automatically redirected to after validation
# succeeds. Clients can make use of this parameter to aid the validation
# process.
#
# The whitelist is applied whether the homeserver or an
# identity server is handling validation.
#
# The default value is no whitelist functionality; all domains are
# allowed. Setting this value to an empty list will instead disallow
# all domains.
#
#next_link_domain_whitelist: ["matrix.org"]

# Templates to use when generating email or HTML page contents.
#
templates:
  # Directory in which Synapse will try to find template files to use to generate
  # email or HTML page contents.
  # If not set, or a file is not found within the template directory, a default
  # template from within the Synapse package will be used.
  #
  # See https://matrix-org.github.io/synapse/latest/templates.html for more
  # information about using custom templates.
  #
  #custom_template_directory: /path/to/custom/templates/


# Message retention policy at the server level.
#
# Room admins and mods can define a retention period for their rooms using the
@@ -541,47 +583,6 @@ retention:
#  - shortest_max_lifetime: 3d
#    interval: 1d

-# Inhibits the /requestToken endpoints from returning an error that might leak
-# information about whether an e-mail address is in use or not on this
-# homeserver.
-# Note that for some endpoints the error situation is the e-mail already being
-# used, and for others the error is entering the e-mail being unused.
-# If this option is enabled, instead of returning an error, these endpoints will
-# act as if no error happened and return a fake session ID ('sid') to clients.
-#
-#request_token_inhibit_3pid_errors: true
-
-# A list of domains that the domain portion of 'next_link' parameters
-# must match.
-#
-# This parameter is optionally provided by clients while requesting
-# validation of an email or phone number, and maps to a link that
-# users will be automatically redirected to after validation
-# succeeds. Clients can make use of this parameter to aid the validation
-# process.
-#
-# The whitelist is applied whether the homeserver or an
-# identity server is handling validation.
-#
-# The default value is no whitelist functionality; all domains are
-# allowed. Setting this value to an empty list will instead disallow
-# all domains.
-#
-#next_link_domain_whitelist: ["matrix.org"]
-
-# Templates to use when generating email or HTML page contents.
-#
-templates:
-  # Directory in which Synapse will try to find template files to use to generate
-  # email or HTML page contents.
-  # If not set, or a file is not found within the template directory, a default
-  # template from within the Synapse package will be used.
-  #
-  # See https://matrix-org.github.io/synapse/latest/templates.html for more
-  # information about using custom templates.
-  #
-  #custom_template_directory: /path/to/custom/templates/


## TLS ##

@@ -2260,34 +2261,6 @@ email:
#email_validation: "[%(server_name)s] Validate your email"


-# Password providers allow homeserver administrators to integrate
-# their Synapse installation with existing authentication methods
-# ex. LDAP, external tokens, etc.
-#
-# For more information and known implementations, please see
-# https://matrix-org.github.io/synapse/latest/password_auth_providers.html
-#
-# Note: instances wishing to use SAML or CAS authentication should
-# instead use the `saml2_config` or `cas_config` options,
-# respectively.
-#
-password_providers:
-#    # Example config for an LDAP auth provider
-#    - module: "ldap_auth_provider.LdapAuthProvider"
-#      config:
-#        enabled: true
-#        uri: "ldap://ldap.example.com:389"
-#        start_tls: true
-#        base: "ou=users,dc=example,dc=com"
-#        attributes:
-#           uid: "cn"
-#           mail: "email"
-#           name: "givenName"
-#        #bind_dn:
-#        #bind_password:
-#        #filter: "(objectClass=posixAccount)"


## Push ##

@@ -2362,12 +2335,16 @@ user_directory:
  #enabled: false

  # Defines whether to search all users visible to your HS when searching
-  # the user directory, rather than limiting to users visible in public
-  # rooms. Defaults to false.
+  # the user directory. If false, search results will only contain users
+  # visible in public rooms and users sharing a room with the requester.
+  # Defaults to false.
  #
-  # If you set it true, you'll have to rebuild the user_directory search
-  # indexes, see:
-  # https://matrix-org.github.io/synapse/latest/user_directory.html
+  # NB. If you set this to true, and the last time the user_directory search
+  # indexes were (re)built was before Synapse 1.44, you'll have to
+  # rebuild the indexes in order to search through all known users.
+  # These indexes are built the first time Synapse starts; admins can
+  # manually trigger a rebuild following the instructions at
+  # https://matrix-org.github.io/synapse/latest/user_directory.html
  #
  # Uncomment to return search results containing all known users, even if that
  # user does not share a room with the requester.
@@ -18,19 +18,179 @@ that your email address is probably `user@example.com` rather than
|
||||
|
||||
## Installing Synapse
|
||||
|
||||
### Installing from source
|
||||
### Prebuilt packages
|
||||
|
||||
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
|
||||
Prebuilt packages are available for a number of platforms. These are recommended
|
||||
for most users.
|
||||
|
||||
When installing from source please make sure that the [Platform-specific prerequisites](#platform-specific-prerequisites) are already installed.
|
||||
#### Docker images and Ansible playbooks
|
||||
|
||||
There is an official synapse image available at
|
||||
<https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
|
||||
the docker-compose file available at
|
||||
[contrib/docker](https://github.com/matrix-org/synapse/tree/develop/contrib/docker).
|
||||
Further information on this including configuration options is available in the README
|
||||
on hub.docker.com.
|
||||
|
||||
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
||||
Dockerfile to automate a synapse server in a single Docker image, at
|
||||
<https://hub.docker.com/r/avhost/docker-matrix/tags/>
|
||||
|
||||
Slavi Pantaleev has created an Ansible playbook,
|
||||
which installs the offical Docker image of Matrix Synapse
|
||||
along with many other Matrix-related services (Postgres database, Element, coturn,
|
||||
ma1sd, SSL support, etc.).
|
||||
For more details, see
|
||||
<https://github.com/spantaleev/matrix-docker-ansible-deploy>
|
||||
|
||||
#### Debian/Ubuntu
|
||||
|
||||
##### Matrix.org packages
|
||||
|
||||
Matrix.org provides Debian/Ubuntu packages of Synapse, for the amd64
|
||||
architecture via <https://packages.matrix.org/debian/>.
|
||||
|
||||
To install the latest release:
|
||||
|
||||
```sh
|
||||
sudo apt install -y lsb-release wget apt-transport-https
|
||||
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
||||
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
||||
sudo tee /etc/apt/sources.list.d/matrix-org.list
|
||||
sudo apt update
|
||||
sudo apt install matrix-synapse-py3
|
||||
```
|
||||
|
||||
Packages are also published for release candidates. To enable the prerelease
channel, add `prerelease` to the `sources.list` line. For example:

```sh
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main prerelease" |
sudo tee /etc/apt/sources.list.d/matrix-org.list
sudo apt update
sudo apt install matrix-synapse-py3
```

The fingerprint of the repository signing key (as shown by `gpg
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.

##### Downstream Debian packages

We do not recommend using the packages from the default Debian `buster`
repository at this time, as they are old and suffer from known security
vulnerabilities. You can install the latest version of Synapse from
[our repository](#matrixorg-packages) or from `buster-backports`. Please
see the [Debian documentation](https://backports.debian.org/Instructions/)
for information on how to use backports.

If you are using Debian `sid` or testing, Synapse is available in the default
repositories and it should be possible to install it simply with:

```sh
sudo apt install matrix-synapse
```

##### Downstream Ubuntu packages

We do not recommend using the packages in the default Ubuntu repository
at this time, as they are old and suffer from known security vulnerabilities.
The latest version of Synapse can be installed from [our repository](#matrixorg-packages).

#### Fedora

Synapse is in the Fedora repositories as `matrix-synapse`:

```sh
sudo dnf install matrix-synapse
```

Oleg Girko provides Fedora RPMs at
<https://obs.infoserver.lv/project/monitor/matrix-synapse>

#### OpenSUSE

Synapse is in the OpenSUSE repositories as `matrix-synapse`:

```sh
sudo zypper install matrix-synapse
```

#### SUSE Linux Enterprise Server

Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
<https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/>

#### ArchLinux
|
||||
|
||||
The quickest way to get up and running with ArchLinux is probably with the community package
|
||||
<https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
|
||||
the necessary dependencies.
|
||||
|
||||
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 ):
|
||||
|
||||
```sh
|
||||
sudo pip install --upgrade pip
|
||||
```
|
||||
|
||||
If you encounter an error with lib bcrypt causing an Wrong ELF Class:
|
||||
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
||||
compile it under the right architecture. (This should not be needed if
|
||||
installing under virtualenv):
|
||||
|
||||
```sh
|
||||
sudo pip uninstall py-bcrypt
|
||||
sudo pip install py-bcrypt
|
||||
```
|
||||
|
||||
#### Void Linux

Synapse can be found in the void repositories as 'synapse':

```sh
xbps-install -Su
xbps-install -S synapse
```

#### FreeBSD

Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:

- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
- Packages: `pkg install py37-matrix-synapse`

#### OpenBSD

As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
underlying the homeserver directory (defaults to `/var/synapse`) has to be
mounted with `wxallowed` (cf. `mount(8)`), so consider creating a separate
filesystem and mounting it at `/var/synapse`.

Installing Synapse:

```sh
doas pkg_add synapse
```

#### NixOS

Robin Lambertz has packaged Synapse for NixOS at:
<https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>


### Installing as a Python module from PyPI

It's also possible to install Synapse as a Python module from PyPI.

When following this route please make sure that the [Platform-specific prerequisites](#platform-specific-prerequisites) are already installed.

System requirements:

- POSIX-compliant system (tested on Linux & OS X)
- Python 3.5.2 or later, up to Python 3.9.
- Python 3.6 or later, up to Python 3.9.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org


To install the Synapse homeserver run:

```sh
@@ -203,164 +363,6 @@ be found at <https://docs.microsoft.com/en-us/windows/wsl/install-win10> for
Windows 10 and <https://docs.microsoft.com/en-us/windows/wsl/install-on-server>
for Windows Server.

### Prebuilt packages

As an alternative to installing from source, prebuilt packages are available
for a number of platforms.

#### Docker images and Ansible playbooks

There is an official synapse image available at
<https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
the docker-compose file available at
[contrib/docker](https://github.com/matrix-org/synapse/tree/develop/contrib/docker).
Further information on this including configuration options is available in the README
on hub.docker.com.

Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
Dockerfile to automate a synapse server in a single Docker image, at
<https://hub.docker.com/r/avhost/docker-matrix/tags/>

Slavi Pantaleev has created an Ansible playbook,
which installs the official Docker image of Matrix Synapse
along with many other Matrix-related services (Postgres database, Element, coturn,
ma1sd, SSL support, etc.).
For more details, see
<https://github.com/spantaleev/matrix-docker-ansible-deploy>

#### Debian/Ubuntu

##### Matrix.org packages

Matrix.org provides Debian/Ubuntu packages of Synapse via
<https://packages.matrix.org/debian/>. To install the latest release:

```sh
sudo apt install -y lsb-release wget apt-transport-https
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
sudo tee /etc/apt/sources.list.d/matrix-org.list
sudo apt update
sudo apt install matrix-synapse-py3
```

Packages are also published for release candidates. To enable the prerelease
channel, add `prerelease` to the `sources.list` line. For example:

```sh
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main prerelease" |
sudo tee /etc/apt/sources.list.d/matrix-org.list
sudo apt update
sudo apt install matrix-synapse-py3
```

The fingerprint of the repository signing key (as shown by `gpg
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.

##### Downstream Debian packages

We do not recommend using the packages from the default Debian `buster`
repository at this time, as they are old and suffer from known security
vulnerabilities. You can install the latest version of Synapse from
[our repository](#matrixorg-packages) or from `buster-backports`. Please
see the [Debian documentation](https://backports.debian.org/Instructions/)
for information on how to use backports.

If you are using Debian `sid` or testing, Synapse is available in the default
repositories and it should be possible to install it simply with:

```sh
sudo apt install matrix-synapse
```

##### Downstream Ubuntu packages

We do not recommend using the packages in the default Ubuntu repository
at this time, as they are old and suffer from known security vulnerabilities.
The latest version of Synapse can be installed from [our repository](#matrixorg-packages).

#### Fedora

Synapse is in the Fedora repositories as `matrix-synapse`:

```sh
sudo dnf install matrix-synapse
```

Oleg Girko provides Fedora RPMs at
<https://obs.infoserver.lv/project/monitor/matrix-synapse>

#### OpenSUSE

Synapse is in the OpenSUSE repositories as `matrix-synapse`:

```sh
sudo zypper install matrix-synapse
```

#### SUSE Linux Enterprise Server

Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
<https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/>

#### ArchLinux

The quickest way to get up and running with ArchLinux is probably with the community package
<https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
the necessary dependencies.

pip may be outdated (6.0.7-1 needs to be upgraded to 6.0.8-1):

```sh
sudo pip install --upgrade pip
```

If you encounter an error with the bcrypt library causing a Wrong ELF Class:
ELFCLASS32 error (on x64 systems), you may need to reinstall py-bcrypt so that it is
compiled for the correct architecture. (This should not be needed if
installing under virtualenv):

```sh
sudo pip uninstall py-bcrypt
sudo pip install py-bcrypt
```

#### Void Linux

Synapse can be found in the void repositories as 'synapse':

```sh
xbps-install -Su
xbps-install -S synapse
```

#### FreeBSD

Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:

- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
- Packages: `pkg install py37-matrix-synapse`

#### OpenBSD

As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
underlying the homeserver directory (defaults to `/var/synapse`) has to be
mounted with `wxallowed` (cf. `mount(8)`), so consider creating a separate
filesystem and mounting it at `/var/synapse`.

Installing Synapse:

```sh
doas pkg_add synapse
```

#### NixOS

Robin Lambertz has packaged Synapse for NixOS at:
<https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>

## Setting up Synapse

Once you have installed synapse as above, you will need to configure it.

@@ -85,6 +85,22 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```

# Upgrading to v1.45.0

## Changes required to media storage provider modules when reading from the Synapse configuration object

Media storage provider modules that read from the Synapse configuration object (i.e. that
read the value of `hs.config.[...]`) now need to specify the configuration section they're
reading from. This means that if a module reads the value of e.g. `hs.config.media_store_path`,
it needs to replace it with `hs.config.media.media_store_path`.

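For illustration, a minimal sketch of what this change looks like inside a module; the class and attribute names below are hypothetical, and only the two `hs.config` paths come from the note above:

```python
class ExampleStorageProvider:
    """Hypothetical media storage provider module."""

    def __init__(self, hs, config):
        # Before Synapse v1.45.0, the option was read from the top level:
        #     self.base_directory = hs.config.media_store_path
        # From v1.45.0, it must be read from the `media` config section:
        self.base_directory = hs.config.media.media_store_path
```
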
# Upgrading to v1.44.0

## The URL preview cache is no longer mirrored to storage providers
The `url_cache/` and `url_cache_thumbnails/` directories in the media store are
no longer mirrored to storage providers. These two directories can be safely
deleted from any configured storage providers to reclaim space.

# Upgrading to v1.43.0

## The spaces summary APIs can now be handled by workers
@@ -171,8 +187,8 @@ of this endpoint modifying the media store.

The current third-party rules module interface is deprecated in favour of the new generic
modules system introduced in Synapse v1.37.0. Authors of third-party rules modules can refer
to [this documentation](modules.md#porting-an-existing-module-that-uses-the-old-interface)
to update their modules. Synapse administrators can refer to [this documentation](modules.md#using-modules)
to [this documentation](modules/porting_legacy_module.md)
to update their modules. Synapse administrators can refer to [this documentation](modules/index.md)
to update their configuration once the modules they are using have been updated.

We plan to remove support for the current third-party rules interface in September 2021.
@@ -221,9 +237,9 @@ SQLite databases are unaffected by this change.

The current spam checker interface is deprecated in favour of a new generic modules system.
Authors of spam checker modules can refer to [this
documentation](modules.md#porting-an-existing-module-that-uses-the-old-interface)
documentation](modules/porting_legacy_module.md)
to update their modules. Synapse administrators can refer to [this
documentation](modules.md#using-modules)
documentation](modules/index.md)
to update their configuration once the modules they are using have been updated.

We plan to remove support for the current spam checker interface in August 2021.
@@ -332,24 +348,24 @@ Please ensure your Application Services are up to date.
## Requirement for X-Forwarded-Proto header

When using Synapse with a reverse proxy (in particular, when using the
[x_forwarded]{.title-ref} option on an HTTP listener), Synapse now
expects to receive an [X-Forwarded-Proto]{.title-ref} header on incoming
`x_forwarded` option on an HTTP listener), Synapse now
expects to receive an `X-Forwarded-Proto` header on incoming
HTTP requests. If it is not set, Synapse will log a warning on each
received request.

To avoid the warning, administrators using a reverse proxy should ensure
that the reverse proxy sets the [X-Forwarded-Proto]{.title-ref} header to
[https]{.title-ref} or [http]{.title-ref} to indicate the protocol used
that the reverse proxy sets the `X-Forwarded-Proto` header to
`https` or `http` to indicate the protocol used
by the client.

Synapse also requires the [Host]{.title-ref} header to be preserved.
Synapse also requires the `Host` header to be preserved.

See the [reverse proxy documentation](reverse_proxy.md), where the
example configurations have been updated to show how to set these
headers.

(Users of [Caddy](https://caddyserver.com/) are unaffected, since we
believe it sets [X-Forwarded-Proto]{.title-ref} by default.)
believe it sets `X-Forwarded-Proto` by default.)

# Upgrading to v1.27.0

@@ -513,13 +529,13 @@ mapping provider to specify different algorithms, instead of the
way](<https://matrix.org/docs/spec/appendices#mapping-from-other-character-sets>).

If your Synapse configuration uses a custom mapping provider
([oidc_config.user_mapping_provider.module]{.title-ref} is specified and
(`oidc_config.user_mapping_provider.module` is specified and
not equal to
[synapse.handlers.oidc_handler.JinjaOidcMappingProvider]{.title-ref})
then you *must* ensure that [map_user_attributes]{.title-ref} of the
`synapse.handlers.oidc_handler.JinjaOidcMappingProvider`)
then you *must* ensure that `map_user_attributes` of the
mapping provider performs some normalisation of the
[localpart]{.title-ref} returned. To match previous behaviour you can
use the [map_username_to_mxid_localpart]{.title-ref} function provided
`localpart` returned. To match previous behaviour you can
use the `map_username_to_mxid_localpart` function provided
by Synapse. An example is shown below:

```python
@@ -548,7 +564,7 @@ v1.24.0. The Admin API is now only accessible under:

- `/_synapse/admin/v1`

The only exception is the [/admin/whois]{.title-ref} endpoint, which is
The only exception is the `/admin/whois` endpoint, which is
[also available via the client-server
API](https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-admin-whois-userid).

@@ -623,7 +639,7 @@ This page will appear to the user after clicking a password reset link
that has been emailed to them.

To complete password reset, the page must include a way to make a
[POST]{.title-ref} request to
`POST` request to
`/_synapse/client/password_reset/{medium}/submit_token` with the query
parameters from the original link, presented as a URL-encoded form. See
the file itself for more details.
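As an illustrative sketch of such a request (the homeserver URL is a placeholder, and the query parameter names are assumed to be those carried by the emailed reset link, not taken from this document):

```python
import requests

# Placeholder values: in practice these are taken from the password-reset link.
form = {"token": "<token>", "client_secret": "<client_secret>", "sid": "<sid>"}

# Submit the token as a URL-encoded form, as described above.
resp = requests.post(
    "https://synapse.example.com/_synapse/client/password_reset/email/submit_token",
    data=form,
)
resp.raise_for_status()
```
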
@@ -644,18 +660,18 @@ but the parameters are slightly different:

# Upgrading to v1.18.0

## Docker [-py3]{.title-ref} suffix will be removed in future versions
## Docker `-py3` suffix will be removed in future versions

From 10th August 2020, we will no longer publish Docker images with the
[-py3]{.title-ref} tag suffix. The images tagged with the
[-py3]{.title-ref} suffix have been identical to the non-suffixed tags
`-py3` tag suffix. The images tagged with the
`-py3` suffix have been identical to the non-suffixed tags
since release 0.99.0, and the suffix is obsolete.

On 10th August, we will remove the [latest-py3]{.title-ref} tag.
Existing per-release tags (such as [v1.18.0-py3]{.title-ref}) will not
be removed, but no new [-py3]{.title-ref} tags will be added.
On 10th August, we will remove the `latest-py3` tag.
Existing per-release tags (such as `v1.18.0-py3`) will not
be removed, but no new `-py3` tags will be added.

Scripts relying on the [-py3]{.title-ref} suffix will need to be
Scripts relying on the `-py3` suffix will need to be
updated.

## Redis replication is now recommended in lieu of TCP replication
@@ -689,8 +705,8 @@ This will *not* be a problem for Synapse installations which were:
If completeness of the room directory is a concern, installations which
are affected can be repaired as follows:

1. Run the following sql from a [psql]{.title-ref} or
[sqlite3]{.title-ref} console:
1. Run the following sql from a `psql` or
`sqlite3` console:

```sql
INSERT INTO background_updates (update_name, progress_json, depends_on) VALUES
@@ -754,8 +770,8 @@ participating in many rooms.
of any problems.

1. As an initial check to see if you will be affected, you can try
running the following query from the [psql]{.title-ref} or
[sqlite3]{.title-ref} console. It is safe to run it while Synapse is
running the following query from the `psql` or
`sqlite3` console. It is safe to run it while Synapse is
still running.

```sql
@@ -1337,9 +1353,9 @@ first need to upgrade the database by running:

python scripts/upgrade_db_to_v0.6.0.py <db> <server_name> <signing_key>

Where [<db>]{.title-ref} is the location of the database,
[<server_name>]{.title-ref} is the server name as specified in the
synapse configuration, and [<signing_key>]{.title-ref} is the location
Where `<db>` is the location of the database,
`<server_name>` is the server name as specified in the
synapse configuration, and `<signing_key>` is the location
of the signing key as specified in the synapse configuration.

This may take some time to complete. Failures of signatures and content

@@ -1,7 +1,8 @@
# Registration Tokens

This API allows you to manage tokens which can be used to authenticate
registration requests, as proposed in [MSC3231](https://github.com/govynnus/matrix-doc/blob/token-registration/proposals/3231-token-authenticated-registration.md).
registration requests, as proposed in
[MSC3231](https://github.com/matrix-org/matrix-doc/blob/main/proposals/3231-token-authenticated-registration.md).
To use it, you will need to enable the `registration_requires_token` config
option, and authenticate by providing an `access_token` for a server admin:
see [Admin API](../../usage/administration/admin_api).
@@ -148,7 +149,7 @@ POST /_synapse/admin/v1/registration_tokens/new

The request body must be a JSON object and can contain the following fields:
- `token`: The registration token. A string of no more than 64 characters that
consists only of characters matched by the regex `[A-Za-z0-9-_]`.
consists only of characters matched by the regex `[A-Za-z0-9._~-]`.
Default: randomly generated.
- `uses_allowed`: The integer number of times the token can be used to complete
a registration before it becomes invalid.

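As an illustrative sketch of calling this endpoint (the homeserver URL, admin token, and token value below are placeholders, not values from this document):

```python
import requests

SERVER = "https://synapse.example.com"  # placeholder homeserver URL
ADMIN_TOKEN = "<admin access_token>"  # placeholder admin credential

# Create a token valid for five registrations; the token string must
# match the regex [A-Za-z0-9._~-] described above.
resp = requests.post(
    f"{SERVER}/_synapse/admin/v1/registration_tokens/new",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={"token": "example.token-1", "uses_allowed": 5},
)
resp.raise_for_status()
print(resp.json())
```
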
@@ -2,13 +2,13 @@

Below is a sample logging configuration file. This file can be tweaked to control how your
homeserver will output logs. A restart of the server is generally required to apply any
changes made to this file.
changes made to this file. The value of the `log_config` option in your homeserver
config should be the path to this file.

Note that the contents below are *not* intended to be copied and used as the basis for
a real homeserver.yaml. Instead, if you are starting from scratch, please generate
a fresh config using Synapse by following the instructions in
[Installation](../../setup/installation.md).
Note that a default logging configuration (shown below) is created automatically alongside
the homeserver config when following the [installation instructions](../../setup/installation.md).
It should be named `<SERVERNAME>.log.config` by default.

```yaml
{{#include ../../sample_log_config.yaml}}
```

@@ -24,11 +24,6 @@ Finally, we also stylise the chapter titles in the left sidebar by indenting them
slightly so that they are more visually distinguishable from the section headers
(the bold titles). This is done through the `indent-section-headers.css` file.

In addition to these modifications, we have added a version picker to the documentation.
Users can switch between the documentation for different versions of Synapse.
This functionality was implemented through the `version-picker.js` and
`version-picker.css` files.

More information can be found in mdbook's official documentation for
[injecting page JS/CSS](https://rust-lang.github.io/mdBook/format/config.html)
and

@@ -131,18 +131,6 @@
<i class="fa fa-search"></i>
</button>
{{/if}}
<div class="version-picker">
    <div class="dropdown">
        <div class="select">
            <span></span>
            <i class="fa fa-chevron-down"></i>
        </div>
        <input type="hidden" name="version">
        <ul class="dropdown-menu">
            <!-- Versions will be added dynamically in version-picker.js -->
        </ul>
    </div>
</div>
</div>

<h1 class="menu-title">{{ book_title }}</h1>
@@ -321,4 +309,4 @@
{{/if}}

</body>
</html>
@@ -1,78 +0,0 @@
.version-picker {
    display: flex;
    align-items: center;
}

.version-picker .dropdown {
    width: 130px;
    max-height: 29px;
    margin-left: 10px;
    display: inline-block;
    border-radius: 4px;
    border: 1px solid var(--theme-popup-border);
    position: relative;
    font-size: 13px;
    color: var(--fg);
    height: 100%;
    text-align: left;
}
.version-picker .dropdown .select {
    cursor: pointer;
    display: block;
    padding: 5px 2px 5px 15px;
}
.version-picker .dropdown .select > i {
    font-size: 10px;
    color: var(--fg);
    cursor: pointer;
    float: right;
    line-height: 20px !important;
}
.version-picker .dropdown:hover {
    border: 1px solid var(--theme-popup-border);
}
.version-picker .dropdown:active {
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active:hover,
.version-picker .dropdown.active {
    border: 1px solid var(--theme-popup-border);
    border-radius: 2px 2px 0 0;
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active .select > i {
    transform: rotate(-180deg);
}
.version-picker .dropdown .dropdown-menu {
    position: absolute;
    background-color: var(--theme-popup-bg);
    width: 100%;
    left: -1px;
    right: 1px;
    margin-top: 1px;
    border: 1px solid var(--theme-popup-border);
    border-radius: 0 0 4px 4px;
    overflow: hidden;
    display: none;
    max-height: 300px;
    overflow-y: auto;
    z-index: 9;
}
.version-picker .dropdown .dropdown-menu li {
    font-size: 12px;
    padding: 6px 20px;
    cursor: pointer;
}
.version-picker .dropdown .dropdown-menu {
    padding: 0;
    list-style: none;
}
.version-picker .dropdown .dropdown-menu li:hover {
    background-color: var(--theme-hover);
}
.version-picker .dropdown .dropdown-menu li.active::before {
    display: inline-block;
    content: "✓";
    margin-inline-start: -14px;
    width: 14px;
}
@@ -1,127 +0,0 @@

const dropdown = document.querySelector('.version-picker .dropdown');
const dropdownMenu = dropdown.querySelector('.dropdown-menu');

fetchVersions(dropdown, dropdownMenu).then(() => {
    initializeVersionDropdown(dropdown, dropdownMenu);
});

/**
 * Initialize the dropdown functionality for version selection.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 */
function initializeVersionDropdown(dropdown, dropdownMenu) {
    // Toggle the dropdown menu on click
    dropdown.addEventListener('click', function () {
        this.setAttribute('tabindex', 1);
        this.classList.toggle('active');
        dropdownMenu.style.display = (dropdownMenu.style.display === 'block') ? 'none' : 'block';
    });

    // Remove the 'active' class and hide the dropdown menu on focusout
    dropdown.addEventListener('focusout', function () {
        this.classList.remove('active');
        dropdownMenu.style.display = 'none';
    });

    // Handle item selection within the dropdown menu
    const dropdownMenuItems = dropdownMenu.querySelectorAll('li');
    dropdownMenuItems.forEach(function (item) {
        item.addEventListener('click', function () {
            dropdownMenuItems.forEach(function (item) {
                item.classList.remove('active');
            });
            this.classList.add('active');
            dropdown.querySelector('span').textContent = this.textContent;
            dropdown.querySelector('input').value = this.getAttribute('id');

            window.location.href = changeVersion(window.location.href, this.textContent);
        });
    });
};

/**
 * This function fetches the available versions from a GitHub repository
 * and inserts them into the version picker.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 * @returns {Promise<Array<string>>} A promise that resolves with an array of available versions.
 */
function fetchVersions(dropdown, dropdownMenu) {
    return new Promise((resolve, reject) => {
        window.addEventListener("load", () => {

            fetch("https://api.github.com/repos/matrix-org/synapse/git/trees/gh-pages", {
                cache: "force-cache",
            }).then(res =>
                res.json()
            ).then(resObject => {
                const excluded = ['dev-docs', 'v1.91.0', 'v1.80.0', 'v1.69.0'];
                const tree = resObject.tree.filter(item => item.type === "tree" && !excluded.includes(item.path));
                const versions = tree.map(item => item.path).sort(sortVersions);

                // Create a list of <li> items for versions
                versions.forEach((version) => {
                    const li = document.createElement("li");
                    li.textContent = version;
                    li.id = version;

                    if (window.SYNAPSE_VERSION === version) {
                        li.classList.add('active');
                        dropdown.querySelector('span').textContent = version;
                        dropdown.querySelector('input').value = version;
                    }

                    dropdownMenu.appendChild(li);
                });

                resolve(versions);

            }).catch(ex => {
                console.error("Failed to fetch version data", ex);
                reject(ex);
            })
        });
    });
}

/**
 * Custom sorting function to sort an array of version strings.
 *
 * @param {string} a - The first version string to compare.
 * @param {string} b - The second version string to compare.
 * @returns {number} - A negative number if a should come before b, a positive number if b should come before a, or 0 if they are equal.
 */
function sortVersions(a, b) {
    // Put 'develop' and 'latest' at the top
    if (a === 'develop' || a === 'latest') return -1;
    if (b === 'develop' || b === 'latest') return 1;

    const versionA = (a.match(/v\d+(\.\d+)+/) || [])[0];
    const versionB = (b.match(/v\d+(\.\d+)+/) || [])[0];

    return versionB.localeCompare(versionA);
}

/**
 * Change the version in a URL path.
 *
 * @param {string} url - The original URL to be modified.
 * @param {string} newVersion - The new version to replace the existing version in the URL.
 * @returns {string} The updated URL with the new version.
 */
function changeVersion(url, newVersion) {
    const parsedURL = new URL(url);
    const pathSegments = parsedURL.pathname.split('/');

    // Modify the version
    pathSegments[2] = newVersion;

    // Reconstruct the URL
    parsedURL.pathname = pathSegments.join('/');

    return parsedURL.href;
}
@@ -1 +0,0 @@
window.SYNAPSE_VERSION = 'v1.43';
@@ -1,4 +1,79 @@
# Introduction

Welcome to the documentation repository for Synapse, the reference
[Matrix](https://matrix.org) homeserver implementation.
Welcome to the documentation repository for Synapse, a
[Matrix](https://matrix.org) homeserver implementation developed by the matrix.org core
team.

## Installing and using Synapse

This documentation covers topics for **installation**, **configuration** and
**maintenance** of your Synapse process:

* Learn how to [install](setup/installation.md) and
  [configure](usage/configuration/index.html) your own instance, perhaps with [Single
  Sign-On](usage/configuration/user_authentication/index.html).

* See how to [upgrade](upgrade.md) between Synapse versions.

* Administer your instance using the [Admin
  API](usage/administration/admin_api/index.html), installing [pluggable
  modules](modules/index.html), or by accessing the [manhole](manhole.md).

* Learn how to [read log lines](usage/administration/request_log.md), configure
  [logging](usage/configuration/logging_sample_config.md) or set up [structured
  logging](structured_logging.md).

* Scale Synapse through additional [worker processes](workers.md).

* Set up [monitoring and metrics](metrics-howto.md) to keep an eye on your
  Synapse instance's performance.

## Developing on Synapse

Contributions are welcome! Synapse is primarily written in
[Python](https://python.org). As a developer, you may be interested in the
following documentation:

* Read the [Contributing Guide](development/contributing_guide.md). It is meant
  to walk new contributors through the process of developing and submitting a
  change to the Synapse codebase (which is [hosted on
  GitHub](https://github.com/matrix-org/synapse)).

* Set up your [development
  environment](development/contributing_guide.md#2-what-do-i-need), then learn
  how to [lint](development/contributing_guide.md#run-the-linters) and
  [test](development/contributing_guide.md#8-test-test-test) your code.

* Look at [the issue tracker](https://github.com/matrix-org/synapse/issues) for
  bugs to fix or features to add. If you're new, it may be best to start with
  those labeled [good first
  issue](https://github.com/matrix-org/synapse/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22).

* Understand [how Synapse is
  built](development/internal_documentation/index.html), how to [migrate
  database schemas](development/database_schema.md), learn about
  [federation](federate.md) and how to [set up a local
  federation](federate.md#running-a-demo-federation-of-synapses) for development.

* We like to keep our `git` history clean. [Learn](development/git.md) how to
  do so!

* And finally, contribute to this documentation! Its source is
  [located here](https://github.com/matrix-org/synapse/tree/develop/docs).

## Donating to Synapse development

Want to help keep Synapse going but don't know how to code? Synapse is a
[Matrix.org Foundation](https://matrix.org) project. Consider becoming a
supporter on [Liberapay](https://liberapay.com/matrixdotorg),
[Patreon](https://patreon.com/matrixdotorg) or through
[PayPal](https://paypal.me/matrixdotorg) via a one-time donation.

If you are an organisation or enterprise and would like to sponsor development,
reach out to us over email at: support (at) matrix.org

## Reporting a security vulnerability

If you've found a security issue in Synapse or any other Matrix.org Foundation
project, please report it to us in accordance with our [Security Disclosure
Policy](https://www.matrix.org/security-disclosure-policy/). Thank you!

mypy.ini
@@ -22,8 +22,11 @@ files =
  synapse/crypto,
  synapse/event_auth.py,
  synapse/events/builder.py,
  synapse/events/presence_router.py,
  synapse/events/snapshot.py,
  synapse/events/spamcheck.py,
  synapse/events/third_party_rules.py,
  synapse/events/utils.py,
  synapse/events/validator.py,
  synapse/federation,
  synapse/groups,
@@ -53,6 +56,7 @@ files =
  synapse/storage/_base.py,
  synapse/storage/background_updates.py,
  synapse/storage/databases/main/appservice.py,
  synapse/storage/databases/main/client_ips.py,
  synapse/storage/databases/main/events.py,
  synapse/storage/databases/main/keys.py,
  synapse/storage/databases/main/pusher.py,
@@ -60,6 +64,7 @@ files =
  synapse/storage/databases/main/session.py,
  synapse/storage/databases/main/stream.py,
  synapse/storage/databases/main/ui_auth.py,
  synapse/storage/databases/state,
  synapse/storage/database.py,
  synapse/storage/engines,
  synapse/storage/keys.py,
@@ -84,20 +89,74 @@ files =
  tests/handlers/test_room_summary.py,
  tests/handlers/test_send_email.py,
  tests/handlers/test_sync.py,
  tests/handlers/test_user_directory.py,
  tests/rest/client/test_login.py,
  tests/rest/client/test_auth.py,
  tests/rest/client/test_relations.py,
  tests/rest/media/v1/test_filepath.py,
  tests/rest/media/v1/test_oembed.py,
  tests/storage/test_state.py,
  tests/storage/test_user_directory.py,
  tests/util/test_itertools.py,
  tests/util/test_stream_change_cache.py

[mypy-synapse.rest.client.*]
[mypy-synapse.api.*]
disallow_untyped_defs = True

[mypy-synapse.crypto.*]
disallow_untyped_defs = True

[mypy-synapse.events.*]
disallow_untyped_defs = True

[mypy-synapse.handlers.*]
disallow_untyped_defs = True

[mypy-synapse.push.*]
disallow_untyped_defs = True

[mypy-synapse.rest.*]
disallow_untyped_defs = True

[mypy-synapse.server_notices.*]
disallow_untyped_defs = True

[mypy-synapse.state.*]
disallow_untyped_defs = True

[mypy-synapse.storage.databases.main.client_ips]
disallow_untyped_defs = True

[mypy-synapse.storage.util.*]
disallow_untyped_defs = True

[mypy-synapse.streams.*]
disallow_untyped_defs = True

[mypy-synapse.util.batching_queue]
disallow_untyped_defs = True

[mypy-synapse.util.caches.cached_call]
disallow_untyped_defs = True

[mypy-synapse.util.caches.dictionary_cache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.lrucache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.response_cache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.stream_change_cache]
disallow_untyped_defs = True

[mypy-synapse.util.caches.ttl_cache]
disallow_untyped_defs = True

[mypy-synapse.util.daemonize]
disallow_untyped_defs = True

[mypy-synapse.util.file_consumer]
disallow_untyped_defs = True

@@ -134,6 +193,9 @@ disallow_untyped_defs = True
[mypy-synapse.util.msisdn]
disallow_untyped_defs = True

[mypy-synapse.util.patch_inline_callbacks]
disallow_untyped_defs = True

[mypy-synapse.util.ratelimitutils]
disallow_untyped_defs = True

@@ -155,98 +217,106 @@ disallow_untyped_defs = True
[mypy-synapse.util.wheel_timer]
disallow_untyped_defs = True

[mypy-pymacaroons.*]
ignore_missing_imports = True
[mypy-synapse.util.versionstring]
disallow_untyped_defs = True

[mypy-zope]
[mypy-tests.handlers.test_user_directory]
disallow_untyped_defs = True

[mypy-tests.storage.test_user_directory]
disallow_untyped_defs = True

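For context, `disallow_untyped_defs = True` makes mypy reject any function definition without type annotations in the matching modules. A minimal illustration, not taken from the Synapse codebase:

```python
# Rejected under disallow_untyped_defs:
def add(a, b):  # error: Function is missing a type annotation
    return a + b


# Accepted: parameters and return type are fully annotated.
def add_typed(a: int, b: int) -> int:
    return a + b
```
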
;; Dependencies without annotations
;; Before ignoring a module, check to see if type stubs are available.
;; The `typeshed` project maintains stubs here:
;; https://github.com/python/typeshed/tree/master/stubs
;; and for each package `foo` there's a corresponding `types-foo` package on PyPI,
;; which we can pull in as a dev dependency by adding to `setup.py`'s
;; `CONDITIONAL_REQUIREMENTS["mypy"]` list.

[mypy-authlib.*]
ignore_missing_imports = True

[mypy-bcrypt]
ignore_missing_imports = True

[mypy-constantly]
ignore_missing_imports = True

[mypy-twisted.*]
ignore_missing_imports = True

[mypy-treq.*]
ignore_missing_imports = True

[mypy-hyperlink]
ignore_missing_imports = True

[mypy-h11]
ignore_missing_imports = True

[mypy-msgpack]
ignore_missing_imports = True

[mypy-opentracing]
ignore_missing_imports = True

[mypy-OpenSSL.*]
ignore_missing_imports = True

[mypy-netaddr]
ignore_missing_imports = True

[mypy-saml2.*]
ignore_missing_imports = True

[mypy-canonicaljson]
ignore_missing_imports = True

[mypy-jaeger_client.*]
ignore_missing_imports = True

[mypy-jsonschema]
ignore_missing_imports = True

[mypy-signedjson.*]
ignore_missing_imports = True

[mypy-prometheus_client.*]
ignore_missing_imports = True

[mypy-service_identity.*]
[mypy-constantly]
ignore_missing_imports = True

[mypy-daemonize]
ignore_missing_imports = True

[mypy-sentry_sdk]
ignore_missing_imports = True

[mypy-PIL.*]
ignore_missing_imports = True

[mypy-lxml]
ignore_missing_imports = True

[mypy-jwt.*]
ignore_missing_imports = True

[mypy-authlib.*]
ignore_missing_imports = True

[mypy-rust_python_jaeger_reporter.*]
ignore_missing_imports = True

[mypy-nacl.*]
[mypy-h11]
ignore_missing_imports = True

[mypy-hiredis]
ignore_missing_imports = True

[mypy-hyperlink]
ignore_missing_imports = True

[mypy-ijson.*]
ignore_missing_imports = True

[mypy-jaeger_client.*]
ignore_missing_imports = True

[mypy-josepy.*]
ignore_missing_imports = True

[mypy-pympler.*]
[mypy-jwt.*]
ignore_missing_imports = True

[mypy-lxml]
ignore_missing_imports = True

[mypy-msgpack]
ignore_missing_imports = True

[mypy-nacl.*]
ignore_missing_imports = True

[mypy-netaddr]
ignore_missing_imports = True

[mypy-opentracing]
ignore_missing_imports = True

[mypy-phonenumbers.*]
ignore_missing_imports = True

[mypy-ijson.*]
[mypy-prometheus_client.*]
ignore_missing_imports = True

[mypy-pymacaroons.*]
ignore_missing_imports = True

[mypy-pympler.*]
ignore_missing_imports = True

[mypy-rust_python_jaeger_reporter.*]
ignore_missing_imports = True

[mypy-saml2.*]
ignore_missing_imports = True

[mypy-sentry_sdk]
ignore_missing_imports = True

[mypy-service_identity.*]
ignore_missing_imports = True

[mypy-signedjson.*]
ignore_missing_imports = True

[mypy-treq.*]
ignore_missing_imports = True

[mypy-twisted.*]
ignore_missing_imports = True

[mypy-zope]
ignore_missing_imports = True

@@ -27,6 +27,7 @@ DISTS = (
    "ubuntu:bionic",  # 18.04 LTS (our EOL forced by Py36 on 2021-12-23)
    "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
    "ubuntu:hirsute",  # 21.04 (EOL 2022-01-05)
    "ubuntu:impish",  # 21.10 (EOL 2022-07)
)

DESC = """\

@@ -90,10 +90,10 @@ else
    "scripts/hash_password"
    "scripts/register_new_matrix_user"
    "scripts/synapse_port_db"
    "scripts/update_synapse_database"
    "scripts-dev"
    "scripts-dev/build_debian_packages"
    "scripts-dev/sign_json"
    "scripts-dev/update_database"
    "contrib" "synctl" "setup.py" "synmark" "stubs" ".ci"
)
fi

@@ -147,7 +147,7 @@ python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
echo "Running db background jobs..."
scripts-dev/update_database --database-config "$SQLITE_CONFIG"
scripts/update_synapse_database --database-config --run-background-updates "$SQLITE_CONFIG"

# Create the PostgreSQL database.
echo "Creating postgres database..."

@@ -35,6 +35,19 @@ from github import Github
from packaging import version


def run_until_successful(command, *args, **kwargs):
    while True:
        completed_process = subprocess.run(command, *args, **kwargs)
        exit_code = completed_process.returncode
        if exit_code == 0:
            # successful, so nothing more to do here.
            return completed_process

        print(f"The command {command!r} failed with exit code {exit_code}.")
        print("Please try to correct the failure and then re-run.")
        click.confirm("Try again?", abort=True)


@click.group()
def cli():
    """An interactive script to walk through the parts of creating a release.
@@ -197,7 +210,7 @@ def prepare():
    f.write(parsed_synapse_ast.dumps())

    # Generate changelogs
    subprocess.run("python3 -m towncrier", shell=True)
    run_until_successful("python3 -m towncrier", shell=True)

    # Generate debian changelogs
    if parsed_new_version.pre is not None:
@@ -209,11 +222,11 @@ def prepare():
    else:
        debian_version = new_version

    subprocess.run(
    run_until_successful(
        f'dch -M -v {debian_version} "New synapse release {debian_version}."',
        shell=True,
    )
    subprocess.run('dch -M -r -D stable ""', shell=True)
    run_until_successful('dch -M -r -D stable ""', shell=True)

    # Show the user the changes and ask if they want to edit the change log.
    repo.git.add("-u")
@@ -224,7 +237,7 @@ def prepare():

    # Commit the changes.
    repo.git.add("-u")
    repo.git.commit(f"-m {new_version}")
    repo.git.commit("-m", new_version)

    # We give the option to bail here in case the user wants to make sure things
    # are OK before pushing.
@@ -239,6 +252,8 @@ def prepare():
    # Otherwise, push and open the changelog in the browser.
    repo.git.push("-u", repo.remote().name, repo.active_branch.name)

    print("Opening the changelog in your browser...")
    print("Please ask others to give it a check.")
    click.launch(
        f"https://github.com/matrix-org/synapse/blob/{repo.active_branch.name}/CHANGES.md"
    )
@@ -276,7 +291,7 @@ def tag(gh_token: Optional[str]):
    if click.confirm("Edit text?", default=False):
        changes = click.edit(changes, require_save=False)

    repo.create_tag(tag_name, message=changes)
    repo.create_tag(tag_name, message=changes, sign=True)

    if not click.confirm("Push tag to GitHub?", default=True):
        print("")
@@ -290,7 +305,19 @@ def tag(gh_token: Optional[str]):

    # If no token was given, we bail here
    if not gh_token:
        print("Launching the GitHub release page in your browser.")
        print("Please correct the title and create a draft.")
        if current_version.is_prerelease:
            print("As this is an RC, remember to mark it as a pre-release!")
        print("(by the way, this step can be automated by passing --gh-token,")
        print("or one of the GH_TOKEN or GITHUB_TOKEN env vars.)")
        click.launch(f"https://github.com/matrix-org/synapse/releases/edit/{tag_name}")

        print("Once done, you need to wait for the release assets to build.")
        if click.confirm("Launch the release assets actions page?", default=True):
            click.launch(
                f"https://github.com/matrix-org/synapse/actions?query=branch%3A{tag_name}"
            )
        return

    # Create a new draft release
@@ -305,6 +332,7 @@ def tag(gh_token: Optional[str]):
    )

    # Open the release and the actions where we are building the assets.
    print("Launching the release page and the actions page.")
    click.launch(release.html_url)
    click.launch(
        f"https://github.com/matrix-org/synapse/actions?query=branch%3A{tag_name}"

@@ -51,13 +51,19 @@ Example usage:
        "request with.",
    )

    parser.add_argument(
        "-K",
        "--signing-key",
        help="The private ed25519 key to sign the request with.",
    )

    parser.add_argument(
        "-c",
        "--config",
        default="homeserver.yaml",
        help=(
            "Path to synapse config file, from which the server name and/or signing "
            "key path will be read. Ignored if --server-name and --signing-key-path "
            "key path will be read. Ignored if --server-name and --signing-key(-path) "
            "are both given."
        ),
    )
@@ -87,11 +93,14 @@ Example usage:

    args = parser.parse_args()

    if not args.server_name or not args.signing_key_path:
    if not args.server_name or not (args.signing_key_path or args.signing_key):
        read_args_from_config(args)

    with open(args.signing_key_path) as f:
        key = read_signing_keys(f)[0]
    if args.signing_key:
        keys = read_signing_keys([args.signing_key])
    else:
        with open(args.signing_key_path) as f:
            keys = read_signing_keys(f)

    json_to_sign = args.input_data
    if json_to_sign is None:
@@ -107,7 +116,7 @@ Example usage:
        print("Input json was not an object", file=sys.stderr)
        sys.exit(1)

    sign_json(obj, args.server_name, key)
    sign_json(obj, args.server_name, keys[0])
    for c in json_encoder.iterencode(obj):
        args.output.write(c)
    args.output.write("\n")
@@ -118,8 +127,17 @@ def read_args_from_config(args: argparse.Namespace) -> None:
        config = yaml.safe_load(fh)
        if not args.server_name:
            args.server_name = config["server_name"]
        if not args.signing_key_path:
            args.signing_key_path = config["signing_key_path"]
        if not args.signing_key_path and not args.signing_key:
            if "signing_key" in config:
                args.signing_key = config["signing_key"]
            elif "signing_key_path" in config:
                args.signing_key_path = config["signing_key_path"]
            else:
                print(
                    "A signing key must be given on the commandline or in the config file.",
                    file=sys.stderr,
                )
                sys.exit(1)


if __name__ == "__main__":

scripts-dev/test_postgresql.sh (new executable file)
@@ -0,0 +1,19 @@
#!/usr/bin/env bash

# This script builds the Docker image to run the PostgreSQL tests, and then runs
# the tests. It uses a dedicated tox environment so that we don't have to
# rebuild it each time.

# Command line arguments to this script are forwarded to "tox" and then to "trial".

set -e

# Build, and tag
docker build docker/ \
    --build-arg "UID=$(id -u)" \
    --build-arg "GID=$(id -g)" \
    -f docker/Dockerfile-pgtests \
    -t synapsepgtests

# Run, mounting the current directory into /src
docker run --rm -it -v "$(pwd):/src" -v synapse-pg-test-tox:/tox synapsepgtests "$@"
@@ -215,7 +215,7 @@ class MockHomeserver:
    def __init__(self, config):
        self.clock = Clock(reactor)
        self.config = config
        self.hostname = config.server_name
        self.hostname = config.server.server_name
        self.version_string = "Synapse/" + get_version_string(synapse)

    def get_clock(self):
@@ -583,7 +583,7 @@ class Porter(object):
            return

        self.postgres_store = self.build_db_store(
            self.hs_config.get_single_database()
            self.hs_config.database.get_single_database()
        )

        await self.run_background_updates_on_postgres()
@@ -1069,7 +1069,7 @@ class CursesProgress(Progress):

        self.stdscr.addstr(0, 0, status, curses.A_BOLD)

        max_len = max([len(t) for t in self.tables.keys()])
        max_len = max(len(t) for t in self.tables.keys())

        left_margin = 5
        middle_space = 1

@@ -36,16 +36,35 @@ class MockHomeserver(HomeServer):

    def __init__(self, config, **kwargs):
        super(MockHomeserver, self).__init__(
            config.server_name, reactor=reactor, config=config, **kwargs
            config.server.server_name, reactor=reactor, config=config, **kwargs
        )

        self.version_string = "Synapse/" + get_version_string(synapse)


if __name__ == "__main__":
def run_background_updates(hs):
    store = hs.get_datastore()

    async def run_background_updates():
        await store.db_pool.updates.run_background_updates(sleep=False)
        # Stop the reactor to exit the script once every background update is run.
        reactor.stop()

    def run():
        # Apply all background updates on the database.
        defer.ensureDeferred(
            run_as_background_process("background_updates", run_background_updates)
        )

    reactor.callWhenRunning(run)

    reactor.run()


def main():
    parser = argparse.ArgumentParser(
        description=(
            "Updates a synapse database to the latest schema and runs background updates"
            "Updates a synapse database to the latest schema and optionally runs background updates"
            " on it."
        )
    )
@@ -54,7 +73,13 @@ if __name__ == "__main__":
        "--database-config",
        type=argparse.FileType("r"),
        required=True,
        help="A database config file for either a SQLite3 database or a PostgreSQL one.",
        help="Synapse configuration file, giving the details of the database to be updated",
    )
    parser.add_argument(
        "--run-background-updates",
        action="store_true",
        required=False,
        help="run background updates after upgrading the database schema",
    )

    args = parser.parse_args()
@@ -82,19 +107,10 @@ if __name__ == "__main__":
    # Setup instantiates the store within the homeserver object and updates the
    # DB.
    hs.setup()
    store = hs.get_datastore()

    async def run_background_updates():
        await store.db_pool.updates.run_background_updates(sleep=False)
        # Stop the reactor to exit the script once every background update is run.
        reactor.stop()
    if args.run_background_updates:
        run_background_updates(hs)

    def run():
        # Apply all background updates on the database.
        defer.ensureDeferred(
            run_as_background_process("background_updates", run_background_updates)
        )

    reactor.callWhenRunning(run)

    reactor.run()
if __name__ == "__main__":
    main()
setup.py
@@ -103,17 +103,17 @@ CONDITIONAL_REQUIREMENTS["lint"] = [
     "flake8",
 ]

-CONDITIONAL_REQUIREMENTS["dev"] = CONDITIONAL_REQUIREMENTS["lint"] + [
-    # The following are used by the release script
-    "click==7.1.2",
-    "redbaron==0.9.2",
-    "GitPython==3.1.14",
-    "commonmark==0.9.1",
-    "pygithub==1.55",
+CONDITIONAL_REQUIREMENTS["mypy"] = [
+    "mypy==0.910",
+    "mypy-zope==0.3.2",
+    "types-bleach>=4.1.0",
+    "types-jsonschema>=3.2.0",
+    "types-Pillow>=8.3.4",
+    "types-pyOpenSSL>=20.0.7",
+    "types-PyYAML>=5.4.10",
+    "types-setuptools>=57.4.0",
 ]

-CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
-
 # Dependencies which are exclusively required by unit test code. This is
 # NOT a list of all modules that are necessary to run the unit tests.
 # Tests assume that all optional dependencies are installed.
@@ -121,6 +121,20 @@ CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
 # parameterized_class decorator was introduced in parameterized 0.7.0
 CONDITIONAL_REQUIREMENTS["test"] = ["parameterized>=0.7.0"]

+CONDITIONAL_REQUIREMENTS["dev"] = (
+    CONDITIONAL_REQUIREMENTS["lint"]
+    + CONDITIONAL_REQUIREMENTS["mypy"]
+    + CONDITIONAL_REQUIREMENTS["test"]
+    + [
+        # The following are used by the release script
+        "click==7.1.2",
+        "redbaron==0.9.2",
+        "GitPython==3.1.14",
+        "commonmark==0.9.1",
+        "pygithub==1.55",
+    ]
+)
+
 setup(
     name="matrix-synapse",
     version=version,
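The recomposed "dev" extra means a single install now pulls in the lint, mypy and test toolchains along with the release-script dependencies. A rough sketch of what the concatenation produces (requirement pins here are illustrative, not a copy of setup.py):

    # Extras are plain lists of requirement strings, so composing them is list "+".
    extras = {
        "lint": ["flake8"],
        "mypy": ["mypy==0.910", "mypy-zope==0.3.2"],
        "test": ["parameterized>=0.7.0"],
    }
    extras["dev"] = extras["lint"] + extras["mypy"] + extras["test"] + ["click==7.1.2"]

    # `pip install -e .[dev]` would then resolve every entry in extras["dev"].
    print(extras["dev"])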
@@ -47,7 +47,7 @@ try:
 except ImportError:
     pass

-__version__ = "1.43.0"
+__version__ = "1.46.0"

 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
     # We import here so that we don't have to install a bunch of deps when
@@ -70,8 +70,8 @@ class Auth:

         self._auth_blocking = AuthBlocking(self.hs)

-        self._track_appservice_user_ips = hs.config.track_appservice_user_ips
-        self._macaroon_secret_key = hs.config.macaroon_secret_key
+        self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips
+        self._macaroon_secret_key = hs.config.key.macaroon_secret_key
         self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users

     async def check_user_in_room(
@@ -245,7 +245,7 @@ class Auth:

     async def validate_appservice_can_control_user_id(
         self, app_service: ApplicationService, user_id: str
-    ):
+    ) -> None:
         """Validates that the app service is allowed to control
        the given user.

@@ -618,5 +618,13 @@ class Auth:
             % (user_id, room_id),
         )

-    async def check_auth_blocking(self, *args, **kwargs) -> None:
-        await self._auth_blocking.check_auth_blocking(*args, **kwargs)
+    async def check_auth_blocking(
+        self,
+        user_id: Optional[str] = None,
+        threepid: Optional[dict] = None,
+        user_type: Optional[str] = None,
+        requester: Optional[Requester] = None,
+    ) -> None:
+        await self._auth_blocking.check_auth_blocking(
+            user_id=user_id, threepid=threepid, user_type=user_type, requester=requester
+        )
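Replacing the *args/**kwargs passthrough with an explicit keyword signature lets mypy check call sites instead of deferring errors to runtime. A small sketch of the before/after trade-off (illustrative class names, not Synapse's):

    from typing import Optional

    class Blocker:
        async def check(self, user_id: Optional[str] = None) -> None:
            pass

    class Facade:
        def __init__(self) -> None:
            self._blocking = Blocker()

        # Before: `async def check(self, *args, **kwargs)` accepted any call
        # shape and only failed (or silently misbehaved) at runtime.
        # After: a mistyped keyword is a type-check error, and tooling can
        # autocomplete the real parameters.
        async def check(self, user_id: Optional[str] = None) -> None:
            await self._blocking.check(user_id=user_id)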
@@ -30,13 +30,15 @@ class AuthBlocking:
     def __init__(self, hs: "HomeServer"):
         self.store = hs.get_datastore()

-        self._server_notices_mxid = hs.config.server_notices_mxid
-        self._hs_disabled = hs.config.hs_disabled
-        self._hs_disabled_message = hs.config.hs_disabled_message
-        self._admin_contact = hs.config.admin_contact
-        self._max_mau_value = hs.config.max_mau_value
-        self._limit_usage_by_mau = hs.config.limit_usage_by_mau
-        self._mau_limits_reserved_threepids = hs.config.mau_limits_reserved_threepids
+        self._server_notices_mxid = hs.config.servernotices.server_notices_mxid
+        self._hs_disabled = hs.config.server.hs_disabled
+        self._hs_disabled_message = hs.config.server.hs_disabled_message
+        self._admin_contact = hs.config.server.admin_contact
+        self._max_mau_value = hs.config.server.max_mau_value
+        self._limit_usage_by_mau = hs.config.server.limit_usage_by_mau
+        self._mau_limits_reserved_threepids = (
+            hs.config.server.mau_limits_reserved_threepids
+        )
         self._server_name = hs.hostname
+        self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips

@@ -79,7 +81,7 @@ class AuthBlocking:
             # We never block the server from doing actions on behalf of
             # users.
             return
-        elif requester.app_service and not self._track_appservice_user_ips:
+        if requester.app_service and not self._track_appservice_user_ips:
             # If we're authenticated as an appservice then we only block
             # auth if `track_appservice_user_ips` is set, as that option
             # implicitly means that application services are part of MAU
@@ -121,7 +121,7 @@ class EventTypes:
     SpaceParent = "m.space.parent"

     MSC2716_INSERTION = "org.matrix.msc2716.insertion"
-    MSC2716_CHUNK = "org.matrix.msc2716.chunk"
+    MSC2716_BATCH = "org.matrix.msc2716.batch"
     MSC2716_MARKER = "org.matrix.msc2716.marker"


@@ -176,6 +176,7 @@ class RelationTypes:
     ANNOTATION = "m.annotation"
     REPLACE = "m.replace"
     REFERENCE = "m.reference"
+    THREAD = "io.element.thread"


 class LimitBlockingTypes:
@@ -209,14 +210,17 @@ class EventContentFields:

     # Used on normal messages to indicate they were historically imported after the fact
     MSC2716_HISTORICAL = "org.matrix.msc2716.historical"
-    # For "insertion" events to indicate what the next chunk ID should be in
+    # For "insertion" events to indicate what the next batch ID should be in
     # order to connect to it
-    MSC2716_NEXT_CHUNK_ID = "org.matrix.msc2716.next_chunk_id"
-    # Used on "chunk" events to indicate which insertion event it connects to
-    MSC2716_CHUNK_ID = "org.matrix.msc2716.chunk_id"
+    MSC2716_NEXT_BATCH_ID = "org.matrix.msc2716.next_batch_id"
+    # Used on "batch" events to indicate which insertion event it connects to
+    MSC2716_BATCH_ID = "org.matrix.msc2716.batch_id"
     # For "marker" events
     MSC2716_MARKER_INSERTION = "org.matrix.msc2716.marker.insertion"

+    # The authorising user for joining a restricted room.
+    AUTHORISING_USER = "join_authorised_via_users_server"
+

 class RoomTypes:
     """Understood values of the room_type field of m.room.create events."""
@@ -18,7 +18,7 @@
 import logging
 import typing
 from http import HTTPStatus
-from typing import Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union

 from twisted.web import http

@@ -143,7 +143,7 @@ class SynapseError(CodeMessageException):
         super().__init__(code, msg)
         self.errcode = errcode

-    def error_dict(self):
+    def error_dict(self) -> "JsonDict":
         return cs_error(self.msg, self.errcode)


@@ -175,7 +175,7 @@ class ProxiedRequestError(SynapseError):
         else:
             self._additional_fields = dict(additional_fields)

-    def error_dict(self):
+    def error_dict(self) -> "JsonDict":
         return cs_error(self.msg, self.errcode, **self._additional_fields)


@@ -196,7 +196,7 @@ class ConsentNotGivenError(SynapseError):
         )
         self._consent_uri = consent_uri

-    def error_dict(self):
+    def error_dict(self) -> "JsonDict":
         return cs_error(self.msg, self.errcode, consent_uri=self._consent_uri)


@@ -262,14 +262,10 @@ class InteractiveAuthIncompleteError(Exception):
 class UnrecognizedRequestError(SynapseError):
     """An error indicating we don't understand the request you're trying to make"""

-    def __init__(self, *args, **kwargs):
-        if "errcode" not in kwargs:
-            kwargs["errcode"] = Codes.UNRECOGNIZED
-        if len(args) == 0:
-            message = "Unrecognized request"
-        else:
-            message = args[0]
-        super().__init__(400, message, **kwargs)
+    def __init__(
+        self, msg: str = "Unrecognized request", errcode: str = Codes.UNRECOGNIZED
+    ):
+        super().__init__(400, msg, errcode)


 class NotFoundError(SynapseError):
@@ -284,10 +280,8 @@ class AuthError(SynapseError):
     other poorly-defined times.
     """

-    def __init__(self, *args, **kwargs):
-        if "errcode" not in kwargs:
-            kwargs["errcode"] = Codes.FORBIDDEN
-        super().__init__(*args, **kwargs)
+    def __init__(self, code: int, msg: str, errcode: str = Codes.FORBIDDEN):
+        super().__init__(code, msg, errcode)


 class InvalidClientCredentialsError(SynapseError):
@@ -321,7 +315,7 @@ class InvalidClientTokenError(InvalidClientCredentialsError):
         super().__init__(msg=msg, errcode="M_UNKNOWN_TOKEN")
         self._soft_logout = soft_logout

-    def error_dict(self):
+    def error_dict(self) -> "JsonDict":
         d = super().error_dict()
         d["soft_logout"] = self._soft_logout
         return d
@@ -345,7 +339,7 @@ class ResourceLimitError(SynapseError):
         self.limit_type = limit_type
         super().__init__(code, msg, errcode=errcode)

-    def error_dict(self):
+    def error_dict(self) -> "JsonDict":
         return cs_error(
             self.msg,
             self.errcode,
@@ -357,32 +351,17 @@ class ResourceLimitError(SynapseError):
 class EventSizeError(SynapseError):
     """An error raised when an event is too big."""

-    def __init__(self, *args, **kwargs):
-        if "errcode" not in kwargs:
-            kwargs["errcode"] = Codes.TOO_LARGE
-        super().__init__(413, *args, **kwargs)
-
-
-class EventStreamError(SynapseError):
-    """An error raised when there a problem with the event stream."""
-
-    def __init__(self, *args, **kwargs):
-        if "errcode" not in kwargs:
-            kwargs["errcode"] = Codes.BAD_PAGINATION
-        super().__init__(*args, **kwargs)
+    def __init__(self, msg: str):
+        super().__init__(413, msg, Codes.TOO_LARGE)


 class LoginError(SynapseError):
     """An error raised when there was a problem logging in."""

-    pass
-

 class StoreError(SynapseError):
     """An error raised when there was a problem storing some data."""

-    pass
-

 class InvalidCaptchaError(SynapseError):
     def __init__(
@@ -395,7 +374,7 @@ class InvalidCaptchaError(SynapseError):
         super().__init__(code, msg, errcode)
         self.error_url = error_url

-    def error_dict(self):
+    def error_dict(self) -> "JsonDict":
         return cs_error(self.msg, self.errcode, error_url=self.error_url)


@@ -412,7 +391,7 @@ class LimitExceededError(SynapseError):
         super().__init__(code, msg, errcode)
         self.retry_after_ms = retry_after_ms

-    def error_dict(self):
+    def error_dict(self) -> "JsonDict":
         return cs_error(self.msg, self.errcode, retry_after_ms=self.retry_after_ms)


@@ -443,10 +422,8 @@ class UnsupportedRoomVersionError(SynapseError):
 class ThreepidValidationError(SynapseError):
     """An error raised when there was a problem authorising an event."""

-    def __init__(self, *args, **kwargs):
-        if "errcode" not in kwargs:
-            kwargs["errcode"] = Codes.FORBIDDEN
-        super().__init__(*args, **kwargs)
+    def __init__(self, msg: str, errcode: str = Codes.FORBIDDEN):
+        super().__init__(400, msg, errcode)


 class IncompatibleRoomVersionError(SynapseError):
@@ -466,7 +443,7 @@ class IncompatibleRoomVersionError(SynapseError):

         self._room_version = room_version

-    def error_dict(self):
+    def error_dict(self) -> "JsonDict":
         return cs_error(self.msg, self.errcode, room_version=self._room_version)


@@ -494,7 +471,7 @@ class RequestSendFailed(RuntimeError):
     errors (like programming errors).
     """

-    def __init__(self, inner_exception, can_retry):
+    def __init__(self, inner_exception: BaseException, can_retry: bool):
         super().__init__(
             "Failed to send request: %s: %s"
             % (type(inner_exception).__name__, inner_exception)
@@ -503,7 +480,7 @@ class RequestSendFailed(RuntimeError):
         self.can_retry = can_retry


-def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs):
+def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs: Any) -> "JsonDict":
     """Utility method for constructing an error response for client-server
     interactions.

@@ -551,7 +528,7 @@ class FederationError(RuntimeError):
         msg = "%s %s: %s" % (level, code, reason)
         super().__init__(msg)

-    def get_dict(self):
+    def get_dict(self) -> "JsonDict":
         return {
             "level": self.level,
             "code": self.code,
@@ -580,7 +557,7 @@ class HttpResponseException(CodeMessageException):
         super().__init__(code, msg)
         self.response = response

-    def to_synapse_error(self):
+    def to_synapse_error(self) -> SynapseError:
         """Make a SynapseError based on an HTTPResponseException

         This is useful when a proxied request has failed, and we need to
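With the explicit constructors, the error classes keep their behaviour but expose fixed signatures. Assuming the definitions above, construction would look roughly like this (a sketch, not lines from the diff):

    from synapse.api.errors import AuthError, EventSizeError, UnrecognizedRequestError

    e1 = UnrecognizedRequestError()                  # 400, "Unrecognized request", M_UNRECOGNIZED
    e2 = UnrecognizedRequestError("No such route")   # custom message, default errcode
    e3 = AuthError(403, "Forbidden by policy")       # HTTP code and message are now required
    e4 = EventSizeError("Event exceeds size limit")  # always maps to 413 / M_TOO_LARGE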
@@ -15,7 +15,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import json
-from typing import List
+from typing import (
+    TYPE_CHECKING,
+    Awaitable,
+    Container,
+    Iterable,
+    List,
+    Optional,
+    Set,
+    TypeVar,
+    Union,
+)

 import jsonschema
 from jsonschema import FormatChecker
@@ -23,7 +33,11 @@ from jsonschema import FormatChecker
 from synapse.api.constants import EventContentFields
 from synapse.api.errors import SynapseError
 from synapse.api.presence import UserPresenceState
-from synapse.types import RoomID, UserID
+from synapse.events import EventBase
+from synapse.types import JsonDict, RoomID, UserID
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer

 FILTER_SCHEMA = {
     "additionalProperties": False,
@@ -120,25 +134,29 @@ USER_FILTER_SCHEMA = {


 @FormatChecker.cls_checks("matrix_room_id")
-def matrix_room_id_validator(room_id_str):
+def matrix_room_id_validator(room_id_str: str) -> RoomID:
     return RoomID.from_string(room_id_str)


 @FormatChecker.cls_checks("matrix_user_id")
-def matrix_user_id_validator(user_id_str):
+def matrix_user_id_validator(user_id_str: str) -> UserID:
     return UserID.from_string(user_id_str)


 class Filtering:
-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         super().__init__()
         self.store = hs.get_datastore()

-    async def get_user_filter(self, user_localpart, filter_id):
+    async def get_user_filter(
+        self, user_localpart: str, filter_id: Union[int, str]
+    ) -> "FilterCollection":
         result = await self.store.get_user_filter(user_localpart, filter_id)
         return FilterCollection(result)

-    def add_user_filter(self, user_localpart, user_filter):
+    def add_user_filter(
+        self, user_localpart: str, user_filter: JsonDict
+    ) -> Awaitable[int]:
         self.check_valid_filter(user_filter)
         return self.store.add_user_filter(user_localpart, user_filter)

@@ -146,13 +164,13 @@ class Filtering:
     # replace_user_filter at some point? There's no REST API specified for
     # them however

-    def check_valid_filter(self, user_filter_json):
+    def check_valid_filter(self, user_filter_json: JsonDict) -> None:
         """Check if the provided filter is valid.

         This inspects all definitions contained within the filter.

         Args:
-            user_filter_json(dict): The filter
+            user_filter_json: The filter
         Raises:
             SynapseError: If the filter is not valid.
         """
@@ -167,8 +185,12 @@ class Filtering:
             raise SynapseError(400, str(e))


+# Filters work across events, presence EDUs, and account data.
+FilterEvent = TypeVar("FilterEvent", EventBase, UserPresenceState, JsonDict)
+
+
 class FilterCollection:
-    def __init__(self, filter_json):
+    def __init__(self, filter_json: JsonDict):
         self._filter_json = filter_json

         room_filter_json = self._filter_json.get("room", {})
@@ -188,59 +210,61 @@ class FilterCollection:
         self.event_fields = filter_json.get("event_fields", [])
         self.event_format = filter_json.get("event_format", "client")

-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<FilterCollection %s>" % (json.dumps(self._filter_json),)

-    def get_filter_json(self):
+    def get_filter_json(self) -> JsonDict:
         return self._filter_json

-    def timeline_limit(self):
+    def timeline_limit(self) -> int:
         return self._room_timeline_filter.limit()

-    def presence_limit(self):
+    def presence_limit(self) -> int:
         return self._presence_filter.limit()

-    def ephemeral_limit(self):
+    def ephemeral_limit(self) -> int:
         return self._room_ephemeral_filter.limit()

-    def lazy_load_members(self):
+    def lazy_load_members(self) -> bool:
         return self._room_state_filter.lazy_load_members()

-    def include_redundant_members(self):
+    def include_redundant_members(self) -> bool:
         return self._room_state_filter.include_redundant_members()

-    def filter_presence(self, events):
+    def filter_presence(
+        self, events: Iterable[UserPresenceState]
+    ) -> List[UserPresenceState]:
         return self._presence_filter.filter(events)

-    def filter_account_data(self, events):
+    def filter_account_data(self, events: Iterable[JsonDict]) -> List[JsonDict]:
         return self._account_data.filter(events)

-    def filter_room_state(self, events):
+    def filter_room_state(self, events: Iterable[EventBase]) -> List[EventBase]:
         return self._room_state_filter.filter(self._room_filter.filter(events))

-    def filter_room_timeline(self, events):
+    def filter_room_timeline(self, events: Iterable[EventBase]) -> List[EventBase]:
         return self._room_timeline_filter.filter(self._room_filter.filter(events))

-    def filter_room_ephemeral(self, events):
+    def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> List[JsonDict]:
         return self._room_ephemeral_filter.filter(self._room_filter.filter(events))

-    def filter_room_account_data(self, events):
+    def filter_room_account_data(self, events: Iterable[JsonDict]) -> List[JsonDict]:
         return self._room_account_data.filter(self._room_filter.filter(events))

-    def blocks_all_presence(self):
+    def blocks_all_presence(self) -> bool:
         return (
             self._presence_filter.filters_all_types()
             or self._presence_filter.filters_all_senders()
         )

-    def blocks_all_room_ephemeral(self):
+    def blocks_all_room_ephemeral(self) -> bool:
         return (
             self._room_ephemeral_filter.filters_all_types()
             or self._room_ephemeral_filter.filters_all_senders()
             or self._room_ephemeral_filter.filters_all_rooms()
         )

-    def blocks_all_room_timeline(self):
+    def blocks_all_room_timeline(self) -> bool:
         return (
             self._room_timeline_filter.filters_all_types()
             or self._room_timeline_filter.filters_all_senders()
@@ -249,7 +273,7 @@ class FilterCollection:


 class Filter:
-    def __init__(self, filter_json):
+    def __init__(self, filter_json: JsonDict):
         self.filter_json = filter_json

         self.types = self.filter_json.get("types", None)
@@ -266,26 +290,26 @@ class Filter:
         self.labels = self.filter_json.get("org.matrix.labels", None)
         self.not_labels = self.filter_json.get("org.matrix.not_labels", [])

-    def filters_all_types(self):
+    def filters_all_types(self) -> bool:
         return "*" in self.not_types

-    def filters_all_senders(self):
+    def filters_all_senders(self) -> bool:
         return "*" in self.not_senders

-    def filters_all_rooms(self):
+    def filters_all_rooms(self) -> bool:
         return "*" in self.not_rooms

-    def check(self, event):
+    def check(self, event: FilterEvent) -> bool:
         """Checks whether the filter matches the given event.

         Returns:
-            bool: True if the event matches
+            True if the event matches
         """
         # We usually get the full "events" as dictionaries coming through,
         # except for presence which actually gets passed around as its own
         # namedtuple type.
         if isinstance(event, UserPresenceState):
-            sender = event.user_id
+            sender: Optional[str] = event.user_id
             room_id = None
             ev_type = "m.presence"
             contains_url = False
@@ -305,18 +329,25 @@ class Filter:
             room_id = event.get("room_id", None)
             ev_type = event.get("type", None)

-            content = event.get("content", {})
+            content = event.get("content") or {}
             # check if there is a string url field in the content for filtering purposes
             contains_url = isinstance(content.get("url"), str)
             labels = content.get(EventContentFields.LABELS, [])

         return self.check_fields(room_id, sender, ev_type, labels, contains_url)

-    def check_fields(self, room_id, sender, event_type, labels, contains_url):
+    def check_fields(
+        self,
+        room_id: Optional[str],
+        sender: Optional[str],
+        event_type: Optional[str],
+        labels: Container[str],
+        contains_url: bool,
+    ) -> bool:
         """Checks whether the filter matches the given event fields.

         Returns:
-            bool: True if the event fields match
+            True if the event fields match
         """
         literal_keys = {
             "rooms": lambda v: room_id == v,
@@ -343,14 +374,14 @@ class Filter:

         return True

-    def filter_rooms(self, room_ids):
+    def filter_rooms(self, room_ids: Iterable[str]) -> Set[str]:
         """Apply the 'rooms' filter to a given list of rooms.

         Args:
-            room_ids (list): A list of room_ids.
+            room_ids: A list of room_ids.

         Returns:
-            list: A list of room_ids that match the filter
+            A list of room_ids that match the filter
         """
         room_ids = set(room_ids)

@@ -363,23 +394,23 @@ class Filter:

         return room_ids

-    def filter(self, events):
+    def filter(self, events: Iterable[FilterEvent]) -> List[FilterEvent]:
         return list(filter(self.check, events))

-    def limit(self):
+    def limit(self) -> int:
         return self.filter_json.get("limit", 10)

-    def lazy_load_members(self):
+    def lazy_load_members(self) -> bool:
         return self.filter_json.get("lazy_load_members", False)

-    def include_redundant_members(self):
+    def include_redundant_members(self) -> bool:
         return self.filter_json.get("include_redundant_members", False)

-    def with_room_ids(self, room_ids):
+    def with_room_ids(self, room_ids: Iterable[str]) -> "Filter":
         """Returns a new filter with the given room IDs appended.

         Args:
-            room_ids (iterable[unicode]): The room_ids to add
+            room_ids: The room_ids to add

         Returns:
             filter: A new filter including the given rooms and the old
@@ -390,8 +421,8 @@ class Filter:
         return newFilter


-def _matches_wildcard(actual_value, filter_value):
-    if filter_value.endswith("*"):
+def _matches_wildcard(actual_value: Optional[str], filter_value: str) -> bool:
+    if filter_value.endswith("*") and isinstance(actual_value, str):
         type_prefix = filter_value[:-1]
         return actual_value.startswith(type_prefix)
     else:
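The newly typed Filter API is easiest to see end to end with a concrete filter. A hedged usage sketch (the event dicts are invented for illustration; the FilterEvent TypeVar above allows plain JSON dicts):

    from synapse.api.filtering import Filter

    f = Filter({"types": ["m.room.message"], "not_senders": ["@spam:example.com"]})

    events = [
        {"type": "m.room.message", "sender": "@alice:example.com", "content": {}},
        {"type": "m.room.topic", "sender": "@alice:example.com", "content": {}},
        {"type": "m.room.message", "sender": "@spam:example.com", "content": {}},
    ]

    # filter() preserves the element type it is given and keeps only matches:
    # here, the one m.room.message not sent by the excluded sender.
    matching = f.filter(events)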
@@ -12,49 +12,48 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from collections import namedtuple
+from typing import Any, Optional
+
+import attr

 from synapse.api.constants import PresenceState
+from synapse.types import JsonDict


-class UserPresenceState(
-    namedtuple(
-        "UserPresenceState",
-        (
-            "user_id",
-            "state",
-            "last_active_ts",
-            "last_federation_update_ts",
-            "last_user_sync_ts",
-            "status_msg",
-            "currently_active",
-        ),
-    )
-):
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class UserPresenceState:
     """Represents the current presence state of the user.

-    user_id (str)
-    last_active (int): Time in msec that the user last interacted with server.
-    last_federation_update (int): Time in msec since either a) we sent a presence
+    user_id
+    last_active: Time in msec that the user last interacted with server.
+    last_federation_update: Time in msec since either a) we sent a presence
         update to other servers or b) we received a presence update, depending
         on if is a local user or not.
-    last_user_sync (int): Time in msec that the user last *completed* a sync
+    last_user_sync: Time in msec that the user last *completed* a sync
         (or event stream).
-    status_msg (str): User set status message.
+    status_msg: User set status message.
     """

-    def as_dict(self):
-        return dict(self._asdict())
+    user_id: str
+    state: str
+    last_active_ts: int
+    last_federation_update_ts: int
+    last_user_sync_ts: int
+    status_msg: Optional[str]
+    currently_active: bool
+
+    def as_dict(self) -> JsonDict:
+        return attr.asdict(self)

     @staticmethod
-    def from_dict(d):
+    def from_dict(d: JsonDict) -> "UserPresenceState":
         return UserPresenceState(**d)

-    def copy_and_replace(self, **kwargs):
-        return self._replace(**kwargs)
+    def copy_and_replace(self, **kwargs: Any) -> "UserPresenceState":
+        return attr.evolve(self, **kwargs)

     @classmethod
-    def default(cls, user_id):
+    def default(cls, user_id: str) -> "UserPresenceState":
         """Returns a default presence state."""
         return cls(
             user_id=user_id,
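The namedtuple-to-attrs conversion preserves the public API; only the update/serialise plumbing changes. The equivalences, as a tiny self-contained sketch (an illustrative class, not UserPresenceState itself):

    import attr

    @attr.s(slots=True, frozen=True, auto_attribs=True)
    class Point:
        x: int
        y: int

    p = Point(1, 2)
    attr.asdict(p)       # replaces namedtuple's ._asdict()
    attr.evolve(p, y=3)  # replaces ._replace(y=3); returns a new frozen instance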
@@ -17,6 +17,7 @@ from collections import OrderedDict
 from typing import Hashable, Optional, Tuple

 from synapse.api.errors import LimitExceededError
+from synapse.config.ratelimiting import RateLimitConfig
 from synapse.storage.databases.main import DataStore
 from synapse.types import Requester
 from synapse.util import Clock
@@ -160,7 +161,7 @@ class Ratelimiter:

         return allowed, time_allowed

-    def _prune_message_counts(self, time_now_s: float):
+    def _prune_message_counts(self, time_now_s: float) -> None:
         """Remove message count entries that have not exceeded their defined
         rate_hz limit

@@ -189,7 +190,7 @@ class Ratelimiter:
         update: bool = True,
         n_actions: int = 1,
         _time_now_s: Optional[float] = None,
-    ):
+    ) -> None:
         """Checks if an action can be performed. If not, raises a LimitExceededError

         Checks if the user has ratelimiting disabled in the database by looking
@@ -233,3 +234,88 @@ class Ratelimiter:
             raise LimitExceededError(
                 retry_after_ms=int(1000 * (time_allowed - time_now_s))
             )
+
+
+class RequestRatelimiter:
+    def __init__(
+        self,
+        store: DataStore,
+        clock: Clock,
+        rc_message: RateLimitConfig,
+        rc_admin_redaction: Optional[RateLimitConfig],
+    ):
+        self.store = store
+        self.clock = clock
+
+        # The rate_hz and burst_count are overridden on a per-user basis
+        self.request_ratelimiter = Ratelimiter(
+            store=self.store, clock=self.clock, rate_hz=0, burst_count=0
+        )
+        self._rc_message = rc_message
+
+        # Check whether ratelimiting room admin message redaction is enabled
+        # by the presence of rate limits in the config
+        if rc_admin_redaction:
+            self.admin_redaction_ratelimiter: Optional[Ratelimiter] = Ratelimiter(
+                store=self.store,
+                clock=self.clock,
+                rate_hz=rc_admin_redaction.per_second,
+                burst_count=rc_admin_redaction.burst_count,
+            )
+        else:
+            self.admin_redaction_ratelimiter = None
+
+    async def ratelimit(
+        self,
+        requester: Requester,
+        update: bool = True,
+        is_admin_redaction: bool = False,
+    ) -> None:
+        """Ratelimits requests.
+
+        Args:
+            requester
+            update: Whether to record that a request is being processed.
+                Set to False when doing multiple checks for one request (e.g.
+                to check up front if we would reject the request), and set to
+                True for the last call for a given request.
+            is_admin_redaction: Whether this is a room admin/moderator
+                redacting an event. If so then we may apply different
+                ratelimits depending on config.
+
+        Raises:
+            LimitExceededError if the request should be ratelimited
+        """
+        user_id = requester.user.to_string()
+
+        # The AS user itself is never rate limited.
+        app_service = self.store.get_app_service_by_user_id(user_id)
+        if app_service is not None:
+            return  # do not ratelimit app service senders
+
+        messages_per_second = self._rc_message.per_second
+        burst_count = self._rc_message.burst_count
+
+        # Check if there is a per user override in the DB.
+        override = await self.store.get_ratelimit_for_user(user_id)
+        if override:
+            # If overridden with a null Hz then ratelimiting has been entirely
+            # disabled for the user
+            if not override.messages_per_second:
+                return
+
+            messages_per_second = override.messages_per_second
+            burst_count = override.burst_count
+
+        if is_admin_redaction and self.admin_redaction_ratelimiter:
+            # If we have separate config for admin redactions, use a separate
+            # ratelimiter as to not have user_ids clash
+            await self.admin_redaction_ratelimiter.ratelimit(requester, update=update)
+        else:
+            # Override rate and burst count per-user
+            await self.request_ratelimiter.ratelimit(
+                requester,
+                rate_hz=messages_per_second,
+                burst_count=burst_count,
+                update=update,
+            )
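RequestRatelimiter appears to centralise per-request limiting behind one object, with a separate bucket for admin redactions. A hedged sketch of how a caller might wire and use it (assumes a Synapse HomeServer `hs` and a Requester `requester` from the surrounding code; the config attribute names are my reading of the ratelimiting section, not lines from this diff):

    async def handle_request(hs, requester):
        ratelimiter = RequestRatelimiter(
            store=hs.get_datastore(),
            clock=hs.get_clock(),
            rc_message=hs.config.ratelimiting.rc_message,
            rc_admin_redaction=hs.config.ratelimiting.rc_admin_redaction,
        )

        # Probe without consuming quota, then record on the final call.
        await ratelimiter.ratelimit(requester, update=False)
        await ratelimiter.ratelimit(requester, update=True)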
@@ -244,24 +244,8 @@ class RoomVersions:
         msc2716_historical=False,
         msc2716_redactions=False,
     )
-    MSC2716 = RoomVersion(
-        "org.matrix.msc2716",
-        RoomDisposition.UNSTABLE,
-        EventFormatVersions.V3,
-        StateResolutionVersions.V2,
-        enforce_key_validity=True,
-        special_case_aliases_auth=False,
-        strict_canonicaljson=True,
-        limit_notifications_power_levels=True,
-        msc2176_redaction_rules=False,
-        msc3083_join_rules=False,
-        msc3375_redaction_rules=False,
-        msc2403_knocking=True,
-        msc2716_historical=True,
-        msc2716_redactions=False,
-    )
-    MSC2716v2 = RoomVersion(
-        "org.matrix.msc2716v2",
+    MSC2716v3 = RoomVersion(
+        "org.matrix.msc2716v3",
         RoomDisposition.UNSTABLE,
         EventFormatVersions.V3,
         StateResolutionVersions.V2,
@@ -289,9 +273,9 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
         RoomVersions.V6,
         RoomVersions.MSC2176,
         RoomVersions.V7,
-        RoomVersions.MSC2716,
         RoomVersions.V8,
         RoomVersions.V9,
+        RoomVersions.MSC2716v3,
     )
 }
@@ -19,6 +19,7 @@ from hashlib import sha256
 from urllib.parse import urlencode

 from synapse.config import ConfigError
+from synapse.config.homeserver import HomeServerConfig

 SYNAPSE_CLIENT_API_PREFIX = "/_synapse/client"
 CLIENT_API_PREFIX = "/_matrix/client"
@@ -34,28 +35,24 @@ LEGACY_MEDIA_PREFIX = "/_matrix/media/v1"


 class ConsentURIBuilder:
-    def __init__(self, hs_config):
-        """
-        Args:
-            hs_config (synapse.config.homeserver.HomeServerConfig):
-        """
-        if hs_config.form_secret is None:
+    def __init__(self, hs_config: HomeServerConfig):
+        if hs_config.key.form_secret is None:
             raise ConfigError("form_secret not set in config")
         if hs_config.server.public_baseurl is None:
             raise ConfigError("public_baseurl not set in config")

-        self._hmac_secret = hs_config.form_secret.encode("utf-8")
+        self._hmac_secret = hs_config.key.form_secret.encode("utf-8")
         self._public_baseurl = hs_config.server.public_baseurl

-    def build_user_consent_uri(self, user_id):
+    def build_user_consent_uri(self, user_id: str) -> str:
         """Build a URI which we can give to the user to do their privacy
         policy consent

         Args:
-            user_id (str): mxid or username of user
+            user_id: mxid or username of user

         Returns
-            (str) the URI where the user can do consent
+            The URI where the user can do consent
         """
         mac = hmac.new(
             key=self._hmac_secret, msg=user_id.encode("ascii"), digestmod=sha256
@@ -31,6 +31,7 @@ import twisted
 from twisted.internet import defer, error, reactor
 from twisted.logger import LoggingFile, LogLevel
 from twisted.protocols.tls import TLSMemoryBIOFactory
+from twisted.python.threadpool import ThreadPool

 import synapse
 from synapse.api.constants import MAX_PDU_SIZE
@@ -42,11 +43,13 @@ from synapse.crypto import context_factory
 from synapse.events.presence_router import load_legacy_presence_router
 from synapse.events.spamcheck import load_legacy_spam_checkers
 from synapse.events.third_party_rules import load_legacy_third_party_event_rules
+from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.logging.context import PreserveLoggingContext
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.metrics.jemalloc import setup_jemalloc_stats
 from synapse.util.caches.lrucache import setup_expire_lru_cache_entries
 from synapse.util.daemonize import daemonize_process
+from synapse.util.gai_resolver import GAIResolver
 from synapse.util.rlimit import change_resource_limit
 from synapse.util.versionstring import get_version_string

@@ -86,11 +89,11 @@ def start_worker_reactor(appname, config, run_command=reactor.run):

     start_reactor(
         appname,
-        soft_file_limit=config.soft_file_limit,
-        gc_thresholds=config.gc_thresholds,
-        pid_file=config.worker_pid_file,
-        daemonize=config.worker_daemonize,
-        print_pidfile=config.print_pidfile,
+        soft_file_limit=config.server.soft_file_limit,
+        gc_thresholds=config.server.gc_thresholds,
+        pid_file=config.worker.worker_pid_file,
+        daemonize=config.worker.worker_daemonize,
+        print_pidfile=config.server.print_pidfile,
         logger=logger,
         run_command=run_command,
     )
@@ -293,15 +296,15 @@ def listen_ssl(
     return r


-def refresh_certificate(hs):
+def refresh_certificate(hs: "HomeServer"):
     """
     Refresh the TLS certificates that Synapse is using by re-reading them from
     disk and updating the TLS context factories to use them.
     """
-    if not hs.config.has_tls_listener():
+    if not hs.config.server.has_tls_listener():
         return

-    hs.config.read_certificate_from_disk()
+    hs.config.tls.read_certificate_from_disk()
     hs.tls_server_context_factory = context_factory.ServerContextFactory(hs.config)

     if hs._listening_services:
@@ -337,9 +340,19 @@ async def start(hs: "HomeServer"):
     Args:
         hs: homeserver instance
     """
+    reactor = hs.get_reactor()
+
+    # We want to use a separate thread pool for the resolver so that large
+    # numbers of DNS requests don't starve out other users of the threadpool.
+    resolver_threadpool = ThreadPool(name="gai_resolver")
+    resolver_threadpool.start()
+    reactor.addSystemEventTrigger("during", "shutdown", resolver_threadpool.stop)
+    reactor.installNameResolver(
+        GAIResolver(reactor, getThreadPool=lambda: resolver_threadpool)
+    )
+
     # Set up the SIGHUP machinery.
     if hasattr(signal, "SIGHUP"):
-        reactor = hs.get_reactor()

         @wrap_as_background_process("sighup")
         def handle_sighup(*args, **kwargs):
@@ -379,6 +392,7 @@ async def start(hs: "HomeServer"):
     load_legacy_spam_checkers(hs)
     load_legacy_third_party_event_rules(hs)
     load_legacy_presence_router(hs)
+    load_legacy_password_auth_providers(hs)

     # If we've configured an expiry time for caches, start the background job now.
     setup_expire_lru_cache_entries(hs)
@@ -417,19 +431,21 @@ async def start(hs: "HomeServer"):
         atexit.register(gc.freeze)


-def setup_sentry(hs):
+def setup_sentry(hs: "HomeServer"):
     """Enable sentry integration, if enabled in configuration

     Args:
-        hs (synapse.server.HomeServer)
+        hs
     """

-    if not hs.config.sentry_enabled:
+    if not hs.config.metrics.sentry_enabled:
         return

     import sentry_sdk

-    sentry_sdk.init(dsn=hs.config.sentry_dsn, release=get_version_string(synapse))
+    sentry_sdk.init(
+        dsn=hs.config.metrics.sentry_dsn, release=get_version_string(synapse)
+    )

     # We set some default tags that give some context to this instance
     with sentry_sdk.configure_scope() as scope:
@@ -445,7 +461,7 @@ def setup_sentry(hs):
         scope.set_tag("worker_name", name)


-def setup_sdnotify(hs):
+def setup_sdnotify(hs: "HomeServer"):
     """Adds process state hooks to tell systemd what we are up to."""

     # Tell systemd our state, if we're using it. This will silently fail if
@@ -39,6 +39,7 @@ from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
 from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.server import HomeServer
+from synapse.storage.databases.main.room import RoomWorkerStore
 from synapse.util.logcontext import LoggingContext
 from synapse.util.versionstring import get_version_string

@@ -58,6 +59,7 @@ class AdminCmdSlavedStore(
     SlavedEventStore,
     SlavedClientIpStore,
     BaseSlavedStore,
+    RoomWorkerStore,
 ):
     pass

@@ -66,11 +68,11 @@ class AdminCmdServer(HomeServer):
     DATASTORE_CLASS = AdminCmdSlavedStore


-async def export_data_command(hs, args):
+async def export_data_command(hs: HomeServer, args):
     """Export data for a user.

     Args:
-        hs (HomeServer)
+        hs
         args (argparse.Namespace)
     """

@@ -185,24 +187,20 @@ def start(config_options):
     # a full worker config.
     config.worker.worker_app = "synapse.app.admin_cmd"

-    if (
-        not config.worker_daemonize
-        and not config.worker_log_file
-        and not config.worker_log_config
-    ):
+    if not config.worker.worker_daemonize and not config.worker.worker_log_config:
         # Since we're meant to be run as a "command" let's not redirect stdio
         # unless we've actually set log config.
-        config.no_redirect_stdio = True
+        config.logging.no_redirect_stdio = True

     # Explicitly disable background processes
-    config.update_user_directory = False
+    config.server.update_user_directory = False
     config.worker.run_background_tasks = False
-    config.start_pushers = False
-    config.pusher_shard_config.instances = []
-    config.send_federation = False
-    config.federation_shard_config.instances = []
+    config.worker.start_pushers = False
+    config.worker.pusher_shard_config.instances = []
+    config.worker.send_federation = False
+    config.worker.federation_shard_config.instances = []

-    synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
+    synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts

     ss = AdminCmdServer(
         config.server.server_name,
@@ -221,7 +219,7 @@ def start(config_options):

     async def run():
         with LoggingContext("command"):
-            _base.start(ss)
+            await _base.start(ss)
             await args.func(ss, args)

     _base.start_worker_reactor(
@@ -131,16 +131,16 @@ class KeyUploadServlet(RestServlet):

     PATTERNS = client_patterns("/keys/upload(/(?P<device_id>[^/]+))?$")

-    def __init__(self, hs):
+    def __init__(self, hs: HomeServer):
         """
         Args:
-            hs (synapse.server.HomeServer): server
+            hs: server
         """
         super().__init__()
         self.auth = hs.get_auth()
         self.store = hs.get_datastore()
         self.http_client = hs.get_simple_http_client()
-        self.main_uri = hs.config.worker_main_http_uri
+        self.main_uri = hs.config.worker.worker_main_http_uri

     async def on_POST(self, request: Request, device_id: Optional[str]):
         requester = await self.auth.get_user_by_req(request, allow_guest=True)
@@ -321,7 +321,7 @@ class GenericWorkerServer(HomeServer):
         elif name == "federation":
             resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
         elif name == "media":
-            if self.config.can_load_media_repo:
+            if self.config.media.can_load_media_repo:
                 media_repo = self.get_media_repository_resource()

                 # We need to serve the admin servlets for media on the
@@ -384,7 +384,7 @@ class GenericWorkerServer(HomeServer):
             logger.info("Synapse worker now listening on port %d", port)

     def start_listening(self):
-        for listener in self.config.worker_listeners:
+        for listener in self.config.worker.worker_listeners:
             if listener.type == "http":
                 self._listen_http(listener)
             elif listener.type == "manhole":
@@ -395,7 +395,7 @@ class GenericWorkerServer(HomeServer):
                     manhole_globals={"hs": self},
                 )
             elif listener.type == "metrics":
-                if not self.config.enable_metrics:
+                if not self.config.metrics.enable_metrics:
                     logger.warning(
                         "Metrics listener configured, but "
                         "enable_metrics is not True!"
@@ -462,7 +462,7 @@ def start(config_options):
     # For other worker types we force this to off.
     config.server.update_user_directory = False

-    synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
+    synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
     synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage

     if config.server.gc_seconds:
@@ -488,7 +488,7 @@ def start(config_options):
     register_start(_base.start, hs)

     # redirect stdio to the logs, if configured.
-    if not hs.config.no_redirect_stdio:
+    if not hs.config.logging.no_redirect_stdio:
         redirect_stdio_to_logs()

     _base.start_worker_reactor("synapse-generic-worker", config)
@@ -195,7 +195,7 @@ class SynapseHomeServer(HomeServer):
                 }
             )

-            if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
+            if self.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
                 from synapse.rest.synapse.client.password_reset import (
                     PasswordResetSubmitTokenResource,
                 )
@@ -234,7 +234,7 @@ class SynapseHomeServer(HomeServer):
             )

         if name in ["media", "federation", "client"]:
-            if self.config.enable_media_repo:
+            if self.config.server.enable_media_repo:
                 media_repo = self.get_media_repository_resource()
                 resources.update(
                     {MEDIA_PREFIX: media_repo, LEGACY_MEDIA_PREFIX: media_repo}
@@ -248,7 +248,7 @@ class SynapseHomeServer(HomeServer):
             resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)

         if name == "webclient":
-            webclient_loc = self.config.web_client_location
+            webclient_loc = self.config.server.web_client_location

             if webclient_loc is None:
                 logger.warning(
@@ -269,7 +269,7 @@ class SynapseHomeServer(HomeServer):
             # https://twistedmatrix.com/trac/ticket/7678
             resources[WEB_CLIENT_PREFIX] = File(webclient_loc)

-        if name == "metrics" and self.config.enable_metrics:
+        if name == "metrics" and self.config.metrics.enable_metrics:
             resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)

         if name == "replication":
@@ -278,7 +278,7 @@ class SynapseHomeServer(HomeServer):
         return resources

     def start_listening(self):
-        if self.config.redis_enabled:
+        if self.config.redis.redis_enabled:
             # If redis is enabled we connect via the replication command handler
             # in the same way as the workers (since we're effectively a client
             # rather than a server).
@@ -305,7 +305,7 @@ class SynapseHomeServer(HomeServer):
                 for s in services:
                     reactor.addSystemEventTrigger("before", "shutdown", s.stopListening)
             elif listener.type == "metrics":
-                if not self.config.enable_metrics:
+                if not self.config.metrics.enable_metrics:
                     logger.warning(
                         "Metrics listener configured, but "
                         "enable_metrics is not True!"
@@ -343,7 +343,7 @@ def setup(config_options):
         # generating config files and shouldn't try to continue.
         sys.exit(0)

-    events.USE_FROZEN_DICTS = config.use_frozen_dicts
+    events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
     synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage

     if config.server.gc_seconds:
@@ -366,7 +366,7 @@ def setup(config_options):

     async def start():
         # Load the OIDC provider metadatas, if OIDC is enabled.
-        if hs.config.oidc_enabled:
+        if hs.config.oidc.oidc_enabled:
             oidc = hs.get_oidc_handler()
             # Loading the provider metadata also ensures the provider config is valid.
             await oidc.load_metadata()
@@ -412,7 +412,7 @@ def format_config_error(e: ConfigError) -> Iterator[str]:
         e = e.__cause__


-def run(hs):
+def run(hs: HomeServer):
     PROFILE_SYNAPSE = False
     if PROFILE_SYNAPSE:

@@ -439,11 +439,11 @@ def run(hs):

     _base.start_reactor(
         "synapse-homeserver",
-        soft_file_limit=hs.config.soft_file_limit,
-        gc_thresholds=hs.config.gc_thresholds,
-        pid_file=hs.config.pid_file,
-        daemonize=hs.config.daemonize,
-        print_pidfile=hs.config.print_pidfile,
+        soft_file_limit=hs.config.server.soft_file_limit,
+        gc_thresholds=hs.config.server.gc_thresholds,
+        pid_file=hs.config.server.pid_file,
+        daemonize=hs.config.server.daemonize,
+        print_pidfile=hs.config.server.print_pidfile,
         logger=logger,
     )

@@ -455,7 +455,7 @@ def main():
     hs = setup(sys.argv[1:])

     # redirect stdio to the logs, if configured.
-    if not hs.config.no_redirect_stdio:
+    if not hs.config.logging.no_redirect_stdio:
         redirect_stdio_to_logs()

     run(hs)
@@ -15,11 +15,15 @@ import logging
 import math
 import resource
 import sys
+from typing import TYPE_CHECKING

 from prometheus_client import Gauge

 from synapse.metrics.background_process_metrics import wrap_as_background_process

+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
 logger = logging.getLogger("synapse.app.homeserver")

 # Contains the list of processes we will be monitoring
@@ -41,7 +45,7 @@ registered_reserved_users_mau_gauge = Gauge(


 @wrap_as_background_process("phone_stats_home")
-async def phone_stats_home(hs, stats, stats_process=_stats_process):
+async def phone_stats_home(hs: "HomeServer", stats, stats_process=_stats_process):
     logger.info("Gathering stats for reporting")
     now = int(hs.get_clock().time())
     uptime = int(now - hs.start_time)
@@ -74,7 +78,7 @@ async def phone_stats_home(hs, stats, stats_process=_stats_process):
     store = hs.get_datastore()

     stats["homeserver"] = hs.config.server.server_name
-    stats["server_context"] = hs.config.server_context
+    stats["server_context"] = hs.config.server.server_context
     stats["timestamp"] = now
     stats["uptime_seconds"] = uptime
     version = sys.version_info
@@ -131,16 +135,18 @@ async def phone_stats_home(hs, stats, stats_process=_stats_process):
     log_level = synapse_logger.getEffectiveLevel()
     stats["log_level"] = logging.getLevelName(log_level)

-    logger.info("Reporting stats to %s: %s" % (hs.config.report_stats_endpoint, stats))
+    logger.info(
+        "Reporting stats to %s: %s" % (hs.config.metrics.report_stats_endpoint, stats)
+    )
     try:
         await hs.get_proxied_http_client().put_json(
-            hs.config.report_stats_endpoint, stats
+            hs.config.metrics.report_stats_endpoint, stats
         )
     except Exception as e:
         logger.warning("Error reporting stats: %s", e)


-def start_phone_stats_home(hs):
+def start_phone_stats_home(hs: "HomeServer"):
     """
     Start the background tasks which report phone home stats.
     """
@@ -169,7 +175,7 @@ def start_phone_stats_home(hs):
         current_mau_count_by_service = {}
         reserved_users = ()
         store = hs.get_datastore()
-        if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
+        if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
             current_mau_count = await store.get_monthly_active_count()
             current_mau_count_by_service = (
                 await store.get_monthly_active_count_by_service()
@@ -181,14 +187,14 @@ def start_phone_stats_home(hs):
             current_mau_by_service_gauge.labels(app_service).set(float(count))

         registered_reserved_users_mau_gauge.set(float(len(reserved_users)))
-        max_mau_gauge.set(float(hs.config.max_mau_value))
+        max_mau_gauge.set(float(hs.config.server.max_mau_value))

-    if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
+    if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
         generate_monthly_active_users()
         clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000)
     # End of monthly active user settings

-    if hs.config.report_stats:
+    if hs.config.metrics.report_stats:
         logger.info("Scheduling stats reporting for 3 hour intervals")
         clock.looping_call(phone_stats_home, 3 * 60 * 60 * 1000, hs, stats)
@@ -27,6 +27,7 @@ from synapse.util.caches.response_cache import ResponseCache

 if TYPE_CHECKING:
     from synapse.appservice import ApplicationService
+    from synapse.server import HomeServer

 logger = logging.getLogger(__name__)

@@ -84,7 +85,7 @@ class ApplicationServiceApi(SimpleHttpClient):
     pushing.
     """

-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         super().__init__(hs)
         self.clock = hs.get_clock()
@@ -1,4 +1,5 @@
 # Copyright 2015, 2016 OpenMarket Ltd
+# Copyright 2021 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,25 +12,44 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import sys
+
+from synapse.config._base import ConfigError
+from synapse.config.homeserver import HomeServerConfig

-if __name__ == "__main__":
-    import sys
-
-    from synapse.config.homeserver import HomeServerConfig
+
+def main(args):
+    action = args[1] if len(args) > 1 and args[1] == "read" else None
+    # If we're reading a key in the config file, then `args[1]` will be `read` and `args[2]`
+    # will be the key to read.
+    # We'll want to rework this code if we want to support more actions than just `read`.
+    load_config_args = args[3:] if action else args[1:]

-    action = sys.argv[1]
+    try:
+        config = HomeServerConfig.load_config("", load_config_args)
+    except ConfigError as e:
+        sys.stderr.write("\n" + str(e) + "\n")
+        sys.exit(1)
+
+    print("Config parses OK!")

     if action == "read":
-        key = sys.argv[2]
+        key = args[2]
+        key_parts = key.split(".")
+
+        value = config
         try:
-            config = HomeServerConfig.load_config("", sys.argv[3:])
-        except ConfigError as e:
-            sys.stderr.write("\n" + str(e) + "\n")
+            while len(key_parts):
+                value = getattr(value, key_parts[0])
+                key_parts.pop(0)
+
+            print(f"\n{key}: {value}")
+        except AttributeError:
+            print(
+                f"\nNo '{key}' key could be found in the provided configuration file."
+            )
             sys.exit(1)

-        print(getattr(config, key))
-        sys.exit(0)
-    else:
-        sys.stderr.write("Unknown command %r\n" % (action,))
-        sys.exit(1)
+
+if __name__ == "__main__":
+    main(sys.argv)
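Because the logic now lives in main(args), the checker can be driven programmatically as well as from the command line. A hedged sketch, assuming the argument layout above (args[1] is the action, args[2] the dotted key, and the remaining arguments go to the config loader):

    # Equivalent to: python -m synapse.config read server.server_name -c homeserver.yaml
    from synapse.config.__main__ import main

    main(["synapse.config", "read", "server.server_name", "-c", "homeserver.yaml"])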
@@ -118,21 +118,6 @@ class Config:
|
||||
"synapse", "res/templates"
|
||||
)
|
||||
|
||||
def __getattr__(self, item: str) -> Any:
|
||||
"""
|
||||
Try and fetch a configuration option that does not exist on this class.
|
||||
|
||||
This is so that existing configs that rely on `self.value`, where value
|
||||
is actually from a different config section, continue to work.
|
||||
"""
|
||||
if item in ["generate_config_section", "read_config"]:
|
||||
raise AttributeError(item)
|
||||
|
||||
if self.root is None:
|
||||
raise AttributeError(item)
|
||||
else:
|
||||
return self.root._get_unclassed_config(self.section, item)
|
||||
|
||||
@staticmethod
|
||||
def parse_size(value):
|
||||
if isinstance(value, int):
|
||||
@@ -200,11 +185,7 @@ class Config:
|
||||
@classmethod
|
||||
def ensure_directory(cls, dir_path):
|
||||
dir_path = cls.abspath(dir_path)
|
||||
try:
|
||||
os.makedirs(dir_path)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
raise
|
||||
os.makedirs(dir_path, exist_ok=True)
|
||||
if not os.path.isdir(dir_path):
|
||||
raise ConfigError("%s is not a directory" % (dir_path,))
|
||||
return dir_path
|
||||
@@ -293,7 +274,9 @@
         env.filters.update(
             {
                 "format_ts": _format_ts_filter,
-                "mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl),
+                "mxc_to_http": _create_mxc_to_http_filter(
+                    self.root.server.public_baseurl
+                ),
             }
         )

@@ -315,8 +298,6 @@ class RootConfig:
     config_classes = []

     def __init__(self):
-        self._configs = OrderedDict()
-
         for config_class in self.config_classes:
             if config_class.section is None:
                 raise ValueError("%r requires a section name" % (config_class,))
@@ -325,42 +306,7 @@
                 conf = config_class(self)
             except Exception as e:
                 raise Exception("Failed making %s: %r" % (config_class.section, e))
-            self._configs[config_class.section] = conf
-
-    def __getattr__(self, item: str) -> Any:
-        """
-        Redirect lookups on this object either to config objects, or values on
-        config objects, so that `config.tls.blah` works, as well as legacy uses
-        of things like `config.server_name`. It will first look up the config
-        section name, and then values on those config classes.
-        """
-        if item in self._configs.keys():
-            return self._configs[item]
-
-        return self._get_unclassed_config(None, item)
-
-    def _get_unclassed_config(self, asking_section: Optional[str], item: str):
-        """
-        Fetch a config value from one of the instantiated config classes that
-        has not been fetched directly.
-
-        Args:
-            asking_section: If this check is coming from a Config child, which
-                one? This section will not be asked if it has the value.
-            item: The configuration value key.
-
-        Raises:
-            AttributeError if no config classes have the config key. The body
-            will contain what sections were checked.
-        """
-        for key, val in self._configs.items():
-            if key == asking_section:
-                continue
-
-            if item in dir(val):
-                return getattr(val, item)
-
-        raise AttributeError(item, "not found in %s" % (list(self._configs.keys()),))
+            setattr(self, config_class.section, conf)

     def invoke_all(self, func_name: str, *args, **kwargs) -> MutableMapping[str, Any]:
         """
@@ -377,9 +323,11 @@
         """
         res = OrderedDict()

-        for name, config in self._configs.items():
+        for config_class in self.config_classes:
+            config = getattr(self, config_class.section)

             if hasattr(config, func_name):
-                res[name] = getattr(config, func_name)(*args, **kwargs)
+                res[config_class.section] = getattr(config, func_name)(*args, **kwargs)

         return res
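The upshot of these hunks is that config values are no longer found by scanning every section: each section becomes a plain attribute on the root config, so `config.server.server_name` works, while legacy bare lookups such as `config.server_name` stop being silently redirected. A small sketch of the difference; the class and attribute names are illustrative only:

    # Illustrative only. After this change, section objects are real attributes.
    class FakeSection:
        server_name = "example.com"

    class FakeRoot:
        pass

    root = FakeRoot()
    setattr(root, "server", FakeSection())  # what __init__ now does per section

    print(root.server.server_name)        # namespaced access: works
    print(hasattr(root, "server_name"))   # legacy bare access: False, no redirection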
@@ -693,8 +641,7 @@
             open_private_ports=config_args.open_private_ports,
         )

-        if not path_exists(config_dir_path):
-            os.makedirs(config_dir_path)
+        os.makedirs(config_dir_path, exist_ok=True)
         with open(config_path, "w") as config_file:
             config_file.write(config_str)
             config_file.write("\n\n# vim:ft=yaml")

@@ -26,6 +26,7 @@ from synapse.config import (
     redis,
     registration,
     repository,
+    retention,
     room_directory,
     saml2,
     server,
@@ -91,6 +92,7 @@ class RootConfig:
     modules: modules.ModulesConfig
     caches: cache.CacheConfig
     federation: federation.FederationConfig
+    retention: retention.RetentionConfig

     config_classes: List = ...
     def __init__(self) -> None: ...

@@ -76,7 +76,7 @@ class AccountValidityConfig(Config):
         )

         if self.account_validity_renew_by_email_enabled:
-            if not self.public_baseurl:
+            if not self.root.server.public_baseurl:
                 raise ConfigError("Can't send renewal emails without 'public_baseurl'")

         # Load account validity templates.

@@ -37,7 +37,7 @@ class CasConfig(Config):

             # The public baseurl is required because it is used by the redirect
             # template.
-            public_baseurl = self.public_baseurl
+            public_baseurl = self.root.server.public_baseurl
             if not public_baseurl:
                 raise ConfigError("cas_config requires a public_baseurl to be set")

@@ -13,6 +13,7 @@
 # limitations under the License.

 from os import path
+from typing import Optional

 from synapse.config import ConfigError

@@ -78,8 +79,8 @@ class ConsentConfig(Config):
     def __init__(self, *args):
         super().__init__(*args)

-        self.user_consent_version = None
-        self.user_consent_template_dir = None
+        self.user_consent_version: Optional[str] = None
+        self.user_consent_template_dir: Optional[str] = None
         self.user_consent_server_notice_content = None
         self.user_consent_server_notice_to_guests = False
         self.block_events_without_consent_error = None
@@ -94,7 +95,9 @@
             return
         self.user_consent_version = str(consent_config["version"])
         self.user_consent_template_dir = self.abspath(consent_config["template_dir"])
-        if not path.isdir(self.user_consent_template_dir):
+        if not isinstance(self.user_consent_template_dir, str) or not path.isdir(
+            self.user_consent_template_dir
+        ):
             raise ConfigError(
                 "Could not find template directory '%s'"
                 % (self.user_consent_template_dir,)

@@ -19,7 +19,6 @@ import email.utils
 import logging
 import os
 from enum import Enum
-from typing import Optional

 import attr

@@ -135,7 +134,7 @@ class EmailConfig(Config):
             # msisdn is currently always remote while Synapse does not support any method of
             # sending SMS messages
             ThreepidBehaviour.REMOTE
-            if self.account_threepid_delegate_email
+            if self.root.registration.account_threepid_delegate_email
             else ThreepidBehaviour.LOCAL
         )
         # Prior to Synapse v1.4.0, there was another option that defined whether Synapse would
@@ -144,7 +143,7 @@ class EmailConfig(Config):
         # identity server in the process.
         self.using_identity_server_from_trusted_list = False
         if (
-            not self.account_threepid_delegate_email
+            not self.root.registration.account_threepid_delegate_email
             and config.get("trust_identity_server_for_password_resets", False) is True
         ):
             # Use the first entry in self.trusted_third_party_id_servers instead
@@ -156,7 +155,7 @@ class EmailConfig(Config):

             # trusted_third_party_id_servers does not contain a scheme whereas
             # account_threepid_delegate_email is expected to. Presume https
-            self.account_threepid_delegate_email: Optional[str] = (
+            self.root.registration.account_threepid_delegate_email = (
                 "https://" + first_trusted_identity_server
             )
             self.using_identity_server_from_trusted_list = True
@@ -335,7 +334,7 @@ class EmailConfig(Config):
             "client_base_url", email_config.get("riot_base_url", None)
         )

-        if self.account_validity_renew_by_email_enabled:
+        if self.root.account_validity.account_validity_renew_by_email_enabled:
             expiry_template_html = email_config.get(
                 "expiry_template_html", "notice_expiry.html"
             )

@@ -24,6 +24,11 @@ class ExperimentalConfig(Config):
     def read_config(self, config: JsonDict, **kwargs):
         experimental = config.get("experimental_features") or {}

+        # Whether to enable experimental MSC1849 (aka relations) support
+        self.msc1849_enabled = config.get("experimental_msc1849_support_enabled", True)
+
+        # MSC3440 (thread relation)
+        self.msc3440_enabled: bool = experimental.get("msc3440_enabled", False)

         # MSC3026 (busy presence state)
         self.msc3026_enabled: bool = experimental.get("msc3026_enabled", False)
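The MSC1849 toggle moves from the top level of the config into the section read above (see the matching removal in ServerConfig further down). A hedged sketch of how such nested flags resolve, using plain dicts in place of the real YAML loader:

    # Illustrative only: how a nested experimental flag falls back to its default.
    config = {"experimental_features": {"msc3440_enabled": True}}

    experimental = config.get("experimental_features") or {}
    msc3440_enabled = experimental.get("msc3440_enabled", False)  # True
    msc3026_enabled = experimental.get("msc3026_enabled", False)  # False (default)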
@@ -38,6 +38,7 @@ from .ratelimiting import RatelimitConfig
 from .redis import RedisConfig
 from .registration import RegistrationConfig
 from .repository import ContentRepositoryConfig
+from .retention import RetentionConfig
 from .room import RoomConfig
 from .room_directory import RoomDirectoryConfig
 from .saml2 import SAML2Config
@@ -59,6 +60,7 @@ class HomeServerConfig(RootConfig):
     config_classes = [
         ModulesConfig,
         ServerConfig,
+        RetentionConfig,
         TlsConfig,
         FederationConfig,
         CacheConfig,

@@ -145,11 +145,13 @@ class KeyConfig(Config):

         # list of TrustedKeyServer objects
         self.key_servers = list(
-            _parse_key_servers(key_servers, self.federation_verify_certificates)
+            _parse_key_servers(
+                key_servers, self.root.tls.federation_verify_certificates
+            )
         )

         self.macaroon_secret_key = config.get(
-            "macaroon_secret_key", self.registration_shared_secret
+            "macaroon_secret_key", self.root.registration.registration_shared_secret
         )

         if not self.macaroon_secret_key:

@@ -18,6 +18,7 @@ import os
 import sys
 import threading
 from string import Template
+from typing import TYPE_CHECKING

 import yaml
 from zope.interface import implementer
@@ -38,6 +39,9 @@ from synapse.util.versionstring import get_version_string

 from ._base import Config, ConfigError

+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
 DEFAULT_LOG_CONFIG = Template(
     """\
 # Log configuration for Synapse.
@@ -306,7 +310,10 @@ def _reload_logging_config(log_config_path):


 def setup_logging(
-    hs, config, use_worker_options=False, logBeginner: LogBeginner = globalLogBeginner
+    hs: "HomeServer",
+    config,
+    use_worker_options=False,
+    logBeginner: LogBeginner = globalLogBeginner,
 ) -> None:
     """
     Set up the logging subsystem.
@@ -322,7 +329,9 @@

     """
     log_config_path = (
-        config.worker_log_config if use_worker_options else config.log_config
+        config.worker.worker_log_config
+        if use_worker_options
+        else config.logging.log_config
     )

     # Perform one-time logging configuration.

@@ -58,7 +58,7 @@ class OIDCConfig(Config):
                 "Multiple OIDC providers have the idp_id %r." % idp_id
             )

-        public_baseurl = self.public_baseurl
+        public_baseurl = self.root.server.public_baseurl
         if public_baseurl is None:
             raise ConfigError("oidc_config requires a public_baseurl to be set")
         self.oidc_callback_url = public_baseurl + "_synapse/client/oidc/callback"

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from typing import Any, List
+from typing import Any, List, Tuple, Type

 from synapse.util.module_loader import load_module

@@ -25,7 +25,30 @@ class PasswordAuthProviderConfig(Config):
     section = "authproviders"

     def read_config(self, config, **kwargs):
-        self.password_providers: List[Any] = []
+        """Parses the old password auth providers config. The config format looks like this:
+
+        password_providers:
+           # Example config for an LDAP auth provider
+           - module: "ldap_auth_provider.LdapAuthProvider"
+             config:
+               enabled: true
+               uri: "ldap://ldap.example.com:389"
+               start_tls: true
+               base: "ou=users,dc=example,dc=com"
+               attributes:
+                  uid: "cn"
+                  mail: "email"
+                  name: "givenName"
+               #bind_dn:
+               #bind_password:
+               #filter: "(objectClass=posixAccount)"
+
+        We expect admins to use modules for this feature (which is why it doesn't appear
+        in the sample config file), but we want to keep support for it around for a bit
+        for backwards compatibility.
+        """
+
+        self.password_providers: List[Tuple[Type, Any]] = []
         providers = []

         # We want to be backwards compatible with the old `ldap_config`
@@ -49,33 +72,3 @@ class PasswordAuthProviderConfig(Config):
             )

         self.password_providers.append((provider_class, provider_config))
-
-    def generate_config_section(self, **kwargs):
-        return """\
-        # Password providers allow homeserver administrators to integrate
-        # their Synapse installation with existing authentication methods
-        # ex. LDAP, external tokens, etc.
-        #
-        # For more information and known implementations, please see
-        # https://matrix-org.github.io/synapse/latest/password_auth_providers.html
-        #
-        # Note: instances wishing to use SAML or CAS authentication should
-        # instead use the `saml2_config` or `cas_config` options,
-        # respectively.
-        #
-        password_providers:
-        #    # Example config for an LDAP auth provider
-        #    - module: "ldap_auth_provider.LdapAuthProvider"
-        #      config:
-        #        enabled: true
-        #        uri: "ldap://ldap.example.com:389"
-        #        start_tls: true
-        #        base: "ou=users,dc=example,dc=com"
-        #        attributes:
-        #           uid: "cn"
-        #           mail: "email"
-        #           name: "givenName"
-        #        #bind_dn:
-        #        #bind_password:
-        #        #filter: "(objectClass=posixAccount)"
-        """
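For context, each `password_providers` entry ends up as a `(class, config)` pair, which is what the new `List[Tuple[Type, Any]]` annotation reflects. A rough standalone sketch of that shape, using stdlib importlib rather than Synapse's own `load_module` (whose exact signature is not shown in this diff, so treat this as an assumption):

    # Hypothetical sketch: turning a providers list into (class, config) pairs.
    from importlib import import_module

    def load_provider(entry):
        # entry is one parsed YAML mapping, e.g.
        # {"module": "ldap_auth_provider.LdapAuthProvider", "config": {...}}
        module_path, class_name = entry["module"].rsplit(".", 1)
        provider_class = getattr(import_module(module_path), class_name)
        return provider_class, entry.get("config", {})

    # `providers_config` stands in for the list parsed from the YAML above.
    # password_providers = [load_provider(e) for e in providers_config]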
@@ -45,7 +45,10 @@ class RegistrationConfig(Config):
         account_threepid_delegates = config.get("account_threepid_delegates") or {}
         self.account_threepid_delegate_email = account_threepid_delegates.get("email")
         self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")
-        if self.account_threepid_delegate_msisdn and not self.public_baseurl:
+        if (
+            self.account_threepid_delegate_msisdn
+            and not self.root.server.public_baseurl
+        ):
             raise ConfigError(
                 "The configuration option `public_baseurl` is required if "
                 "`account_threepid_delegate.msisdn` is set, such that "
@@ -85,7 +88,7 @@ class RegistrationConfig(Config):
         if mxid_localpart:
             # Convert the localpart to a full mxid.
             self.auto_join_user_id = UserID(
-                mxid_localpart, self.server_name
+                mxid_localpart, self.root.server.server_name
             ).to_string()

             if self.autocreate_auto_join_rooms:

@@ -94,7 +94,7 @@ class ContentRepositoryConfig(Config):
         # Only enable the media repo if either the media repo is enabled or the
         # current worker app is the media repo.
         if (
-            self.enable_media_repo is False
+            self.root.server.enable_media_repo is False
             and config.get("worker_app") != "synapse.app.media_repository"
         ):
             self.can_load_media_repo = False

synapse/config/retention.py (new file, 226 lines)
@@ -0,0 +1,226 @@
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import List, Optional

import attr

from synapse.config._base import Config, ConfigError

logger = logging.getLogger(__name__)


@attr.s(slots=True, frozen=True, auto_attribs=True)
class RetentionPurgeJob:
    """Object describing the configuration of a single retention purge job"""

    interval: int
    shortest_max_lifetime: Optional[int]
    longest_max_lifetime: Optional[int]


class RetentionConfig(Config):
    section = "retention"

    def read_config(self, config, **kwargs):
        retention_config = config.get("retention")
        if retention_config is None:
            retention_config = {}

        self.retention_enabled = retention_config.get("enabled", False)

        retention_default_policy = retention_config.get("default_policy")

        if retention_default_policy is not None:
            self.retention_default_min_lifetime = retention_default_policy.get(
                "min_lifetime"
            )
            if self.retention_default_min_lifetime is not None:
                self.retention_default_min_lifetime = self.parse_duration(
                    self.retention_default_min_lifetime
                )

            self.retention_default_max_lifetime = retention_default_policy.get(
                "max_lifetime"
            )
            if self.retention_default_max_lifetime is not None:
                self.retention_default_max_lifetime = self.parse_duration(
                    self.retention_default_max_lifetime
                )

            if (
                self.retention_default_min_lifetime is not None
                and self.retention_default_max_lifetime is not None
                and (
                    self.retention_default_min_lifetime
                    > self.retention_default_max_lifetime
                )
            ):
                raise ConfigError(
                    "The default retention policy's 'min_lifetime' can not be greater"
                    " than its 'max_lifetime'"
                )
        else:
            self.retention_default_min_lifetime = None
            self.retention_default_max_lifetime = None

        if self.retention_enabled:
            logger.info(
                "Message retention policies support enabled with the following default"
                " policy: min_lifetime = %s ; max_lifetime = %s",
                self.retention_default_min_lifetime,
                self.retention_default_max_lifetime,
            )

        self.retention_allowed_lifetime_min = retention_config.get(
            "allowed_lifetime_min"
        )
        if self.retention_allowed_lifetime_min is not None:
            self.retention_allowed_lifetime_min = self.parse_duration(
                self.retention_allowed_lifetime_min
            )

        self.retention_allowed_lifetime_max = retention_config.get(
            "allowed_lifetime_max"
        )
        if self.retention_allowed_lifetime_max is not None:
            self.retention_allowed_lifetime_max = self.parse_duration(
                self.retention_allowed_lifetime_max
            )

        if (
            self.retention_allowed_lifetime_min is not None
            and self.retention_allowed_lifetime_max is not None
            and self.retention_allowed_lifetime_min
            > self.retention_allowed_lifetime_max
        ):
            raise ConfigError(
                "Invalid retention policy limits: 'allowed_lifetime_min' can not be"
                " greater than 'allowed_lifetime_max'"
            )

        self.retention_purge_jobs: List[RetentionPurgeJob] = []
        for purge_job_config in retention_config.get("purge_jobs", []):
            interval_config = purge_job_config.get("interval")

            if interval_config is None:
                raise ConfigError(
                    "A retention policy's purge jobs configuration must have the"
                    " 'interval' key set."
                )

            interval = self.parse_duration(interval_config)

            shortest_max_lifetime = purge_job_config.get("shortest_max_lifetime")

            if shortest_max_lifetime is not None:
                shortest_max_lifetime = self.parse_duration(shortest_max_lifetime)

            longest_max_lifetime = purge_job_config.get("longest_max_lifetime")

            if longest_max_lifetime is not None:
                longest_max_lifetime = self.parse_duration(longest_max_lifetime)

            if (
                shortest_max_lifetime is not None
                and longest_max_lifetime is not None
                and shortest_max_lifetime > longest_max_lifetime
            ):
                raise ConfigError(
                    "A retention policy's purge jobs configuration's"
                    " 'shortest_max_lifetime' value can not be greater than its"
                    " 'longest_max_lifetime' value."
                )

            self.retention_purge_jobs.append(
                RetentionPurgeJob(interval, shortest_max_lifetime, longest_max_lifetime)
            )

        if not self.retention_purge_jobs:
            self.retention_purge_jobs = [
                RetentionPurgeJob(self.parse_duration("1d"), None, None)
            ]

    def generate_config_section(self, config_dir_path, server_name, **kwargs):
        return """\
        # Message retention policy at the server level.
        #
        # Room admins and mods can define a retention period for their rooms using the
        # 'm.room.retention' state event, and server admins can cap this period by setting
        # the 'allowed_lifetime_min' and 'allowed_lifetime_max' config options.
        #
        # If this feature is enabled, Synapse will regularly look for and purge events
        # which are older than the room's maximum retention period. Synapse will also
        # filter events received over federation so that events that should have been
        # purged are ignored and not stored again.
        #
        retention:
          # The message retention policies feature is disabled by default. Uncomment the
          # following line to enable it.
          #
          #enabled: true

          # Default retention policy. If set, Synapse will apply it to rooms that lack the
          # 'm.room.retention' state event. Currently, the value of 'min_lifetime' doesn't
          # matter much because Synapse doesn't take it into account yet.
          #
          #default_policy:
          #  min_lifetime: 1d
          #  max_lifetime: 1y

          # Retention policy limits. If set, and the state of a room contains a
          # 'm.room.retention' event in its state which contains a 'min_lifetime' or a
          # 'max_lifetime' that's out of these bounds, Synapse will cap the room's policy
          # to these limits when running purge jobs.
          #
          #allowed_lifetime_min: 1d
          #allowed_lifetime_max: 1y

          # Server admins can define the settings of the background jobs purging the
          # events whose lifetime has expired under the 'purge_jobs' section.
          #
          # If no configuration is provided, a single job will be set up to delete expired
          # events in every room daily.
          #
          # Each job's configuration defines which range of message lifetimes the job
          # takes care of. For example, if 'shortest_max_lifetime' is '2d' and
          # 'longest_max_lifetime' is '3d', the job will handle purging expired events in
          # rooms whose state defines a 'max_lifetime' that's both higher than 2 days, and
          # lower than or equal to 3 days. Both the minimum and the maximum value of a
          # range are optional, e.g. a job with no 'shortest_max_lifetime' and a
          # 'longest_max_lifetime' of '3d' will handle every room with a retention policy
          # whose 'max_lifetime' is lower than or equal to three days.
          #
          # The rationale for this per-job configuration is that some rooms might have a
          # retention policy with a low 'max_lifetime', where history needs to be purged
          # of outdated messages on a more frequent basis than for the rest of the rooms
          # (e.g. every 12h), without wanting that purge to be performed by a job that
          # iterates over every room it knows, which could be heavy on the server.
          #
          # If any purge job is configured, it is strongly recommended to have at least
          # a single job with neither 'shortest_max_lifetime' nor 'longest_max_lifetime'
          # set, or one job without 'shortest_max_lifetime' and one job without
          # 'longest_max_lifetime' set. Otherwise some rooms might be ignored, even if
          # 'allowed_lifetime_min' and 'allowed_lifetime_max' are set, because capping a
          # room's policy to these values is done after the policies are retrieved from
          # Synapse's database (which is done using the range specified in a purge job's
          # configuration).
          #
          #purge_jobs:
          #  - longest_max_lifetime: 3d
          #    interval: 12h
          #  - shortest_max_lifetime: 3d
          #    interval: 1d
        """
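To make the purge-job ranges concrete, here is a small self-contained sketch (not from the diff) of how a room's capped max_lifetime determines which configured job covers it, following the "higher than shortest, lower than or equal to longest" semantics described in the sample config above:

    from typing import Optional

    def job_covers(
        room_max_lifetime_ms: int,
        shortest: Optional[int],
        longest: Optional[int],
    ) -> bool:
        # A job handles rooms whose max_lifetime is strictly greater than
        # shortest_max_lifetime and lower than or equal to longest_max_lifetime;
        # a missing bound is treated as unbounded.
        if shortest is not None and room_max_lifetime_ms <= shortest:
            return False
        if longest is not None and room_max_lifetime_ms > longest:
            return False
        return True

    DAY_MS = 24 * 60 * 60 * 1000
    # With jobs (None, 3d) and (3d, None), a room with max_lifetime = 2d is
    # covered by the first job, and one with max_lifetime = 1y by the second.
    assert job_covers(2 * DAY_MS, None, 3 * DAY_MS)
    assert job_covers(365 * DAY_MS, 3 * DAY_MS, None)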
@@ -199,7 +199,7 @@ class SAML2Config(Config):
         """
         import saml2

-        public_baseurl = self.public_baseurl
+        public_baseurl = self.root.server.public_baseurl
         if public_baseurl is None:
             raise ConfigError("saml2_config requires a public_baseurl to be set")

@@ -1,6 +1,4 @@
-# Copyright 2014-2016 OpenMarket Ltd
-# Copyright 2017-2018 New Vector Ltd
-# Copyright 2019 The Matrix.org Foundation C.I.C.
+# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -19,7 +17,7 @@ import logging
 import os.path
 import re
 from textwrap import indent
-from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
+from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union

 import attr
 import yaml
@@ -184,49 +182,65 @@ KNOWN_RESOURCES = {

 @attr.s(frozen=True)
 class HttpResourceConfig:
-    names = attr.ib(
-        type=List[str],
+    names: List[str] = attr.ib(
         factory=list,
         validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)),  # type: ignore
     )
-    compress = attr.ib(
-        type=bool,
+    compress: bool = attr.ib(
         default=False,
         validator=attr.validators.optional(attr.validators.instance_of(bool)),  # type: ignore[arg-type]
     )


-@attr.s(frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class HttpListenerConfig:
     """Object describing the http-specific parts of the config of a listener"""

-    x_forwarded = attr.ib(type=bool, default=False)
-    resources = attr.ib(type=List[HttpResourceConfig], factory=list)
-    additional_resources = attr.ib(type=Dict[str, dict], factory=dict)
-    tag = attr.ib(type=str, default=None)
+    x_forwarded: bool = False
+    resources: List[HttpResourceConfig] = attr.ib(factory=list)
+    additional_resources: Dict[str, dict] = attr.ib(factory=dict)
+    tag: Optional[str] = None


-@attr.s(frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class ListenerConfig:
     """Object describing the configuration of a single listener."""

-    port = attr.ib(type=int, validator=attr.validators.instance_of(int))
-    bind_addresses = attr.ib(type=List[str])
-    type = attr.ib(type=str, validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
-    tls = attr.ib(type=bool, default=False)
+    port: int = attr.ib(validator=attr.validators.instance_of(int))
+    bind_addresses: List[str]
+    type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES))
+    tls: bool = False

     # http_options is only populated if type=http
-    http_options = attr.ib(type=Optional[HttpListenerConfig], default=None)
+    http_options: Optional[HttpListenerConfig] = None


-@attr.s(frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class ManholeConfig:
     """Object describing the configuration of the manhole"""

-    username = attr.ib(type=str, validator=attr.validators.instance_of(str))
-    password = attr.ib(type=str, validator=attr.validators.instance_of(str))
-    priv_key = attr.ib(type=Optional[Key])
-    pub_key = attr.ib(type=Optional[Key])
+    username: str = attr.ib(validator=attr.validators.instance_of(str))
+    password: str = attr.ib(validator=attr.validators.instance_of(str))
+    priv_key: Optional[Key]
+    pub_key: Optional[Key]


+@attr.s(frozen=True)
+class LimitRemoteRoomsConfig:
+    enabled: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
+    complexity: Union[float, int] = attr.ib(
+        validator=attr.validators.instance_of(
+            (float, int)  # type: ignore[arg-type]  # noqa
+        ),
+        default=1.0,
+    )
+    complexity_error: str = attr.ib(
+        validator=attr.validators.instance_of(str),
+        default=ROOM_COMPLEXITY_TOO_GREAT,
+    )
+    admins_can_join: bool = attr.ib(
+        validator=attr.validators.instance_of(bool), default=False
+    )
+
+
 class ServerConfig(Config):
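The listener classes move from `attr.ib(type=...)` declarations to the `auto_attribs=True` annotation syntax. A minimal standalone sketch of the equivalence (illustrative, not part of the diff):

    from typing import Optional

    import attr

    # Old style: types passed to attr.ib().
    @attr.s(frozen=True)
    class OldStyle:
        tag = attr.ib(type=Optional[str], default=None)

    # New style: plain annotations, collected because auto_attribs=True.
    @attr.s(slots=True, frozen=True, auto_attribs=True)
    class NewStyle:
        tag: Optional[str] = None

    assert OldStyle().tag is None and NewStyle().tag is None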
@@ -353,11 +367,6 @@
         # (other than those sent by local server admins)
         self.block_non_admin_invites = config.get("block_non_admin_invites", False)

-        # Whether to enable experimental MSC1849 (aka relations) support
-        self.experimental_msc1849_support_enabled = config.get(
-            "experimental_msc1849_support_enabled", True
-        )
-
         # Options to control access by tracking MAU
         self.limit_usage_by_mau = config.get("limit_usage_by_mau", False)
         self.max_mau_value = 0
@@ -443,132 +452,6 @@
         # events with profile information that differ from the target's global profile.
         self.allow_per_room_profiles = config.get("allow_per_room_profiles", True)

-        retention_config = config.get("retention")
-        if retention_config is None:
-            retention_config = {}
-
-        self.retention_enabled = retention_config.get("enabled", False)
-
-        retention_default_policy = retention_config.get("default_policy")
-
-        if retention_default_policy is not None:
-            self.retention_default_min_lifetime = retention_default_policy.get(
-                "min_lifetime"
-            )
-            if self.retention_default_min_lifetime is not None:
-                self.retention_default_min_lifetime = self.parse_duration(
-                    self.retention_default_min_lifetime
-                )
-
-            self.retention_default_max_lifetime = retention_default_policy.get(
-                "max_lifetime"
-            )
-            if self.retention_default_max_lifetime is not None:
-                self.retention_default_max_lifetime = self.parse_duration(
-                    self.retention_default_max_lifetime
-                )
-
-            if (
-                self.retention_default_min_lifetime is not None
-                and self.retention_default_max_lifetime is not None
-                and (
-                    self.retention_default_min_lifetime
-                    > self.retention_default_max_lifetime
-                )
-            ):
-                raise ConfigError(
-                    "The default retention policy's 'min_lifetime' can not be greater"
-                    " than its 'max_lifetime'"
-                )
-        else:
-            self.retention_default_min_lifetime = None
-            self.retention_default_max_lifetime = None
-
-        if self.retention_enabled:
-            logger.info(
-                "Message retention policies support enabled with the following default"
-                " policy: min_lifetime = %s ; max_lifetime = %s",
-                self.retention_default_min_lifetime,
-                self.retention_default_max_lifetime,
-            )
-
-        self.retention_allowed_lifetime_min = retention_config.get(
-            "allowed_lifetime_min"
-        )
-        if self.retention_allowed_lifetime_min is not None:
-            self.retention_allowed_lifetime_min = self.parse_duration(
-                self.retention_allowed_lifetime_min
-            )
-
-        self.retention_allowed_lifetime_max = retention_config.get(
-            "allowed_lifetime_max"
-        )
-        if self.retention_allowed_lifetime_max is not None:
-            self.retention_allowed_lifetime_max = self.parse_duration(
-                self.retention_allowed_lifetime_max
-            )
-
-        if (
-            self.retention_allowed_lifetime_min is not None
-            and self.retention_allowed_lifetime_max is not None
-            and self.retention_allowed_lifetime_min
-            > self.retention_allowed_lifetime_max
-        ):
-            raise ConfigError(
-                "Invalid retention policy limits: 'allowed_lifetime_min' can not be"
-                " greater than 'allowed_lifetime_max'"
-            )
-
-        self.retention_purge_jobs: List[Dict[str, Optional[int]]] = []
-        for purge_job_config in retention_config.get("purge_jobs", []):
-            interval_config = purge_job_config.get("interval")
-
-            if interval_config is None:
-                raise ConfigError(
-                    "A retention policy's purge jobs configuration must have the"
-                    " 'interval' key set."
-                )
-
-            interval = self.parse_duration(interval_config)
-
-            shortest_max_lifetime = purge_job_config.get("shortest_max_lifetime")
-
-            if shortest_max_lifetime is not None:
-                shortest_max_lifetime = self.parse_duration(shortest_max_lifetime)
-
-            longest_max_lifetime = purge_job_config.get("longest_max_lifetime")
-
-            if longest_max_lifetime is not None:
-                longest_max_lifetime = self.parse_duration(longest_max_lifetime)
-
-            if (
-                shortest_max_lifetime is not None
-                and longest_max_lifetime is not None
-                and shortest_max_lifetime > longest_max_lifetime
-            ):
-                raise ConfigError(
-                    "A retention policy's purge jobs configuration's"
-                    " 'shortest_max_lifetime' value can not be greater than its"
-                    " 'longest_max_lifetime' value."
-                )
-
-            self.retention_purge_jobs.append(
-                {
-                    "interval": interval,
-                    "shortest_max_lifetime": shortest_max_lifetime,
-                    "longest_max_lifetime": longest_max_lifetime,
-                }
-            )
-
-        if not self.retention_purge_jobs:
-            self.retention_purge_jobs = [
-                {
-                    "interval": self.parse_duration("1d"),
-                    "shortest_max_lifetime": None,
-                    "longest_max_lifetime": None,
-                }
-            ]
-
         self.listeners = [parse_listener_def(x) for x in config.get("listeners", [])]

         # no_tls is not really supported any more, but let's grandfather it in
@@ -591,25 +474,6 @@
         self.gc_thresholds = read_gc_thresholds(config.get("gc_thresholds", None))
         self.gc_seconds = self.read_gc_intervals(config.get("gc_min_interval", None))

-        @attr.s
-        class LimitRemoteRoomsConfig:
-            enabled = attr.ib(
-                validator=attr.validators.instance_of(bool), default=False
-            )
-            complexity = attr.ib(
-                validator=attr.validators.instance_of(
-                    (float, int)  # type: ignore[arg-type]  # noqa
-                ),
-                default=1.0,
-            )
-            complexity_error = attr.ib(
-                validator=attr.validators.instance_of(str),
-                default=ROOM_COMPLEXITY_TOO_GREAT,
-            )
-            admins_can_join = attr.ib(
-                validator=attr.validators.instance_of(bool), default=False
-            )
-
         self.limit_remote_rooms = LimitRemoteRoomsConfig(
             **(config.get("limit_remote_rooms") or {})
         )
@@ -1259,75 +1123,6 @@
         #
         #user_ips_max_age: 14d

-        # Message retention policy at the server level.
-        #
-        # Room admins and mods can define a retention period for their rooms using the
-        # 'm.room.retention' state event, and server admins can cap this period by setting
-        # the 'allowed_lifetime_min' and 'allowed_lifetime_max' config options.
-        #
-        # If this feature is enabled, Synapse will regularly look for and purge events
-        # which are older than the room's maximum retention period. Synapse will also
-        # filter events received over federation so that events that should have been
-        # purged are ignored and not stored again.
-        #
-        retention:
-          # The message retention policies feature is disabled by default. Uncomment the
-          # following line to enable it.
-          #
-          #enabled: true
-
-          # Default retention policy. If set, Synapse will apply it to rooms that lack the
-          # 'm.room.retention' state event. Currently, the value of 'min_lifetime' doesn't
-          # matter much because Synapse doesn't take it into account yet.
-          #
-          #default_policy:
-          #  min_lifetime: 1d
-          #  max_lifetime: 1y
-
-          # Retention policy limits. If set, and the state of a room contains a
-          # 'm.room.retention' event in its state which contains a 'min_lifetime' or a
-          # 'max_lifetime' that's out of these bounds, Synapse will cap the room's policy
-          # to these limits when running purge jobs.
-          #
-          #allowed_lifetime_min: 1d
-          #allowed_lifetime_max: 1y
-
-          # Server admins can define the settings of the background jobs purging the
-          # events which lifetime has expired under the 'purge_jobs' section.
-          #
-          # If no configuration is provided, a single job will be set up to delete expired
-          # events in every room daily.
-          #
-          # Each job's configuration defines which range of message lifetimes the job
-          # takes care of. For example, if 'shortest_max_lifetime' is '2d' and
-          # 'longest_max_lifetime' is '3d', the job will handle purging expired events in
-          # rooms whose state defines a 'max_lifetime' that's both higher than 2 days, and
-          # lower than or equal to 3 days. Both the minimum and the maximum value of a
-          # range are optional, e.g. a job with no 'shortest_max_lifetime' and a
-          # 'longest_max_lifetime' of '3d' will handle every room with a retention policy
-          # which 'max_lifetime' is lower than or equal to three days.
-          #
-          # The rationale for this per-job configuration is that some rooms might have a
-          # retention policy with a low 'max_lifetime', where history needs to be purged
-          # of outdated messages on a more frequent basis than for the rest of the rooms
-          # (e.g. every 12h), but not want that purge to be performed by a job that's
-          # iterating over every room it knows, which could be heavy on the server.
-          #
-          # If any purge job is configured, it is strongly recommended to have at least
-          # a single job with neither 'shortest_max_lifetime' nor 'longest_max_lifetime'
-          # set, or one job without 'shortest_max_lifetime' and one job without
-          # 'longest_max_lifetime' set. Otherwise some rooms might be ignored, even if
-          # 'allowed_lifetime_min' and 'allowed_lifetime_max' are set, because capping a
-          # room's policy to these values is done after the policies are retrieved from
-          # Synapse's database (which is done using the range specified in a purge job's
-          # configuration).
-          #
-          #purge_jobs:
-          #  - longest_max_lifetime: 3d
-          #    interval: 12h
-          #  - shortest_max_lifetime: 3d
-          #    interval: 1d

         # Inhibits the /requestToken endpoints from returning an error that might leak
         # information about whether an e-mail address is in use or not on this
         # homeserver.
@@ -1447,7 +1242,7 @@ def read_gc_thresholds(thresholds):
         return None
     try:
         assert len(thresholds) == 3
-        return (int(thresholds[0]), int(thresholds[1]), int(thresholds[2]))
+        return int(thresholds[0]), int(thresholds[1]), int(thresholds[2])
     except Exception:
         raise ConfigError(
             "Value of `gc_threshold` must be a list of three integers if set"

@@ -73,7 +73,9 @@ class ServerNoticesConfig(Config):
             return

         mxid_localpart = c["system_mxid_localpart"]
-        self.server_notices_mxid = UserID(mxid_localpart, self.server_name).to_string()
+        self.server_notices_mxid = UserID(
+            mxid_localpart, self.root.server.server_name
+        ).to_string()
         self.server_notices_mxid_display_name = c.get("system_mxid_display_name", None)
         self.server_notices_mxid_avatar_url = c.get("system_mxid_avatar_url", None)
         # todo: i18n

@@ -103,8 +103,10 @@ class SSOConfig(Config):
         # the client's.
         # public_baseurl is an optional setting, so we only add the fallback's URL to the
         # list if it's provided (because we can't figure out what that URL is otherwise).
-        if self.public_baseurl:
-            login_fallback_url = self.public_baseurl + "_matrix/static/client/login"
+        if self.root.server.public_baseurl:
+            login_fallback_url = (
+                self.root.server.public_baseurl + "_matrix/static/client/login"
+            )
             self.sso_client_whitelist.append(login_fallback_url)

     def generate_config_section(self, **kwargs):

@@ -172,9 +172,12 @@ class TlsConfig(Config):
         )

         # YYYYMMDDhhmmssZ -- in UTC
-        expires_on = datetime.strptime(
-            tls_certificate.get_notAfter().decode("ascii"), "%Y%m%d%H%M%SZ"
-        )
+        expiry_data = tls_certificate.get_notAfter()
+        if expiry_data is None:
+            raise ValueError(
+                "TLS Certificate has no expiry date, and this is not permitted"
+            )
+        expires_on = datetime.strptime(expiry_data.decode("ascii"), "%Y%m%d%H%M%SZ")
         now = datetime.utcnow()
         days_remaining = (expires_on - now).days
         return days_remaining
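The None-guard above exists because pyOpenSSL types get_notAfter() as returning an optional bytes value. A standalone sketch of the date handling (the certificate object itself is assumed; only the parsing is shown):

    from datetime import datetime

    def days_until_expiry(not_after: bytes) -> int:
        # ASN.1 GeneralizedTime, e.g. b"20251231235959Z" (UTC).
        expires_on = datetime.strptime(not_after.decode("ascii"), "%Y%m%d%H%M%SZ")
        return (expires_on - datetime.utcnow()).days

    print(days_until_expiry(b"20251231235959Z"))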
@@ -45,12 +45,16 @@ class UserDirectoryConfig(Config):
|
||||
#enabled: false
|
||||
|
||||
# Defines whether to search all users visible to your HS when searching
|
||||
# the user directory, rather than limiting to users visible in public
|
||||
# rooms. Defaults to false.
|
||||
# the user directory. If false, search results will only contain users
|
||||
# visible in public rooms and users sharing a room with the requester.
|
||||
# Defaults to false.
|
||||
#
|
||||
# If you set it true, you'll have to rebuild the user_directory search
|
||||
# indexes, see:
|
||||
# https://matrix-org.github.io/synapse/latest/user_directory.html
|
||||
# NB. If you set this to true, and the last time the user_directory search
|
||||
# indexes were (re)built was before Synapse 1.44, you'll have to
|
||||
# rebuild the indexes in order to search through all known users.
|
||||
# These indexes are built the first time Synapse starts; admins can
|
||||
# manually trigger a rebuild following the instructions at
|
||||
# https://matrix-org.github.io/synapse/latest/user_directory.html
|
||||
#
|
||||
# Uncomment to return search results containing all known users, even if that
|
||||
# user does not share a room with the requester.
|
||||
|
||||
@@ -29,9 +29,12 @@ from twisted.internet.ssl import (
|
||||
TLSVersion,
|
||||
platformTrust,
|
||||
)
|
||||
from twisted.protocols.tls import TLSMemoryBIOProtocol
|
||||
from twisted.python.failure import Failure
|
||||
from twisted.web.iweb import IPolicyForHTTPS
|
||||
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -51,7 +54,7 @@ class ServerContextFactory(ContextFactory):
|
||||
per https://github.com/matrix-org/synapse/issues/1691
|
||||
"""
|
||||
|
||||
def __init__(self, config):
|
||||
def __init__(self, config: HomeServerConfig):
|
||||
# TODO: once pyOpenSSL exposes TLS_METHOD and SSL_CTX_set_min_proto_version,
|
||||
# switch to those (see https://github.com/pyca/cryptography/issues/5379).
|
||||
#
|
||||
@@ -64,7 +67,7 @@ class ServerContextFactory(ContextFactory):
|
||||
self.configure_context(self._context, config)
|
||||
|
||||
@staticmethod
|
||||
def configure_context(context, config):
|
||||
def configure_context(context: SSL.Context, config: HomeServerConfig) -> None:
|
||||
try:
|
||||
_ecCurve = crypto.get_elliptic_curve(_defaultCurveName)
|
||||
context.set_tmp_ecdh(_ecCurve)
|
||||
@@ -74,15 +77,16 @@ class ServerContextFactory(ContextFactory):
|
||||
context.set_options(
|
||||
SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL.OP_NO_TLSv1 | SSL.OP_NO_TLSv1_1
|
||||
)
|
||||
context.use_certificate_chain_file(config.tls_certificate_file)
|
||||
context.use_privatekey(config.tls_private_key)
|
||||
context.use_certificate_chain_file(config.tls.tls_certificate_file)
|
||||
assert config.tls.tls_private_key is not None
|
||||
context.use_privatekey(config.tls.tls_private_key)
|
||||
|
||||
# https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
|
||||
context.set_cipher_list(
|
||||
"ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1:!AESCCM"
|
||||
b"ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1:!AESCCM"
|
||||
)
|
||||
|
||||
def getContext(self):
|
||||
def getContext(self) -> SSL.Context:
|
||||
return self._context
|
||||
|
||||
|
||||
@@ -98,11 +102,11 @@ class FederationPolicyForHTTPS:
|
||||
constructs an SSLClientConnectionCreator factory accordingly.
|
||||
"""
|
||||
|
||||
def __init__(self, config):
|
||||
def __init__(self, config: HomeServerConfig):
|
||||
self._config = config
|
||||
|
||||
# Check if we're using a custom list of a CA certificates
|
||||
trust_root = config.federation_ca_trust_root
|
||||
trust_root = config.tls.federation_ca_trust_root
|
||||
if trust_root is None:
|
||||
# Use CA root certs provided by OpenSSL
|
||||
trust_root = platformTrust()
|
||||
@@ -113,7 +117,7 @@ class FederationPolicyForHTTPS:
|
||||
# moving to TLS 1.2 by default, we want to respect the config option if
|
||||
# it is set to 1.0 (which the alternate option, raiseMinimumTo, will not
|
||||
# let us do).
|
||||
minTLS = _TLS_VERSION_MAP[config.federation_client_minimum_tls_version]
|
||||
minTLS = _TLS_VERSION_MAP[config.tls.federation_client_minimum_tls_version]
|
||||
|
||||
_verify_ssl = CertificateOptions(
|
||||
trustRoot=trust_root, insecurelyLowerMinimumTo=minTLS
|
||||
@@ -125,13 +129,13 @@ class FederationPolicyForHTTPS:
|
||||
self._no_verify_ssl_context = _no_verify_ssl.getContext()
|
||||
self._no_verify_ssl_context.set_info_callback(_context_info_cb)
|
||||
|
||||
self._should_verify = self._config.federation_verify_certificates
|
||||
self._should_verify = self._config.tls.federation_verify_certificates
|
||||
|
||||
self._federation_certificate_verification_whitelist = (
|
||||
self._config.federation_certificate_verification_whitelist
|
||||
self._config.tls.federation_certificate_verification_whitelist
|
||||
)
|
||||
|
||||
def get_options(self, host: bytes):
|
||||
def get_options(self, host: bytes) -> IOpenSSLClientConnectionCreator:
|
||||
# IPolicyForHTTPS.get_options takes bytes, but we want to compare
|
||||
# against the str whitelist. The hostnames in the whitelist are already
|
||||
# IDNA-encoded like the hosts will be here.
|
||||
@@ -153,7 +157,9 @@ class FederationPolicyForHTTPS:
|
||||
|
||||
return SSLClientConnectionCreator(host, ssl_context, should_verify)
|
||||
|
||||
def creatorForNetloc(self, hostname, port):
|
||||
def creatorForNetloc(
|
||||
self, hostname: bytes, port: int
|
||||
) -> IOpenSSLClientConnectionCreator:
|
||||
"""Implements the IPolicyForHTTPS interface so that this can be passed
|
||||
directly to agents.
|
||||
"""
|
||||
@@ -169,16 +175,18 @@ class RegularPolicyForHTTPS:
|
||||
trust root.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
trust_root = platformTrust()
|
||||
self._ssl_context = CertificateOptions(trustRoot=trust_root).getContext()
|
||||
self._ssl_context.set_info_callback(_context_info_cb)
|
||||
|
||||
def creatorForNetloc(self, hostname, port):
|
||||
def creatorForNetloc(
|
||||
self, hostname: bytes, port: int
|
||||
) -> IOpenSSLClientConnectionCreator:
|
||||
return SSLClientConnectionCreator(hostname, self._ssl_context, True)
|
||||
|
||||
|
||||
def _context_info_cb(ssl_connection, where, ret):
|
||||
def _context_info_cb(ssl_connection: SSL.Connection, where: int, ret: int) -> None:
|
||||
"""The 'information callback' for our openssl context objects.
|
||||
|
||||
Note: Once this is set as the info callback on a Context object, the Context should
|
||||
@@ -204,11 +212,13 @@ class SSLClientConnectionCreator:
|
||||
Replaces twisted.internet.ssl.ClientTLSOptions
|
||||
"""
|
||||
|
||||
def __init__(self, hostname: bytes, ctx, verify_certs: bool):
|
||||
def __init__(self, hostname: bytes, ctx: SSL.Context, verify_certs: bool):
|
||||
self._ctx = ctx
|
||||
self._verifier = ConnectionVerifier(hostname, verify_certs)
|
||||
|
||||
def clientConnectionForTLS(self, tls_protocol):
|
||||
def clientConnectionForTLS(
|
||||
self, tls_protocol: TLSMemoryBIOProtocol
|
||||
) -> SSL.Connection:
|
||||
context = self._ctx
|
||||
connection = SSL.Connection(context, None)
|
||||
|
||||
@@ -219,7 +229,7 @@ class SSLClientConnectionCreator:
|
||||
# ... and we also gut-wrench a '_synapse_tls_verifier' attribute into the
|
||||
# tls_protocol so that the SSL context's info callback has something to
|
||||
# call to do the cert verification.
|
||||
tls_protocol._synapse_tls_verifier = self._verifier
|
||||
tls_protocol._synapse_tls_verifier = self._verifier # type: ignore[attr-defined]
|
||||
return connection
|
||||
|
||||
|
||||
@@ -244,7 +254,9 @@ class ConnectionVerifier:
|
||||
self._hostnameBytes = hostname
|
||||
self._hostnameASCII = self._hostnameBytes.decode("ascii")
|
||||
|
||||
def verify_context_info_cb(self, ssl_connection, where):
|
||||
def verify_context_info_cb(
|
||||
self, ssl_connection: SSL.Connection, where: int
|
||||
) -> None:
|
||||
if where & SSL.SSL_CB_HANDSHAKE_START and not self._is_ip_address:
|
||||
ssl_connection.set_tlsext_host_name(self._hostnameBytes)
|
||||
|
||||
|
||||
@@ -100,7 +100,7 @@ def compute_content_hash(
|
||||
|
||||
|
||||
def compute_event_reference_hash(
|
||||
event, hash_algorithm: Hasher = hashlib.sha256
|
||||
event: EventBase, hash_algorithm: Hasher = hashlib.sha256
|
||||
) -> Tuple[str, bytes]:
|
||||
"""Computes the event reference hash. This is the hash of the redacted
|
||||
event.
|
||||
|
||||
@@ -87,7 +87,7 @@ class VerifyJsonRequest:
|
||||
server_name: str,
|
||||
json_object: JsonDict,
|
||||
minimum_valid_until_ms: int,
|
||||
):
|
||||
) -> "VerifyJsonRequest":
|
||||
"""Create a VerifyJsonRequest to verify all signatures on a signed JSON
|
||||
object for the given server.
|
||||
"""
|
||||
@@ -104,7 +104,7 @@ class VerifyJsonRequest:
|
||||
server_name: str,
|
||||
event: EventBase,
|
||||
minimum_valid_until_ms: int,
|
||||
):
|
||||
) -> "VerifyJsonRequest":
|
||||
"""Create a VerifyJsonRequest to verify all signatures on an event
|
||||
object for the given server.
|
||||
"""
|
||||
@@ -449,7 +449,9 @@ class StoreKeyFetcher(KeyFetcher):
|
||||
|
||||
self.store = hs.get_datastore()
|
||||
|
||||
async def _fetch_keys(self, keys_to_fetch: List[_FetchKeyRequest]):
|
||||
async def _fetch_keys(
|
||||
self, keys_to_fetch: List[_FetchKeyRequest]
|
||||
) -> Dict[str, Dict[str, FetchKeyResult]]:
|
||||
key_ids_to_fetch = (
|
||||
(queue_value.server_name, key_id)
|
||||
for queue_value in keys_to_fetch
|
||||
@@ -572,7 +574,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
|
||||
super().__init__(hs)
|
||||
self.clock = hs.get_clock()
|
||||
self.client = hs.get_federation_http_client()
|
||||
self.key_servers = self.config.key_servers
|
||||
self.key_servers = self.config.key.key_servers
|
||||
|
||||
async def _fetch_keys(
|
||||
self, keys_to_fetch: List[_FetchKeyRequest]
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional, Set, Tuple, Union
|
||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
|
||||
|
||||
from canonicaljson import encode_canonical_json
|
||||
from signedjson.key import decode_verify_key_bytes
|
||||
@@ -41,43 +41,111 @@ from synapse.types import StateMap, UserID, get_domain_from_id
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check(
|
||||
room_version_obj: RoomVersion,
|
||||
event: EventBase,
|
||||
auth_events: StateMap[EventBase],
|
||||
do_sig_check: bool = True,
|
||||
do_size_check: bool = True,
|
||||
def validate_event_for_room_version(
|
||||
room_version_obj: RoomVersion, event: EventBase
|
||||
) -> None:
|
||||
"""Checks if this event is correctly authed.
|
||||
"""Ensure that the event complies with the limits, and has the right signatures
|
||||
|
||||
NB: does not *validate* the signatures - it assumes that any signatures present
|
||||
have already been checked.
|
||||
|
||||
NB: it does not check that the event satisfies the auth rules (that is done in
|
||||
check_auth_rules_for_event) - these tests are independent of the rest of the state
|
||||
in the room.
|
||||
|
||||
NB: This is used to check events that have been received over federation. As such,
|
||||
it can only enforce the checks specified in the relevant room version, to avoid
|
||||
a split-brain situation where some servers accept such events, and others reject
|
||||
them.
|
||||
|
||||
TODO: consider moving this into EventValidator
|
||||
|
||||
Args:
|
||||
room_version_obj: the version of the room
|
||||
event: the event being checked.
|
||||
auth_events: the existing room state.
|
||||
do_sig_check: True if it should be verified that the sending server
|
||||
signed the event.
|
||||
do_size_check: True if the size of the event fields should be verified.
|
||||
room_version_obj: the version of the room which contains this event
|
||||
event: the event to be checked
|
||||
|
||||
Raises:
|
||||
AuthError if the checks fail
|
||||
|
||||
Returns:
|
||||
if the auth checks pass.
|
||||
SynapseError if there is a problem with the event
|
||||
"""
|
||||
assert isinstance(auth_events, dict)
|
||||
|
||||
if do_size_check:
|
||||
_check_size_limits(event)
|
||||
_check_size_limits(event)
|
||||
|
||||
if not hasattr(event, "room_id"):
|
||||
raise AuthError(500, "Event has no room_id: %s" % event)
|
||||
|
||||
room_id = event.room_id
|
||||
# check that the event has the correct signatures
|
||||
sender_domain = get_domain_from_id(event.sender)
|
||||
|
||||
is_invite_via_3pid = (
|
||||
event.type == EventTypes.Member
|
||||
and event.membership == Membership.INVITE
|
||||
and "third_party_invite" in event.content
|
||||
)
|
||||
|
||||
# Check the sender's domain has signed the event
|
||||
if not event.signatures.get(sender_domain):
|
||||
# We allow invites via 3pid to have a sender from a different
|
||||
# HS, as the sender must match the sender of the original
|
||||
# 3pid invite. This is checked further down with the
|
||||
# other dedicated membership checks.
|
||||
if not is_invite_via_3pid:
|
||||
raise AuthError(403, "Event not signed by sender's server")
|
||||
|
||||
if event.format_version in (EventFormatVersions.V1,):
|
||||
# Only older room versions have event IDs to check.
|
||||
event_id_domain = get_domain_from_id(event.event_id)
|
||||
|
||||
# Check the origin domain has signed the event
|
||||
if not event.signatures.get(event_id_domain):
|
||||
raise AuthError(403, "Event not signed by sending server")
|
||||
|
||||
is_invite_via_allow_rule = (
|
||||
room_version_obj.msc3083_join_rules
|
||||
and event.type == EventTypes.Member
|
||||
and event.membership == Membership.JOIN
|
||||
and EventContentFields.AUTHORISING_USER in event.content
|
||||
)
|
||||
if is_invite_via_allow_rule:
|
||||
authoriser_domain = get_domain_from_id(
|
||||
event.content[EventContentFields.AUTHORISING_USER]
|
||||
)
|
||||
if not event.signatures.get(authoriser_domain):
|
||||
raise AuthError(403, "Event not signed by authorising server")
|
||||
|
||||
|
||||
def check_auth_rules_for_event(
|
||||
room_version_obj: RoomVersion, event: EventBase, auth_events: Iterable[EventBase]
|
||||
) -> None:
|
||||
"""Check that an event complies with the auth rules
|
||||
|
||||
Checks whether an event passes the auth rules with a given set of state events
|
||||
|
||||
Assumes that we have already checked that the event is the right shape (it has
|
||||
enough signatures, has a room ID, etc). In other words:
|
||||
|
||||
- it's fine for use in state resolution, when we have already decided whether to
|
||||
accept the event or not, and are now trying to decide whether it should make it
|
||||
into the room state
|
||||
|
||||
- when we're doing the initial event auth, it is only suitable in combination with
|
||||
a bunch of other tests.
|
||||
|
||||
Args:
|
||||
room_version_obj: the version of the room
|
||||
event: the event being checked.
|
||||
auth_events: the room state to check the events against.
|
||||
|
||||
Raises:
|
||||
AuthError if the checks fail
|
||||
"""
|
||||
# We need to ensure that the auth events are actually for the same room, to
|
||||
# stop people from using powers they've been granted in other rooms for
|
||||
# example.
|
||||
for auth_event in auth_events.values():
|
||||
#
|
||||
# Arguably we don't need to do this when we're just doing state res, as presumably
|
||||
# the state res algorithm isn't silly enough to give us events from different rooms.
|
||||
# Still, it's easier to do it anyway.
|
||||
room_id = event.room_id
|
||||
for auth_event in auth_events:
|
||||
if auth_event.room_id != room_id:
|
||||
raise AuthError(
|
||||
403,
|
||||
@@ -85,44 +153,12 @@ def check(
|
||||
"which is in room %s"
|
||||
% (event.event_id, room_id, auth_event.event_id, auth_event.room_id),
|
||||
)
 
@@ -85,44 +153,12 @@ def check(
-    if do_sig_check:
-        sender_domain = get_domain_from_id(event.sender)
-
-        is_invite_via_3pid = (
-            event.type == EventTypes.Member
-            and event.membership == Membership.INVITE
-            and "third_party_invite" in event.content
-        )
-
-        # Check the sender's domain has signed the event
-        if not event.signatures.get(sender_domain):
-            # We allow invites via 3pid to have a sender from a different
-            # HS, as the sender must match the sender of the original
-            # 3pid invite. This is checked further down with the
-            # other dedicated membership checks.
-            if not is_invite_via_3pid:
-                raise AuthError(403, "Event not signed by sender's server")
-
-        if event.format_version in (EventFormatVersions.V1,):
-            # Only older room versions have event IDs to check.
-            event_id_domain = get_domain_from_id(event.event_id)
-
-            # Check the origin domain has signed the event
-            if not event.signatures.get(event_id_domain):
-                raise AuthError(403, "Event not signed by sending server")
-
-        is_invite_via_allow_rule = (
-            event.type == EventTypes.Member
-            and event.membership == Membership.JOIN
-            and "join_authorised_via_users_server" in event.content
-        )
-        if is_invite_via_allow_rule:
-            authoriser_domain = get_domain_from_id(
-                event.content["join_authorised_via_users_server"]
-            )
-            if not event.signatures.get(authoriser_domain):
-                raise AuthError(403, "Event not signed by authorising server")
+        if auth_event.rejected_reason:
+            raise AuthError(
+                403,
+                "During auth for event %s: found rejected event %s in the state"
+                % (event.event_id, auth_event.event_id),
+            )
 
     # Implementation of https://matrix.org/docs/spec/rooms/v1#authorization-rules
     #
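The added rejected_reason guard means an event that was itself rejected can never authorise another event. A toy illustration of that contract (stub objects, not Synapse's types):

    from collections import namedtuple

    StubEvent = namedtuple("StubEvent", ["event_id", "room_id", "rejected_reason"])

    state = [
        StubEvent("$ok:example.org", "!room:example.org", None),
        StubEvent("$bad:example.org", "!room:example.org", "failed auth"),
    ]

    # Synapse raises AuthError(403, ...) on the first such event; here we just
    # collect the offenders to show what the loop is looking for.
    rejected = [e.event_id for e in state if e.rejected_reason]
    assert rejected == ["$bad:example.org"]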
@@ -148,8 +184,10 @@ def check(
         logger.debug("Allowing! %s", event)
         return
 
+    auth_dict = {(e.type, e.state_key): e for e in auth_events}
+
     # 3. If event does not have a m.room.create in its auth_events, reject.
-    creation_event = auth_events.get((EventTypes.Create, ""), None)
+    creation_event = auth_dict.get((EventTypes.Create, ""), None)
     if not creation_event:
         raise AuthError(403, "No create event in auth events")
 
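The new auth_dict comprehension rebuilds the (event type, state key) mapping that the later checks index into, now that the function receives a plain iterable of events. Roughly, with stub events and illustrative content:

    from collections import namedtuple

    StubEvent = namedtuple("StubEvent", ["type", "state_key", "content"])

    auth_events = [
        StubEvent("m.room.create", "", {"creator": "@alice:example.org"}),
        StubEvent("m.room.power_levels", "", {"users_default": 0}),
    ]

    # Same shape as the diff's comprehension: key state events by (type, state_key).
    auth_dict = {(e.type, e.state_key): e for e in auth_events}

    # The create event always has an empty state_key.
    assert auth_dict.get(("m.room.create", "")) is not None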
@@ -157,7 +195,7 @@ def check(
     creating_domain = get_domain_from_id(event.room_id)
     originating_domain = get_domain_from_id(event.sender)
     if creating_domain != originating_domain:
-        if not _can_federate(event, auth_events):
+        if not _can_federate(event, auth_dict):
             raise AuthError(403, "This room has been marked as unfederatable.")
 
     # 4. If type is m.room.aliases
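_can_federate, called above, consults the room's create event: a room created with "m.federate": false only accepts events from the creating server. A rough sketch of that lookup (the default-to-true behaviour when the field is absent follows the Matrix spec; the helper name below is just for illustration):

    def can_federate(create_content: dict) -> bool:
        # "m.federate" defaults to True when absent from m.room.create content.
        return bool(create_content.get("m.federate", True))

    assert can_federate({"creator": "@alice:example.org"}) is True
    assert can_federate({"m.federate": False}) is False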
@@ -179,23 +217,20 @@ def check(
         logger.debug("Allowing! %s", event)
         return
 
-    if logger.isEnabledFor(logging.DEBUG):
-        logger.debug("Auth events: %s", [a.event_id for a in auth_events.values()])
-
     # 5. If type is m.room.membership
     if event.type == EventTypes.Member:
-        _is_membership_change_allowed(room_version_obj, event, auth_events)
+        _is_membership_change_allowed(room_version_obj, event, auth_dict)
         logger.debug("Allowing! %s", event)
         return
 
-    _check_event_sender_in_room(event, auth_events)
+    _check_event_sender_in_room(event, auth_dict)
 
     # Special case to allow m.room.third_party_invite events wherever
     # a user is allowed to issue invites. Fixes
     # https://github.com/vector-im/vector-web/issues/1208 hopefully
     if event.type == EventTypes.ThirdPartyInvite:
-        user_level = get_user_power_level(event.user_id, auth_events)
-        invite_level = get_named_level(auth_events, "invite", 0)
+        user_level = get_user_power_level(event.user_id, auth_dict)
+        invite_level = get_named_level(auth_dict, "invite", 0)
 
         if user_level < invite_level:
             raise AuthError(403, "You don't have permission to invite users")
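The invite gate above compares two integers derived from the room's m.room.power_levels content. A simplified sketch with toy values (the real helpers also fall back to the create event when no power-levels event exists):

    power_levels_content = {
        "users": {"@mod:example.org": 50},
        "users_default": 0,
        "invite": 50,  # the "invite" named level, defaulting to 0 if absent
    }

    def user_power_level(user_id: str) -> int:
        users = power_levels_content.get("users", {})
        return users.get(user_id, power_levels_content.get("users_default", 0))

    invite_level = power_levels_content.get("invite", 0)

    assert user_power_level("@mod:example.org") >= invite_level   # may invite
    assert user_power_level("@guest:example.org") < invite_level  # may not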
@@ -203,20 +238,20 @@ def check(
         logger.debug("Allowing! %s", event)
         return
 
-    _can_send_event(event, auth_events)
+    _can_send_event(event, auth_dict)
 
     if event.type == EventTypes.PowerLevels:
-        _check_power_levels(room_version_obj, event, auth_events)
+        _check_power_levels(room_version_obj, event, auth_dict)
 
     if event.type == EventTypes.Redaction:
-        check_redaction(room_version_obj, event, auth_events)
+        check_redaction(room_version_obj, event, auth_dict)
 
     if (
         event.type == EventTypes.MSC2716_INSERTION
-        or event.type == EventTypes.MSC2716_CHUNK
+        or event.type == EventTypes.MSC2716_BATCH
         or event.type == EventTypes.MSC2716_MARKER
     ):
-        check_historical(room_version_obj, event, auth_events)
+        check_historical(room_version_obj, event, auth_dict)
 
     logger.debug("Allowing! %s", event)
@@ -381,7 +416,9 @@ def _is_membership_change_allowed(
             # Note that if the caller is in the room or invited, then they do
             # not need to meet the allow rules.
             if not caller_in_room and not caller_invited:
-                authorising_user = event.content.get("join_authorised_via_users_server")
+                authorising_user = event.content.get(
+                    EventContentFields.AUTHORISING_USER
+                )
 
                 if authorising_user is None:
                     raise AuthError(403, "Join event is missing authorising user.")
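For context, this branch of _is_membership_change_allowed handles restricted rooms: a joiner who is neither in the room nor invited must name an authorising user. A sketch of the content shape involved (values illustrative):

    join_content = {
        "membership": "join",
        "join_authorised_via_users_server": "@mod:example.org",
    }

    caller_in_room = caller_invited = False
    if not caller_in_room and not caller_invited:
        authorising_user = join_content.get("join_authorised_via_users_server")
        if authorising_user is None:
            # Synapse raises AuthError(403, ...) at this point.
            raise PermissionError("Join event is missing authorising user.")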
@@ -552,14 +589,14 @@ def check_historical(
     auth_events: StateMap[EventBase],
 ) -> None:
     """Check whether the event sender is allowed to send historical related
-    events like "insertion", "chunk", and "marker".
+    events like "insertion", "batch", and "marker".
 
     Returns:
         None
 
     Raises:
         AuthError if the event sender is not allowed to send historical related events
-        ("insertion", "chunk", and "marker").
+        ("insertion", "batch", and "marker").
     """
     # Ignore the auth checks in room versions that do not support historical
     # events
@@ -573,7 +610,7 @@ def check_historical(
     if user_level < historical_level:
         raise AuthError(
             403,
-            'You don\'t have permission to send send historical related events ("insertion", "chunk", and "marker")',
+            'You don\'t have permission to send send historical related events ("insertion", "batch", and "marker")',
         )
@@ -836,10 +873,10 @@ def auth_types_for_event(
             auth_types.add(key)
 
     if room_version.msc3083_join_rules and membership == Membership.JOIN:
-        if "join_authorised_via_users_server" in event.content:
+        if EventContentFields.AUTHORISING_USER in event.content:
             key = (
                 EventTypes.Member,
-                event.content["join_authorised_via_users_server"],
+                event.content[EventContentFields.AUTHORISING_USER],
             )
             auth_types.add(key)
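auth_types_for_event returns the (type, state_key) pairs that an event's auth_events should reference; the change above means an authorised restricted join also pulls in the authorising user's membership. A rough sketch of that extra key (event IDs and user IDs illustrative):

    event_content = {"join_authorised_via_users_server": "@mod:example.org"}
    auth_types = {("m.room.create", ""), ("m.room.member", "@joiner:example.org")}

    if "join_authorised_via_users_server" in event_content:
        auth_types.add(
            ("m.room.member", event_content["join_authorised_via_users_server"])
        )

    assert ("m.room.member", "@mod:example.org") in auth_types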
Some files were not shown because too many files have changed in this diff.