Compare commits: v1.54.0...anoa/docs_ (50 commits)
| SHA1 |
|---|
| 3360be1829 |
| 19ca533bcc |
| 72e7f1c420 |
| ea27528b5d |
| 52a947dc46 |
| 88cd6f9378 |
| 3e4af36bc8 |
| a4c1fdb44a |
| 15382b1afa |
| 690cb4f3b3 |
| 032688854b |
| 180d8ff0d4 |
| dc8d825ef2 |
| 9a0172d49f |
| 5627182788 |
| 0dc9c5653c |
| bfa7d6b035 |
| 2ce27a24fe |
| ca9234a9eb |
| d8bab6793c |
| 26211fec24 |
| f63bedef07 |
| 0211f18d65 |
| 00a67f831a |
| 0752ab7a36 |
| 75574726a7 |
| 158e0937eb |
| cd1ae3d0b4 |
| 36071d39f7 |
| 4aeb00ca20 |
| 423cca9efe |
| 87c230c27c |
| d56202b038 |
| 8533c8b03d |
| fb0ffa9676 |
| 9297d040a7 |
| 7e91107be1 |
| 1d11b452b7 |
| a511a890d7 |
| 61fd2a8f59 |
| 31b125ccec |
| 11282ade1d |
| 1fbe0316a9 |
| 106959b3cf |
| 2ffaf30803 |
| b4461e7d8a |
| 594a07ede4 |
| 1103c5fe8a |
| f3f0ab10fe |
| 6adb89ff00 |
@@ -21,7 +21,7 @@ python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
 echo "--- Prepare test database"

 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

 # Run the export-data command on the sqlite test database
 python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
@@ -41,7 +41,7 @@ fi

 # Port the SQLite databse to postgres so we can check command works against postgres
 echo "+++ Port SQLite3 databse to postgres"
-scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

 # Run the export-data command on postgres database
 python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \

@@ -25,17 +25,19 @@ python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
 echo "--- Prepare test database"

 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

 # Create the PostgreSQL database.
 .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

 echo "+++ Run synapse_port_db against test database"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
+# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

 # We should be able to run twice against the same database.
 echo "+++ Run synapse_port_db a second time"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

 #####

@@ -46,7 +48,7 @@ echo "--- Prepare empty SQLite database"
 # we do this by deleting the sqlite db, and then doing the same again.
 rm .ci/test_db.db

-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

 # re-create the PostgreSQL database.
 .ci/scripts/postgres_exec.py \
@@ -54,4 +56,4 @@ scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-b
 "CREATE DATABASE synapse"

 echo "+++ Run synapse_port_db against empty database"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

@@ -3,11 +3,9 @@

 # things to include
 !docker
-!scripts
 !synapse
 !MANIFEST.in
 !README.rst
 !setup.py
-!synctl

 **/__pycache__

.github/workflows/release-artifacts.yml (vendored, 7 lines changed)
@@ -31,7 +31,7 @@ jobs:
 # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
 dists='["debian:sid"]'
 if [[ $GITHUB_REF == refs/tags/* ]]; then
-dists=$(scripts-dev/build_debian_packages --show-dists-json)
+dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
 fi
 echo "::set-output name=distros::$dists"
 # map the step outputs to job outputs
@@ -74,7 +74,7 @@ jobs:
 # see https://github.com/docker/build-push-action/issues/252
 # for the cache magic here
 run: |
-./src/scripts-dev/build_debian_packages \
+./src/scripts-dev/build_debian_packages.py \
 --docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
 --docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
 --docker-build-arg=--progress=plain \
@@ -112,7 +112,8 @@ jobs:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:
 files: |
-python-dist/*
+Sdist/*
+Wheel/*
 debs.tar.xz
 # if it's not already published, keep the release as a draft.
 draft: true

.github/workflows/tests.yml (vendored, 35 lines changed)
@@ -16,7 +16,8 @@ jobs:
 - uses: actions/checkout@v2
 - uses: actions/setup-python@v2
+- run: pip install -e .
-- run: scripts-dev/generate_sample_config --check
+- run: scripts-dev/generate_sample_config.sh --check
 - run: scripts-dev/config-lint.sh

 lint:
 runs-on: ubuntu-latest
@@ -51,7 +52,7 @@ jobs:
 fetch-depth: 0
 - uses: actions/setup-python@v2
 - run: "pip install 'towncrier>=18.6.0rc1'"
-- run: scripts-dev/check-newsfragment
+- run: scripts-dev/check-newsfragment.sh
 env:
 PULL_REQUEST_NUMBER: ${{ github.event.number }}

@@ -376,7 +377,7 @@ jobs:
 # Run Complement
 - run: |
 set -o pipefail
-go test -v -json -p 1 -tags synapse_blacklist,msc2403 ./tests/... 2>&1 | gotestfmt
+go test -v -json -p 1 -tags synapse_blacklist,msc2403,msc2716,msc3030 ./tests/... 2>&1 | gotestfmt
 shell: bash
 name: Run Complement Tests
 env:
@@ -387,34 +388,22 @@ jobs:
 tests-done:
 if: ${{ always() }}
 needs:
 - check-sampleconfig
 - lint
 - lint-crlf
 - lint-newsfile
 - trial
 - trial-olddeps
 - sytest
 - export-data
 - portdb
 - complement
 runs-on: ubuntu-latest
 steps:
-- name: Set build result
-env:
-NEEDS_CONTEXT: ${{ toJSON(needs) }}
-# the `jq` incantation dumps out a series of "<job> <result>" lines.
-# we set it to an intermediate variable to avoid a pipe, which makes it
-# hard to set $rc.
-run: |
-rc=0
-results=$(jq -r 'to_entries[] | [.key,.value.result] | join(" ")' <<< $NEEDS_CONTEXT)
-while read job result ; do
-# The newsfile lint may be skipped on non PR builds
-if [ $result == "skipped" ] && [ $job == "lint-newsfile" ]; then
-continue
-fi
-
-if [ "$result" != "success" ]; then
-echo "::set-failed ::Job $job returned $result"
-rc=1
-fi
-done <<< $results
-exit $rc
+- uses: matrix-org/done-action@v2
+with:
+needs: ${{ toJSON(needs) }}
+
+# The newsfile lint may be skipped on non PR builds
+skippable:
+lint-newsfile

@@ -1,4 +1,3 @@
-include synctl
 include LICENSE
 include VERSION
 include *.rst
@@ -17,7 +16,6 @@ recursive-include synapse/storage *.txt
 recursive-include synapse/storage *.md

 recursive-include docs *
-recursive-include scripts *
 recursive-include scripts-dev *
 recursive-include synapse *.pyi
 recursive-include tests *.py
@@ -53,5 +51,4 @@ prune contrib
 prune debian
 prune demo/etc
 prune docker
-prune snap
 prune stubs

@@ -312,6 +312,9 @@ We recommend using the demo which starts 3 federated instances running on ports

 (to stop, you can use `./demo/stop.sh`)

+See the [demo documentation](https://matrix-org.github.io/synapse/develop/development/demo.html)
+for more information.
+
 If you just want to start a single instance of the app and run it directly::

 # Create the homeserver.yaml config once

@@ -33,7 +33,7 @@ site-url = "/synapse/"
 additional-css = [
 "docs/website_files/table-of-contents.css",
 "docs/website_files/remove-nav-buttons.css",
-"docs/website_files/indent-section-headers.css",
+"docs/website_files/section-headers.css",
 ]
 additional-js = ["docs/website_files/table-of-contents.js"]
 theme = "docs/website_files/theme"

New changelog files (each adds a single line):

- changelog.d/11700.removal: Remove workaround introduced in Synapse 1.50.0 for Mjolnir compatibility. Breaks compatibility with Mjolnir 1.3.1 and earlier.
- changelog.d/11915.misc: Simplify the `ApplicationService` class' set of public methods related to interest checking.
- changelog.d/11998.doc: Fix complexity checking config example in [Resource Constrained Devices](https://matrix-org.github.io/synapse/v1.54/other/running_synapse_on_single_board_computers.html) docs page.
- changelog.d/12028.feature: Add third-party rules rules callbacks `check_can_shutdown_room` and `check_can_deactivate_user`.
- changelog.d/12042.misc: Correct type hints for txredis.
- changelog.d/12090.bugfix: Use the proper serialization format for bundled thread aggregations. The bug has existed since Synapse v1.48.0.
- changelog.d/12101.misc: Limit the size of `aggregation_key` on annotations.
- changelog.d/12108.misc: Add type hints to `tests/rest/client`.
- changelog.d/12113.bugfix: Fix a long-standing bug when redacting events with relations.
- changelog.d/12118.misc: Move scripts to Synapse package and expose as setuptools entry points.
- changelog.d/12121.bugfix: Fix a long-standing bug when redacting events with relations.
- changelog.d/12128.misc: Fix data validation to compare to lists, not sequences.
- changelog.d/12130.bugfix: Fix a long-standing bug when redacting events with relations.
- changelog.d/12131.misc: Fix CI not attaching source distributions and wheels to the GitHub releases.
- changelog.d/12132.feature: Improve performance of logging in for large accounts.
- changelog.d/12135.feature: Add experimental env var `SYNAPSE_ASYNC_IO_REACTOR` that causes Synapse to use the asyncio reactor for Twisted.
- changelog.d/12136.misc: Remove unused mocks from `test_typing`.
- changelog.d/12137.misc: Give `scripts-dev` scripts suffixes for neater CI config.
- changelog.d/12138.removal: Remove backwards compatibilty with pagination tokens from the `/relations` and `/aggregations` endpoints generated from Synapse < v1.52.0.
- changelog.d/12140.misc: Move `synctl` into `synapse._scripts` and expose as an entry point.
- changelog.d/12142.misc: Move the snapcraft configuration file to `contrib`.
- changelog.d/12143.doc: Improve documentation for demo scripts.
- changelog.d/12144.misc: Enable [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) Complement tests in CI.
- changelog.d/12145.misc: Enable [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) Complement tests in CI.
- changelog.d/12146.misc: Add type hints to `tests/rest`.
- changelog.d/12149.misc: Add test for `ObservableDeferred`'s cancellation behaviour.
- changelog.d/12150.misc: Use `ParamSpec` in type hints for `synapse.logging.context`.
- changelog.d/12151.feature: Support the stable identifiers from [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440): threads.
- changelog.d/12152.misc: Prune unused jobs from `tox` config.
- changelog.d/12153.misc: Move CI checks out of tox, to facilitate a move to using poetry.
- changelog.d/12154.misc: Avoid generating state groups for local out-of-band leaves.
- changelog.d/12155.misc: Avoid trying to calculate the state at outlier events.
- changelog.d/12156.misc: Fix some type annotations.
- changelog.d/12157.bugfix: Fix a bug introduced in #4864 whereby background updates are never run with the default background batch size.
- changelog.d/12159.misc: Add type hints for `ObservableDeferred` attributes.
- changelog.d/12161.misc: Use a prebuilt Action for the `tests-done` CI job.
- changelog.d/12163.misc: Reduce number of DB queries made during processing of `/sync`.
- changelog.d/12173.misc: Avoid trying to calculate the state at outlier events.
- changelog.d/12175.bugfix: Fix a bug where non-standard information was returned from the `/hierarchy` API. Introduced in Synapse v1.41.0.
- changelog.d/12179.doc: Updates to the Room DAG concepts development document.
- changelog.d/12182.misc: Retry HTTP replication failures, this should prevent 502's when restarting stateful workers (main, event persisters, stream writers). Contributed by Nick @ Beeper.
- changelog.d/12187.misc: Remove unused variables.
- changelog.d/12189.bugfix: Fix a long-standing bug when redacting events with relations.
- changelog.d/12192.misc: Rename `HomeServer.get_tcp_replication` to `get_replication_command_handler`.
- changelog.d/12197.misc: Remove some dead code.

@@ -20,7 +20,7 @@ apps:
 generate-config:
 command: generate_config
 generate-signing-key:
-command: generate_signing_key.py
+command: generate_signing_key
 register-new-matrix-user:
 command: register_new_matrix_user
 plugs: [network]

demo/.gitignore (vendored, 11 lines changed)
@@ -1,7 +1,4 @@
-*.db
-*.log
-*.log.*
-*.pid
-
-/media_store.*
-/etc
+# Ignore all the temporary files from the demo servers.
+8080/
+8081/
+8082/

demo/README (deleted, 26 lines)
@@ -1,26 +0,0 @@
-DO NOT USE THESE DEMO SERVERS IN PRODUCTION
-
-Requires you to have done:
-    python setup.py develop
-
-
-The demo start.sh will start three synapse servers on ports 8080, 8081 and 8082, with host names localhost:$port. This can be easily changed to `hostname`:$port in start.sh if required.
-
-To enable the servers to communicate untrusted ssl certs are used. In order to do this the servers do not check the certs
-and are configured in a highly insecure way. Do not use these configuration files in production.
-
-stop.sh will stop the synapse servers and the webclient.
-
-clean.sh will delete the databases and log files.
-
-To start a completely new set of servers, run:
-
-    ./demo/stop.sh; ./demo/clean.sh && ./demo/start.sh
-
-
-Logs and sqlitedb will be stored in demo/808{0,1,2}.{log,db}
-
-
-
-Also note that when joining a public room on a different HS via "#foo:bar.net", then you are (in the current impl) joining a room with room_id "foo". This means that it won't work if your HS already has a room with that name.
-

@@ -4,6 +4,9 @@ set -e

 DIR="$( cd "$( dirname "$0" )" && pwd )"

+# Ensure that the servers are stopped.
+$DIR/stop.sh
+
 PID_FILE="$DIR/servers.pid"

 if [ -f "$PID_FILE" ]; then

@@ -6,8 +6,6 @@ CWD=$(pwd)

 cd "$DIR/.." || exit

-mkdir -p demo/etc
-
 PYTHONPATH=$(readlink -f "$(pwd)")
 export PYTHONPATH

@@ -21,22 +19,26 @@ for port in 8080 8081 8082; do
 mkdir -p demo/$port
 pushd demo/$port || exit

-#rm $DIR/etc/$port.config
+# Generate the configuration for the homeserver at localhost:848x.
 python3 -m synapse.app.homeserver \
 --generate-config \
--H "localhost:$https_port" \
---config-path "$DIR/etc/$port.config" \
+--server-name "localhost:$port" \
+--config-path "$port.config" \
 --report-stats no

-if ! grep -F "Customisation made by demo/start.sh" -q "$DIR/etc/$port.config"; then
-# Generate tls keys
-openssl req -x509 -newkey rsa:4096 -keyout "$DIR/etc/localhost:$https_port.tls.key" -out "$DIR/etc/localhost:$https_port.tls.crt" -days 365 -nodes -subj "/O=matrix"
+if ! grep -F "Customisation made by demo/start.sh" -q "$port.config"; then
+# Generate TLS keys.
+openssl req -x509 -newkey rsa:4096 \
+-keyout "localhost:$port.tls.key" \
+-out "localhost:$port.tls.crt" \
+-days 365 -nodes -subj "/O=matrix"

-# Regenerate configuration
+# Add customisations to the configuration.
 {
-printf '\n\n# Customisation made by demo/start.sh\n'
+printf '\n\n# Customisation made by demo/start.sh\n\n'
+echo "public_baseurl: http://localhost:$port/"
 echo 'enable_registration: true'
 echo ''

 # Warning, this heredoc depends on the interaction of tabs and spaces.
 # Please don't accidentaly bork me with your fancy settings.
@@ -63,38 +65,34 @@ for port in 8080 8081 8082; do

 echo "${listeners}"

-# Disable tls for the servers
-printf '\n\n# Disable tls on the servers.'
+# Disable TLS for the servers
+printf '\n\n# Disable TLS for the servers.'
 echo '# DO NOT USE IN PRODUCTION'
 echo 'use_insecure_ssl_client_just_for_testing_do_not_use: true'
 echo 'federation_verify_certificates: false'

-# Set tls paths
-echo "tls_certificate_path: \"$DIR/etc/localhost:$https_port.tls.crt\""
-echo "tls_private_key_path: \"$DIR/etc/localhost:$https_port.tls.key\""
+# Set paths for the TLS certificates.
+echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\""
+echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\""

 # Ignore keys from the trusted keys server
 echo '# Ignore keys from the trusted keys server'
 echo 'trusted_key_servers:'
 echo ' - server_name: "matrix.org"'
 echo ' accept_keys_insecurely: true'
 echo ''

-# Reduce the blacklist
-blacklist=$(cat <<-BLACK
-# Set the blacklist so that it doesn't include 127.0.0.1, ::1
-federation_ip_range_blacklist:
-- '10.0.0.0/8'
-- '172.16.0.0/12'
-- '192.168.0.0/16'
-- '100.64.0.0/10'
-- '169.254.0.0/16'
-- 'fe80::/64'
-- 'fc00::/7'
-BLACK
+# Allow the servers to communicate over localhost.
+allow_list=$(cat <<-ALLOW_LIST
+# Allow the servers to communicate over localhost.
+ip_range_whitelist:
+- '127.0.0.1/8'
+- '::1/128'
+ALLOW_LIST
 )

-echo "${blacklist}"
-} >> "$DIR/etc/$port.config"
+echo "${allow_list}"
+} >> "$port.config"
 fi

 # Check script parameters
@@ -141,19 +139,18 @@ for port in 8080 8081 8082; do
 burst_count: 1000
 RC
 )
-echo "${ratelimiting}" >> "$DIR/etc/$port.config"
+echo "${ratelimiting}" >> "$port.config"
 fi
 fi

-if ! grep -F "full_twisted_stacktraces" -q "$DIR/etc/$port.config"; then
-echo "full_twisted_stacktraces: true" >> "$DIR/etc/$port.config"
-fi
-if ! grep -F "report_stats" -q "$DIR/etc/$port.config" ; then
-echo "report_stats: false" >> "$DIR/etc/$port.config"
+# Always disable reporting of stats if the option is not there.
+if ! grep -F "report_stats" -q "$port.config" ; then
+echo "report_stats: false" >> "$port.config"
 fi

+# Run the homeserver in the background.
 python3 -m synapse.app.homeserver \
---config-path "$DIR/etc/$port.config" \
+--config-path "$port.config" \
 -D \

 popd || exit

@@ -46,8 +46,7 @@ RUN \
 && rm -rf /var/lib/apt/lists/*

 # Copy just what we need to pip install
-COPY scripts /synapse/scripts/
-COPY MANIFEST.in README.rst setup.py synctl /synapse/
+COPY MANIFEST.in README.rst setup.py /synapse/
 COPY synapse/__init__.py /synapse/synapse/__init__.py
 COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py

@@ -82,6 +82,7 @@
 - [Release Cycle](development/releases.md)
 - [Git Usage](development/git.md)
 - [Testing]()
+- [Demo scripts](development/demo.md)
 - [OpenTracing](opentracing.md)
 - [Database Schemas](development/database_schema.md)
 - [Experimental features](development/experimental_features.md)

@@ -172,6 +172,6 @@ frobber:
 ```

 Note that the sample configuration is generated from the synapse code
-and is maintained by a script, `scripts-dev/generate_sample_config`.
+and is maintained by a script, `scripts-dev/generate_sample_config.sh`.
 Making sure that the output from this script matches the desired format
 is left as an exercise for the reader!

@@ -158,9 +158,9 @@ same as integers.
 There are three separate aspects to this:

 * Any new boolean column must be added to the `BOOLEAN_COLUMNS` list in
-  `scripts/synapse_port_db`. This tells the port script to cast the integer
-  value from SQLite to a boolean before writing the value to the postgres
-  database.
+  `synapse/_scripts/synapse_port_db.py`. This tells the port script to cast
+  the integer value from SQLite to a boolean before writing the value to the
+  postgres database.

 * Before SQLite 3.23, `TRUE` and `FALSE` were not recognised as constants by
   SQLite, and the `IS [NOT] TRUE`/`IS [NOT] FALSE` operators were not

docs/development/demo.md (new file, 41 lines)
@@ -0,0 +1,41 @@
+# Synapse demo setup
+
+**DO NOT USE THESE DEMO SERVERS IN PRODUCTION**
+
+Requires you to have a [Synapse development environment setup](https://matrix-org.github.io/synapse/develop/development/contributing_guide.html#4-install-the-dependencies).
+
+The demo setup allows running three federation Synapse servers, with server
+names `localhost:8080`, `localhost:8081`, and `localhost:8082`.
+
+You can access them via any Matrix client over HTTP at `localhost:8080`,
+`localhost:8081`, and `localhost:8082` or over HTTPS at `localhost:8480`,
+`localhost:8481`, and `localhost:8482`.
+
+To enable the servers to communicate, self-signed SSL certificates are generated
+and the servers are configured in a highly insecure way, including:
+
+* Not checking certificates over federation.
+* Not verifying keys.
+
+The servers are configured to store their data under `demo/8080`, `demo/8081`, and
+`demo/8082`. This includes configuration, logs, SQLite databases, and media.
+
+Note that when joining a public room on a different HS via "#foo:bar.net", then
+you are (in the current impl) joining a room with room_id "foo". This means that
+it won't work if your HS already has a room with that name.
+
+## Using the demo scripts
+
+There's three main scripts with straightforward purposes:
+
+* `start.sh` will start the Synapse servers, generating any missing configuration.
+  * This accepts a single parameter `--no-rate-limit` to "disable" rate limits
+    (they actually still exist, but are very high).
+* `stop.sh` will stop the Synapse servers.
+* `clean.sh` will delete the configuration, databases, log files, etc.
+
+To start a completely new set of servers, run:
+
+```sh
+./demo/stop.sh; ./demo/clean.sh && ./demo/start.sh
+```

@@ -30,13 +30,57 @@ rather than skipping any that arrived late; whereas if you're looking at a
 historical section of timeline (i.e. `/messages`), you want to see the best
 representation of the state of the room as others were seeing it at the time.

+## Outliers
+
+We mark an event as an `outlier` when we haven't figured out the state for the
+room at that point in the DAG yet. They are "floating" events that we haven't
+yet correlated to the DAG.
+
+Outliers typically arise when we fetch the auth chain or state for a given
+event. When that happens, we just grab the events in the state/auth chain,
+without calculating the state at those events, or backfilling their
+`prev_events`.
+
+So, typically, we won't have the `prev_events` of an `outlier` in the database,
+(though it's entirely possible that we *might* have them for some other
+reason). Other things that make outliers different from regular events:
+
+* We don't have state for them, so there should be no entry in
+  `event_to_state_groups` for an outlier. (In practice this isn't always
+  the case, though I'm not sure why: see https://github.com/matrix-org/synapse/issues/12201).
+
+* We don't record entries for them in the `event_edges`,
+  `event_forward_extremeties` or `event_backward_extremities` tables.
+
+Since outliers are not tied into the DAG, they do not normally form part of the
+timeline sent down to clients via `/sync` or `/messages`; however there is an
+exception:
+
+### Out-of-band membership events
+
+A special case of outlier events are some membership events for federated rooms
+that we aren't full members of. For example:
+
+* invites received over federation, before we join the room
+* *rejections* for said invites
+* knock events for rooms that we would like to join but have not yet joined.
+
+In all the above cases, we don't have the state for the room, which is why they
+are treated as outliers. They are a bit special though, in that they are
+proactively sent to clients via `/sync`.
+
 ## Forward extremity

-Most-recent-in-time events in the DAG which are not referenced by any other events' `prev_events` yet.
+Most-recent-in-time events in the DAG which are not referenced by any other
+events' `prev_events` yet. (In this definition, outliers, rejected events, and
+soft-failed events don't count.)

-The forward extremities of a room are used as the `prev_events` when the next event is sent.
+The forward extremities of a room (or at least, a subset of them, if there are
+more than ten) are used as the `prev_events` when the next event is sent.
+
+The "current state" of a room (ie: the state which would be used if we
+generated a new event) is, therefore, the resolution of the room states
+at each of the forward extremities.

 ## Backward extremity

@@ -44,23 +88,14 @@ The current marker of where we have backfilled up to and will generally be the
 `prev_events` of the oldest-in-time events we have in the DAG. This gives a starting point when
 backfilling history.

-When we persist a non-outlier event, we clear it as a backward extremity and set
-all of its `prev_events` as the new backward extremities if they aren't already
-persisted in the `events` table.
-
-
-## Outliers
-
-We mark an event as an `outlier` when we haven't figured out the state for the
-room at that point in the DAG yet.
-
-We won't *necessarily* have the `prev_events` of an `outlier` in the database,
-but it's entirely possible that we *might*.
-
-For example, when we fetch the event auth chain or state for a given event, we
-mark all of those claimed auth events as outliers because we haven't done the
-state calculation ourself.
+Note that, unlike forward extremities, we typically don't have any backward
+extremity events themselves in the database - or, if we do, they will be "outliers" (see
+above). Either way, we don't expect to have the room state at a backward extremity.
+
+When we persist a non-outlier event, if it was previously a backward extremity,
+we clear it as a backward extremity and set all of its `prev_events` as the new
+backward extremities if they aren't already persisted as non-outliers. This
+therefore keeps the backward extremities up-to-date.

 ## State groups

@@ -63,4 +63,5 @@ release of Synapse.

 If you want to get up and running quickly with a trio of homeservers in a
 private federation, there is a script in the `demo` directory. This is mainly
-useful just for development purposes. See [demo/README](https://github.com/matrix-org/synapse/tree/develop/demo/).
+useful just for development purposes. See
+[demo scripts](https://matrix-org.github.io/synapse/develop/development/demo.html).

@@ -148,6 +148,49 @@ deny an incoming event, see [`check_event_for_spam`](spam_checker_callbacks.md#c

 If multiple modules implement this callback, Synapse runs them all in order.

+### `check_can_shutdown_room`
+
+_First introduced in Synapse v1.55.0_
+
+```python
+async def check_can_shutdown_room(
+    user_id: str, room_id: str,
+) -> bool:
+```
+
+Called when an admin user requests the shutdown of a room. The module must return a
+boolean indicating whether the shutdown can go through. If the callback returns `False`,
+the shutdown will not proceed and the caller will see a `M_FORBIDDEN` error.
+
+If multiple modules implement this callback, they will be considered in order. If a
+callback returns `True`, Synapse falls through to the next one. The value of the first
+callback that does not return `True` will be used. If this happens, Synapse will not call
+any of the subsequent implementations of this callback.
+
+### `check_can_deactivate_user`
+
+_First introduced in Synapse v1.55.0_
+
+```python
+async def check_can_deactivate_user(
+    user_id: str, by_admin: bool,
+) -> bool:
+```
+
+Called when the deactivation of a user is requested. User deactivation can be
+performed by an admin or the user themselves, so developers are encouraged to check the
+requester when implementing this callback. The module must return a
+boolean indicating whether the deactivation can go through. If the callback returns `False`,
+the deactivation will not proceed and the caller will see a `M_FORBIDDEN` error.
+
+The module is passed two parameters, `user_id` which is the ID of the user being deactivated, and `by_admin` which is `True` if the request is made by a serve admin, and `False` otherwise.
+
+If multiple modules implement this callback, they will be considered in order. If a
+callback returns `True`, Synapse falls through to the next one. The value of the first
+callback that does not return `True` will be used. If this happens, Synapse will not call
+any of the subsequent implementations of this callback.
+
+
 ### `on_profile_update`

 _First introduced in Synapse v1.54.0_

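For orientation, here is a minimal sketch of how a module might wire up the two callbacks documented in the diff above. The registration call `register_third_party_rules_callbacks` is assumed from Synapse's module API conventions, and the `protected_rooms` config key is invented purely for this example.

```python
# Minimal sketch, not from the diff: a module implementing the two new callbacks.
# `register_third_party_rules_callbacks` is assumed from Synapse's module API;
# `protected_rooms` is a hypothetical config knob invented for this example.
class ModerationGuard:
    def __init__(self, config: dict, api):
        self._api = api
        self._protected_rooms = set(config.get("protected_rooms", []))

        api.register_third_party_rules_callbacks(
            check_can_shutdown_room=self.check_can_shutdown_room,
            check_can_deactivate_user=self.check_can_deactivate_user,
        )

    async def check_can_shutdown_room(self, user_id: str, room_id: str) -> bool:
        # Returning False makes the shutdown request fail with M_FORBIDDEN.
        return room_id not in self._protected_rooms

    async def check_can_deactivate_user(self, user_id: str, by_admin: bool) -> bool:
        # Only allow deactivations that were requested by a server admin.
        return by_admin
```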
@@ -31,28 +31,29 @@ Anything that requires modifying the device list [#7721](https://github.com/matr
 Put the below in a new file at /etc/matrix-synapse/conf.d/sbc.yaml to override the defaults in homeserver.yaml.

 ```
-# Set to false to disable presence tracking on this homeserver.
+# Disable presence tracking, which is currently fairly resource intensive
+# More info: https://github.com/matrix-org/synapse/issues/9478
 use_presence: false

-# When this is enabled, the room "complexity" will be checked before a user
-# joins a new remote room. If it is above the complexity limit, the server will
-# disallow joining, or will instantly leave.
+# Set a small complexity limit, preventing users from joining large rooms
+# which may be resource-intensive to remain a part of.
+#
+# Note that this will not prevent users from joining smaller rooms that
+# eventually become complex.
 limit_remote_rooms:
-  # Uncomment to enable room complexity checking.
-  #enabled: true
+  enabled: true
   complexity: 3.0

 # Database configuration
 database:
+  # Use postgres for the best performance
   name: psycopg2
   args:
     user: matrix-synapse
-    # Generate a long, secure one with a password manager
+    # Generate a long, secure password using a password manager
    password: hunter2
     database: matrix-synapse
     host: localhost
     cp_min: 5
     cp_max: 10
 ```

 Currently the complexity is measured by [current_state_events / 500](https://github.com/matrix-org/synapse/blob/v1.20.1/synapse/storage/databases/main/events_worker.py#L986). You can find join times and your most complex rooms like this:

@@ -153,9 +153,9 @@ database file (typically `homeserver.db`) to another location. Once the
 copy is complete, restart synapse. For instance:

 ```sh
-./synctl stop
+synctl stop
 cp homeserver.db homeserver.db.snapshot
-./synctl start
+synctl start
 ```

 Copy the old config file into a new config file:
@@ -192,10 +192,10 @@ Once that has completed, change the synapse config to point at the
 PostgreSQL database configuration file `homeserver-postgres.yaml`:

 ```sh
-./synctl stop
+synctl stop
 mv homeserver.yaml homeserver-old-sqlite.yaml
 mv homeserver-postgres.yaml homeserver.yaml
-./synctl start
+synctl start
 ```

 Synapse should now be running against PostgreSQL.

@@ -238,8 +238,9 @@ After updating the homeserver configuration, you must restart synapse:

 * If you use synctl:
   ```sh
-  cd /where/you/run/synapse
-  ./synctl restart
+  # Depending on how Synapse is installed, synctl may already be on
+  # your PATH. If not, you may need to activate a virtual environment.
+  synctl restart
   ```
 * If you use systemd:
   ```sh

@@ -47,7 +47,7 @@ this document.
 3. Restart Synapse:

    ```bash
-   ./synctl restart
+   synctl restart
    ```

 To check whether your update was successful, you can check the running

@@ -85,6 +85,35 @@ process, for example:
 dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
 ```

+# Upgrading to v1.55.0
+
+## `synctl` script has been moved
+
+The `synctl` script
+[has been made](https://github.com/matrix-org/synapse/pull/12140) an
+[entry point](https://packaging.python.org/en/latest/specifications/entry-points/)
+and no longer exists at the root of Synapse's source tree. If you wish to use
+`synctl` to manage your homeserver, you should invoke `synctl` directly, e.g.
+`synctl start` instead of `./synctl start` or `/path/to/synctl start`.
+
+You will need to ensure `synctl` is on your `PATH`.
+  - This is automatically the case when using
+    [Debian packages](https://packages.matrix.org/debian/) or
+    [docker images](https://hub.docker.com/r/matrixdotorg/synapse)
+    provided by Matrix.org.
+  - When installing from a wheel, sdist, or PyPI, a `synctl` executable is added
+    to your Python installation's `bin`. This should be on your `PATH`
+    automatically, though you might need to activate a virtual environment
+    depending on how you installed Synapse.
+
+
+## Compatibility dropped for Mjolnir 1.3.1 and earlier
+
+Synapse v1.55.0 drops support for Mjolnir 1.3.1 and earlier.
+If you use the Mjolnir module to moderate your homeserver,
+please upgrade Mjolnir to version 1.3.2 or later before upgrading Synapse.
+
+
 # Upgrading to v1.54.0

 ## Legacy structured logging configuration removal

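As a quick way to confirm the upgrade note above on a given machine, the illustrative snippet below checks where `synctl` now resolves from; it is an editorial sketch, not part of the diff.

```python
# Illustrative check only: confirm which `synctl` the shell will pick up after
# the upgrade, since the script no longer lives at the root of the source tree.
import shutil

path = shutil.which("synctl")
if path is None:
    print("synctl not found on PATH; activate your virtualenv or reinstall Synapse")
else:
    print(f"synctl resolves to {path}")
```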
@@ -12,7 +12,7 @@ UPDATE users SET admin = 1 WHERE name = '@foo:bar.com';
 ```

 A new server admin user can also be created using the `register_new_matrix_user`
-command. This is a script that is located in the `scripts/` directory, or possibly
+command. This is a script that is distributed as part of synapse. It is possibly
 already on your `$PATH` depending on how Synapse was installed.

 Finding your user's `access_token` is client-dependent, but will usually be shown in the client's settings.

@@ -1,7 +0,0 @@
-/*
- * Indents each chapter title in the left sidebar so that they aren't
- * at the same level as the section headers.
- */
-.chapter-item {
-    margin-left: 1em;
-}

docs/website_files/section-headers.css (new file, 20 lines)
@@ -0,0 +1,20 @@
+/*
+ * Indents each chapter title in the left sidebar so that they aren't
+ * at the same level as the section headers.
+ */
+.chapter-item {
+    margin-left: 1em;
+}
+
+/*
+ * Prevents a large gap between successive section headers.
+ *
+ * mdbook sets 'margin-top: 2.5em' on h2 and h3 headers. This makes sense when separating
+ * a header from the paragraph beforehand, but has the downside of introducing a large
+ * gap between headers that are next to each other with no text in between.
+ *
+ * This rule reduces the margin in this case.
+ */
+h1 + h2, h2 + h3 {
+    margin-top: 1.0em;
+}

mypy.ini (32 lines changed)
@@ -11,7 +11,7 @@ local_partial_types = True
 no_implicit_optional = True

 files =
-  scripts-dev/sign_json,
+  scripts-dev/,
   setup.py,
   synapse/,
   tests/
@@ -23,6 +23,20 @@ files =
 # https://docs.python.org/3/library/re.html#re.X
 exclude = (?x)
   ^(
+  |scripts-dev/build_debian_packages.py
+  |scripts-dev/check_signature.py
+  |scripts-dev/definitions.py
+  |scripts-dev/federation_client.py
+  |scripts-dev/hash_history.py
+  |scripts-dev/list_url_patterns.py
+  |scripts-dev/release.py
+  |scripts-dev/tail-synapse.py
+
+  |synapse/_scripts/export_signing_key.py
+  |synapse/_scripts/move_remote_media_to_new_store.py
+  |synapse/_scripts/synapse_port_db.py
+  |synapse/_scripts/update_synapse_database.py
+
   |synapse/storage/databases/__init__.py
   |synapse/storage/databases/main/__init__.py
   |synapse/storage/databases/main/cache.py
@@ -74,15 +88,7 @@ exclude = (?x)
   |tests/push/test_http.py
   |tests/push/test_presentable_names.py
   |tests/push/test_push_rule_evaluator.py
-  |tests/rest/client/test_account.py
-  |tests/rest/client/test_filter.py
-  |tests/rest/client/test_report_event.py
-  |tests/rest/client/test_rooms.py
-  |tests/rest/client/test_third_party_rules.py
-  |tests/rest/client/test_transactions.py
-  |tests/rest/client/test_typing.py
-  |tests/rest/key/v2/test_remote_key_resource.py
   |tests/rest/media/v1/test_base.py
   |tests/rest/media/v1/test_media_storage.py
   |tests/rest/media/v1/test_url_preview.py
   |tests/scripts/test_new_matrix_user.py
@@ -246,10 +252,7 @@ disallow_untyped_defs = True
 [mypy-tests.storage.test_user_directory]
 disallow_untyped_defs = True

-[mypy-tests.rest.admin.*]
-disallow_untyped_defs = True
-
-[mypy-tests.rest.client.*]
+[mypy-tests.rest.*]
 disallow_untyped_defs = True

 [mypy-tests.federation.transport.test_client]
@@ -350,3 +353,6 @@ ignore_missing_imports = True

 [mypy-zope]
 ignore_missing_imports = True
+
+[mypy-incremental.*]
+ignore_missing_imports = True

@@ -71,4 +71,4 @@ fi

 # Run the tests!
 echo "Images built; running complement"
-go test -v -tags synapse_blacklist,msc2403 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests/...
+go test -v -tags synapse_blacklist,msc2403,msc2716,msc3030 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests/...

@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-#
-# Update/check the docs/sample_config.yaml
-
-set -e
-
-cd "$(dirname "$0")/.."
-
-SAMPLE_CONFIG="docs/sample_config.yaml"
-SAMPLE_LOG_CONFIG="docs/sample_log_config.yaml"
-
-check() {
-    diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || return 1
-}
-
-if [ "$1" == "--check" ]; then
-    diff -u "$SAMPLE_CONFIG" <(./scripts/generate_config --header-file docs/.sample_config_header.yaml) >/dev/null || {
-        echo -e "\e[1m\e[31m$SAMPLE_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
-        exit 1
-    }
-    diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || {
-        echo -e "\e[1m\e[31m$SAMPLE_LOG_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
-        exit 1
-    }
-else
-    ./scripts/generate_config --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG"
-    ./scripts/generate_log_config -o "$SAMPLE_LOG_CONFIG"
-fi

scripts-dev/generate_sample_config.sh (new executable file, 28 lines)
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+#
+# Update/check the docs/sample_config.yaml
+
+set -e
+
+cd "$(dirname "$0")/.."
+
+SAMPLE_CONFIG="docs/sample_config.yaml"
+SAMPLE_LOG_CONFIG="docs/sample_log_config.yaml"
+
+check() {
+    diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || return 1
+}
+
+if [ "$1" == "--check" ]; then
+    diff -u "$SAMPLE_CONFIG" <(synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml) >/dev/null || {
+        echo -e "\e[1m\e[31m$SAMPLE_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config.sh\`.\e[0m" >&2
+        exit 1
+    }
+    diff -u "$SAMPLE_LOG_CONFIG" <(synapse/_scripts/generate_log_config.py) >/dev/null || {
+        echo -e "\e[1m\e[31m$SAMPLE_LOG_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config.sh\`.\e[0m" >&2
+        exit 1
+    }
+else
+    synapse/_scripts/generate_config.py --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG"
+    synapse/_scripts/generate_log_config.py -o "$SAMPLE_LOG_CONFIG"
+fi

@@ -84,17 +84,8 @@ else
 files=(
 "synapse" "docker" "tests"
-# annoyingly, black doesn't find these so we have to list them
-"scripts/export_signing_key"
-"scripts/generate_config"
-"scripts/generate_log_config"
-"scripts/hash_password"
-"scripts/register_new_matrix_user"
-"scripts/synapse_port_db"
-"scripts/update_synapse_database"
 "scripts-dev"
-"scripts-dev/build_debian_packages"
-"scripts-dev/sign_json"
-"contrib" "synctl" "setup.py" "synmark" "stubs" ".ci"
+"contrib" "setup.py" "synmark" "stubs" ".ci"
 )
 fi
 fi

@@ -147,7 +147,7 @@ python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"

 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
 echo "Running db background jobs..."
-scripts/update_synapse_database --database-config --run-background-updates "$SQLITE_CONFIG"
+synapse/_scripts/update_synapse_database.py --database-config --run-background-updates "$SQLITE_CONFIG"

 # Create the PostgreSQL database.
 echo "Creating postgres database..."
@@ -156,10 +156,10 @@ createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_DB_NAME"
 echo "Copying data from SQLite3 to Postgres with synapse_port_db..."
 if [ -z "$COVERAGE" ]; then
   # No coverage needed
-  scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+  synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
 else
   # Coverage desired
-  coverage run scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
+  coverage run synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
 fi

 # Delete schema_version, applied_schema_deltas and applied_module_schemas tables

@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015, 2016 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from synapse._scripts.register_new_matrix_user import main
-
-if __name__ == "__main__":
-    main()

@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2021 The Matrix.org Foundation C.I.C.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from synapse._scripts.review_recent_signups import main
-
-if __name__ == "__main__":
-    main()

@@ -1,45 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-use warnings;
-
-use JSON::XS;
-use LWP::UserAgent;
-use URI::Escape;
-
-if (@ARGV < 4) {
-    die "usage: $0 <homeserver url> <access_token> <room_id|room_alias> <group_id>\n";
-}
-
-my ($hs, $access_token, $room_id, $group_id) = @ARGV;
-my $ua = LWP::UserAgent->new();
-$ua->timeout(10);
-
-if ($room_id =~ /^#/) {
-    $room_id = uri_escape($room_id);
-    $room_id = decode_json($ua->get("${hs}/_matrix/client/r0/directory/room/${room_id}?access_token=${access_token}")->decoded_content)->{room_id};
-}
-
-my $room_users = [ keys %{decode_json($ua->get("${hs}/_matrix/client/r0/rooms/${room_id}/joined_members?access_token=${access_token}")->decoded_content)->{joined}} ];
-my $group_users = [
-    (map { $_->{user_id} } @{decode_json($ua->get("${hs}/_matrix/client/unstable/groups/${group_id}/users?access_token=${access_token}" )->decoded_content)->{chunk}}),
-    (map { $_->{user_id} } @{decode_json($ua->get("${hs}/_matrix/client/unstable/groups/${group_id}/invited_users?access_token=${access_token}" )->decoded_content)->{chunk}}),
-];
-
-die "refusing to sync from empty room" unless (@$room_users);
-die "refusing to sync to empty group" unless (@$group_users);
-
-my $diff = {};
-foreach my $user (@$room_users) { $diff->{$user}++ }
-foreach my $user (@$group_users) { $diff->{$user}-- }
-
-foreach my $user (keys %$diff) {
-    if ($diff->{$user} == 1) {
-        warn "inviting $user";
-        print STDERR $ua->put("${hs}/_matrix/client/unstable/groups/${group_id}/admin/users/invite/${user}?access_token=${access_token}", Content=>'{}')->status_line."\n";
-    }
-    elsif ($diff->{$user} == -1) {
-        warn "removing $user";
-        print STDERR $ua->put("${hs}/_matrix/client/unstable/groups/${group_id}/admin/users/remove/${user}?access_token=${access_token}", Content=>'{}')->status_line."\n";
-    }
-}

setup.py (14 lines changed)
@@ -15,7 +15,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import glob
 import os
 from typing import Any, Dict

@@ -153,8 +152,20 @@ setup(
     python_requires="~=3.7",
     entry_points={
         "console_scripts": [
+            # Application
             "synapse_homeserver = synapse.app.homeserver:main",
             "synapse_worker = synapse.app.generic_worker:main",
+            "synctl = synapse._scripts.synctl:main",
+            # Scripts
+            "export_signing_key = synapse._scripts.export_signing_key:main",
+            "generate_config = synapse._scripts.generate_config:main",
+            "generate_log_config = synapse._scripts.generate_log_config:main",
+            "generate_signing_key = synapse._scripts.generate_signing_key:main",
+            "hash_password = synapse._scripts.hash_password:main",
+            "register_new_matrix_user = synapse._scripts.register_new_matrix_user:main",
+            "synapse_port_db = synapse._scripts.synapse_port_db:main",
+            "synapse_review_recent_signups = synapse._scripts.review_recent_signups:main",
+            "update_synapse_database = synapse._scripts.update_synapse_database:main",
         ]
     },
     classifiers=[
@@ -167,6 +178,5 @@ setup(
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
     ],
-    scripts=["synctl"] + glob.glob("scripts/*"),
     cmdclass={"test": TestCommand},
 )

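For context, each `console_scripts` entry above means the installer generates a small executable wrapper that imports the named module and calls its `main()`. The sketch below shows a roughly equivalent lookup by hand; it assumes Python 3.10+ `importlib.metadata` and is illustrative only, not part of the diff.

```python
# Rough sketch of what the generated `synctl` wrapper amounts to at runtime
# (assumes Python 3.10+ for the group/name filtering of entry_points()).
from importlib.metadata import entry_points

(ep,) = entry_points(group="console_scripts", name="synctl")
main = ep.load()  # imports synapse._scripts.synctl and returns its main()
raise SystemExit(main())
```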
@@ -20,7 +20,7 @@ from twisted.internet import protocol
 from twisted.internet.defer import Deferred

 class RedisProtocol(protocol.Protocol):
-    def publish(self, channel: str, message: bytes): ...
+    def publish(self, channel: str, message: bytes) -> "Deferred[None]": ...
     def ping(self) -> "Deferred[None]": ...
     def set(
         self,
@@ -52,11 +52,14 @@ def lazyConnection(
     convertNumbers: bool = ...,
 ) -> RedisProtocol: ...

-class ConnectionHandler: ...
+# ConnectionHandler doesn't actually inherit from RedisProtocol, but it proxies
+# most methods to it via ConnectionHandler.__getattr__.
+class ConnectionHandler(RedisProtocol):
+    def disconnect(self) -> "Deferred[None]": ...

 class RedisFactory(protocol.ReconnectingClientFactory):
     continueTrying: bool
-    handler: RedisProtocol
+    handler: ConnectionHandler
     pool: List[RedisProtocol]
     replyTimeout: Optional[int]
     def __init__(

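The stub change above leans on the proxying behaviour described in its comment. A tiny self-contained sketch of that delegation pattern (deliberately unrelated to the real txredisapi classes) is shown below for readers unfamiliar with it.

```python
# Toy illustration of the __getattr__ delegation the stub comment describes:
# the handler is not a subclass of the protocol, but forwards unknown
# attribute lookups to it, so typing it as the protocol is a pragmatic lie.
class Protocol:
    def ping(self) -> str:
        return "PONG"

class Handler:
    def __init__(self, proto: Protocol) -> None:
        self._proto = proto

    def __getattr__(self, name: str):
        # Only called when normal attribute lookup fails; delegate to the protocol.
        return getattr(self._proto, name)

print(Handler(Protocol()).ping())  # -> PONG
```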
@@ -25,6 +25,27 @@ if sys.version_info < (3, 7):
    print("Synapse requires Python 3.7 or above.")
    sys.exit(1)

# Allow using the asyncio reactor via env var.
if bool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", False)):
    try:
        from incremental import Version

        import twisted

        # We need a bugfix that is included in Twisted 21.2.0:
        # https://twistedmatrix.com/trac/ticket/9787
        if twisted.version < Version("Twisted", 21, 2, 0):
            print("Using asyncio reactor requires Twisted>=21.2.0")
            sys.exit(1)

        import asyncio

        from twisted.internet import asyncioreactor

        asyncioreactor.install(asyncio.get_event_loop())
    except ImportError:
        pass

# Twisted and canonicaljson will fail to import when this file is executed to
# get the __version__ during a fresh install. That's OK and subsequent calls to
# actually start Synapse will import these libraries fine.
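The SYNAPSE_ASYNC_IO_REACTOR block has to run this early because Twisted only honours the first reactor installed; once anything imports twisted.internet.reactor, the default reactor is locked in. A small standalone sketch of the same ordering constraint, assuming Twisted >= 21.2.0 is installed:

import asyncio
import os

# Mirror the env-var gate above: any non-empty value enables the asyncio reactor.
if os.environ.get("SYNAPSE_ASYNC_IO_REACTOR"):
    from twisted.internet import asyncioreactor

    # Must happen before the first `from twisted.internet import reactor`.
    asyncioreactor.install(asyncio.get_event_loop())

from twisted.internet import reactor  # noqa: E402

print(type(reactor))  # AsyncioSelectorReactor when the env var is set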
@@ -50,7 +50,7 @@ def format_for_config(public_key: nacl.signing.VerifyKey, expiry_ts: int):
    )


if __name__ == "__main__":
def main():
    parser = argparse.ArgumentParser()

    parser.add_argument(
@@ -85,7 +85,6 @@ if __name__ == "__main__":
        else format_plain
    )

    keys = []
    for file in args.key_file:
        try:
            res = read_signing_keys(file)
@@ -98,3 +97,7 @@ if __name__ == "__main__":
            res = []
        for key in res:
            formatter(get_verify_key(key))


if __name__ == "__main__":
    main()
@@ -6,7 +6,8 @@ import sys

from synapse.config.homeserver import HomeServerConfig

if __name__ == "__main__":

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--config-dir",
@@ -76,3 +77,7 @@ if __name__ == "__main__":
    shutil.copyfileobj(args.header_file, args.output_file)

    args.output_file.write(conf)


if __name__ == "__main__":
    main()
@@ -19,7 +19,8 @@ import sys

from synapse.config.logger import DEFAULT_LOG_CONFIG

if __name__ == "__main__":

def main():
    parser = argparse.ArgumentParser()

    parser.add_argument(
@@ -42,3 +43,7 @@ if __name__ == "__main__":
    out = args.output_file
    out.write(DEFAULT_LOG_CONFIG.substitute(log_file=args.log_file))
    out.flush()


if __name__ == "__main__":
    main()
@@ -19,7 +19,8 @@ from signedjson.key import generate_signing_key, write_signing_keys

from synapse.util.stringutils import random_string

if __name__ == "__main__":

def main():
    parser = argparse.ArgumentParser()

    parser.add_argument(
@@ -34,3 +35,7 @@ if __name__ == "__main__":
    key_id = "a_" + random_string(4)
    key = (generate_signing_key(key_id),)
    write_signing_keys(args.output_file, key)


if __name__ == "__main__":
    main()
@@ -8,9 +8,6 @@ import unicodedata
import bcrypt
import yaml

bcrypt_rounds = 12
password_pepper = ""


def prompt_for_pass():
    password = getpass.getpass("Password: ")
@@ -26,7 +23,10 @@ def prompt_for_pass():
    return password


if __name__ == "__main__":
def main():
    bcrypt_rounds = 12
    password_pepper = ""

    parser = argparse.ArgumentParser(
        description=(
            "Calculate the hash of a new password, so that passwords can be reset"
@@ -77,3 +77,7 @@ if __name__ == "__main__":
    ).decode("ascii")

    print(hashed)


if __name__ == "__main__":
    main()
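hash_password boils down to bcrypt with a configurable work factor and an optional server-wide pepper. The sketch below shows that core operation only; the exact way the script combines the pepper with the password, and how it reads settings from the YAML config, is not visible in this hunk, so treat the concatenation here as an assumption.

import bcrypt


def hash_password(password: str, pepper: str = "", rounds: int = 12) -> str:
    # Assumed combination: password followed by the pepper, UTF-8 encoded.
    peppered = (password + pepper).encode("utf-8")
    return bcrypt.hashpw(peppered, bcrypt.gensalt(rounds)).decode("ascii")


print(hash_password("correct horse battery staple"))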
@@ -28,7 +28,7 @@ This can be extracted from postgres with::

To use, pipe the above into::

    PYTHON_PATH=. ./scripts/move_remote_media_to_new_store.py <source repo> <dest repo>
    PYTHON_PATH=. synapse/_scripts/move_remote_media_to_new_store.py <source repo> <dest repo>
"""

import argparse
@@ -1146,7 +1146,7 @@ class TerminalProgress(Progress):
##############################################


if __name__ == "__main__":
def main():
    parser = argparse.ArgumentParser(
        description="A script to port an existing synapse SQLite database to"
        " a new PostgreSQL database."
@@ -1251,3 +1251,7 @@ if __name__ == "__main__":
        sys.stderr.write(end_error)

        sys.exit(5)


if __name__ == "__main__":
    main()
@@ -178,7 +178,9 @@ class RelationTypes:
    ANNOTATION: Final = "m.annotation"
    REPLACE: Final = "m.replace"
    REFERENCE: Final = "m.reference"
    THREAD: Final = "io.element.thread"
    THREAD: Final = "m.thread"
    # TODO Remove this in Synapse >= v1.57.0.
    UNSTABLE_THREAD: Final = "io.element.thread"


class LimitBlockingTypes:
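With the stable m.thread identifier promoted and io.element.thread kept only as UNSTABLE_THREAD until around v1.57.0, code that inspects a rel_type needs to accept both spellings during the transition. A tiny hypothetical helper illustrating that (not code from this diff):

THREAD = "m.thread"
UNSTABLE_THREAD = "io.element.thread"  # deprecated, kept for the migration window


def is_thread_relation(rel_type: str) -> bool:
    # Accept both the stable and the legacy unstable identifier.
    return rel_type in (THREAD, UNSTABLE_THREAD)


assert is_thread_relation("m.thread")
assert is_thread_relation("io.element.thread")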
@@ -88,7 +88,9 @@ ROOM_EVENT_FILTER_SCHEMA = {
        "org.matrix.labels": {"type": "array", "items": {"type": "string"}},
        "org.matrix.not_labels": {"type": "array", "items": {"type": "string"}},
        # MSC3440, filtering by event relations.
        "related_by_senders": {"type": "array", "items": {"type": "string"}},
        "io.element.relation_senders": {"type": "array", "items": {"type": "string"}},
        "related_by_rel_types": {"type": "array", "items": {"type": "string"}},
        "io.element.relation_types": {"type": "array", "items": {"type": "string"}},
    },
}
@@ -318,19 +320,18 @@ class Filter:
        self.labels = filter_json.get("org.matrix.labels", None)
        self.not_labels = filter_json.get("org.matrix.not_labels", [])

        # Ideally these would be rejected at the endpoint if they were provided
        # and not supported, but that would involve modifying the JSON schema
        # based on the homeserver configuration.
        self.related_by_senders = self.filter_json.get("related_by_senders", None)
        self.related_by_rel_types = self.filter_json.get("related_by_rel_types", None)

        # Fallback to the unstable prefix if the stable version is not given.
        if hs.config.experimental.msc3440_enabled:
            self.relation_senders = self.filter_json.get(
            self.related_by_senders = self.related_by_senders or self.filter_json.get(
                "io.element.relation_senders", None
            )
            self.relation_types = self.filter_json.get(
                "io.element.relation_types", None
            self.related_by_rel_types = (
                self.related_by_rel_types
                or self.filter_json.get("io.element.relation_types", None)
            )
        else:
            self.relation_senders = None
            self.relation_types = None

    def filters_all_types(self) -> bool:
        return "*" in self.not_types
@@ -461,7 +462,7 @@ class Filter:
        event_ids = [event.event_id for event in events if isinstance(event, EventBase)]  # type: ignore[attr-defined]
        event_ids_to_keep = set(
            await self._store.events_have_relations(
                event_ids, self.relation_senders, self.relation_types
                event_ids, self.related_by_senders, self.related_by_rel_types
            )
        )

@@ -474,7 +475,7 @@ class Filter:
    async def filter(self, events: Iterable[FilterEvent]) -> List[FilterEvent]:
        result = [event for event in events if self._check(event)]

        if self.relation_senders or self.relation_types:
        if self.related_by_senders or self.related_by_rel_types:
            return await self._check_event_relations(result)

        return result
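The Filter changes above mean a room event filter can use the stable MSC3440 keys directly, with the io.element.* names only consulted as a fallback when the experimental flag is enabled and the stable key is absent. A hypothetical filter body exercising both spellings (the field values are illustrative, not taken from the diff):

room_event_filter = {
    # Stable MSC3440 keys, always read:
    "related_by_senders": ["@alice:example.org"],
    "related_by_rel_types": ["m.thread"],
    # Unstable-prefixed equivalents, used only as a fallback when the stable
    # keys are missing and msc3440_enabled is set on the homeserver:
    "io.element.relation_senders": ["@alice:example.org"],
    "io.element.relation_types": ["io.element.thread"],
}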
@@ -417,7 +417,7 @@ class GenericWorkerServer(HomeServer):
            else:
                logger.warning("Unsupported listener type: %s", listener.type)

        self.get_tcp_replication().start_replication(self)
        self.get_replication_command_handler().start_replication(self)


def start(config_options: List[str]) -> None:
Some files were not shown because too many files have changed in this diff.