Compare commits
224 Commits
erikj/test
...
v1.29.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4de1c35728 | ||
|
|
15c788e22d | ||
|
|
a6333b8d42 | ||
|
|
ea0a3aaf0a | ||
|
|
3f49d80dcf | ||
|
|
33a02f0f52 | ||
|
|
4db07f9aef | ||
|
|
a4fa044c00 | ||
|
|
922788c604 | ||
|
|
d790d0d314 | ||
|
|
0c330423bc | ||
|
|
16f9f93eb7 | ||
|
|
a5daae2a5f | ||
|
|
0279e0e086 | ||
|
|
aee10768d8 | ||
|
|
7f5d753d06 | ||
|
|
16108c579d | ||
|
|
f00c4e7af0 | ||
|
|
ad8589d392 | ||
|
|
16ec8c3272 | ||
|
|
a0bc9d387e | ||
|
|
e12077a78a | ||
|
|
ddb240293a | ||
|
|
15090de850 | ||
|
|
e53f11bd62 | ||
|
|
2566dc57ce | ||
|
|
1e62d9ee8c | ||
|
|
1efdcc3e87 | ||
|
|
2756517f7a | ||
|
|
0f9f30b32b | ||
|
|
b5c4fe1971 | ||
|
|
d8e95e5452 | ||
|
|
00bf80cb8e | ||
|
|
7cc571510b | ||
|
|
f5c93fc993 | ||
|
|
2927921942 | ||
|
|
0b5c967813 | ||
|
|
7292b7c0eb | ||
|
|
713145d3de | ||
|
|
65a9eb8994 | ||
|
|
66f4949e7f | ||
|
|
1b2d6d55c5 | ||
|
|
71c9f8de6d | ||
|
|
70ea9593ff | ||
|
|
0a363f9ca4 | ||
|
|
e22b71810e | ||
|
|
fc8b3d8809 | ||
|
|
179c0953ff | ||
|
|
3a2fe5054f | ||
|
|
a1901abd6b | ||
|
|
c4a55ac4a4 | ||
|
|
d9f1dccba9 | ||
|
|
d0365bc8b0 | ||
|
|
b114a45f5f | ||
|
|
8bcfc2eaad | ||
|
|
13e9029f44 | ||
|
|
3d2acc930f | ||
|
|
9bc74743d5 | ||
|
|
1c5e715e5e | ||
|
|
1381cd05b0 | ||
|
|
2d577283ab | ||
|
|
b106080fb4 | ||
|
|
84a7191410 | ||
|
|
d804285139 | ||
|
|
9ee3b9775f | ||
|
|
90550f598e | ||
|
|
8ad4676f35 | ||
|
|
9d64e4dbd6 | ||
|
|
e17553e185 | ||
|
|
e8e7012265 | ||
|
|
8ec2217103 | ||
|
|
bb2577f6b7 | ||
|
|
43f1c82457 | ||
|
|
626afd7e89 | ||
|
|
c8d9383cfb | ||
|
|
a25661b2eb | ||
|
|
3e5749b99f | ||
|
|
53f1c4da81 | ||
|
|
a8878960c0 | ||
|
|
9e19c6aab4 | ||
|
|
d2f0ec12d5 | ||
|
|
e1071fd625 | ||
|
|
33f64ca7d6 | ||
|
|
0a00b7ff14 | ||
|
|
5636e597c3 | ||
|
|
3b754aea27 | ||
|
|
0ad087273c | ||
|
|
731e08c63a | ||
|
|
ddfdf94506 | ||
|
|
6600f0bd57 | ||
|
|
a27c1fd74b | ||
|
|
74af356baf | ||
|
|
b8b172466f | ||
|
|
ff40c8099d | ||
|
|
594f2853e0 | ||
|
|
7950aa8a27 | ||
|
|
2c9b4a5f16 | ||
|
|
dcb9c2e8ae | ||
|
|
3f2f7efb87 | ||
|
|
40de534238 | ||
|
|
e40d88cff3 | ||
|
|
6aa87f8ce3 | ||
|
|
8a33d217bd | ||
|
|
6dade80048 | ||
|
|
80d6dc9783 | ||
|
|
fb0e14ee9a | ||
|
|
5f716fa777 | ||
|
|
29ae04af3b | ||
|
|
3f58fc848d | ||
|
|
0963d39ea6 | ||
|
|
b0b2cac057 | ||
|
|
d882fbca38 | ||
|
|
5a9cdaa6e9 | ||
|
|
adc96d4236 | ||
|
|
7e8083eb48 | ||
|
|
982d9eb211 | ||
|
|
792263c97c | ||
|
|
2ab6e67ab7 | ||
|
|
2814028ce5 | ||
|
|
b0f4119b8b | ||
|
|
3f534d3fdf | ||
|
|
17f2a512f3 | ||
|
|
e288499c60 | ||
|
|
afa18f1baa | ||
|
|
ce669863b9 | ||
|
|
7a0dcea3e5 | ||
|
|
f20dadb649 | ||
|
|
e4cdecb310 | ||
|
|
e1943d1353 | ||
|
|
4ca054a4ea | ||
|
|
ff55300b91 | ||
|
|
96e460df2e | ||
|
|
eec9ab3225 | ||
|
|
2610930721 | ||
|
|
b60bb28bbc | ||
|
|
8f75bf1df7 | ||
|
|
846b9d3df0 | ||
|
|
d1f13c7485 | ||
|
|
8fee6a3ab2 | ||
|
|
351845452c | ||
|
|
5963426b95 | ||
|
|
f30c3a99be | ||
|
|
c543bf87ec | ||
|
|
e5d70c8a82 | ||
|
|
5d38a3c97f | ||
|
|
419313b06a | ||
|
|
85c56b5a67 | ||
|
|
18ab35284a | ||
|
|
43dd93bb26 | ||
|
|
a800603561 | ||
|
|
4167494c90 | ||
|
|
8aed29dc61 | ||
|
|
9c715a5f19 | ||
|
|
f78d07bf00 | ||
|
|
13c7ab8181 | ||
|
|
f2c1560eca | ||
|
|
e19396d622 | ||
|
|
c14688d44a | ||
|
|
0d81a6fa3e | ||
|
|
4b73488e81 | ||
|
|
54a6afeee3 | ||
|
|
31d072aea0 | ||
|
|
a78016dadf | ||
|
|
93f84e0373 | ||
|
|
b755f60ce2 | ||
|
|
a764869623 | ||
|
|
b859919acc | ||
|
|
de7f049527 | ||
|
|
fe52dae6bd | ||
|
|
10332c175c | ||
|
|
34efb4c604 | ||
|
|
a083aea396 | ||
|
|
869667760f | ||
|
|
00e97a7774 | ||
|
|
ccb9616f26 | ||
|
|
2e537a0280 | ||
|
|
300d0d756a | ||
|
|
fbd9de6d1f | ||
|
|
7fa1346f93 | ||
|
|
17b713850f | ||
|
|
b685c5e7f1 | ||
|
|
e54746bdf7 | ||
|
|
71c46652a2 | ||
|
|
73ed289bd2 | ||
|
|
93b61589b0 | ||
|
|
cfcc4bfcaf | ||
|
|
a737cc2713 | ||
|
|
a64c29926e | ||
|
|
1baab20352 | ||
|
|
26837d5dbe | ||
|
|
dd8da8c5f6 | ||
|
|
4937fe3d6b | ||
|
|
e74bb96733 | ||
|
|
e5b659e9e1 | ||
|
|
a1ff1e967f | ||
|
|
4936fc59fc | ||
|
|
cee4010f94 | ||
|
|
e20f18a766 | ||
|
|
fdf8346944 | ||
|
|
5b857b77f7 | ||
|
|
4a55d267ee | ||
|
|
2547d9d4d7 | ||
|
|
65fb3b2e25 | ||
|
|
a71be9d62d | ||
|
|
fe18882bb5 | ||
|
|
e448dbbf5b | ||
|
|
69961c7e9f | ||
|
|
a01605c136 | ||
|
|
6f7417c3db | ||
|
|
8965b6cfec | ||
|
|
930ba00971 | ||
|
|
056327457f | ||
|
|
28f255d5f3 | ||
|
|
c177faf5a9 | ||
|
|
49c619a9a2 | ||
|
|
da16d06301 | ||
|
|
0b77329fe2 | ||
|
|
b52fb703f7 | ||
|
|
e2c16edc78 | ||
|
|
2eb421b606 | ||
|
|
90ad4d443a | ||
|
|
85c0999bfb | ||
|
|
c91045f56c | ||
|
|
b849e46139 |
@@ -10,4 +10,7 @@ apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev x
|
|||||||
|
|
||||||
export LANG="C.UTF-8"
|
export LANG="C.UTF-8"
|
||||||
|
|
||||||
|
# Prevent virtualenv from auto-updating pip to an incompatible version
|
||||||
|
export VIRTUALENV_NO_DOWNLOAD=1
|
||||||
|
|
||||||
exec tox -e py35-old,combine
|
exec tox -e py35-old,combine
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ jobs:
|
|||||||
platforms: linux/amd64
|
platforms: linux/amd64
|
||||||
- docker_build:
|
- docker_build:
|
||||||
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
|
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
platforms: linux/amd64,linux/arm64
|
||||||
|
|
||||||
dockerhubuploadlatest:
|
dockerhubuploadlatest:
|
||||||
docker:
|
docker:
|
||||||
@@ -27,7 +27,7 @@ jobs:
|
|||||||
# until all of the platforms are built.
|
# until all of the platforms are built.
|
||||||
- docker_build:
|
- docker_build:
|
||||||
tag: -t matrixdotorg/synapse:latest
|
tag: -t matrixdotorg/synapse:latest
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
platforms: linux/amd64,linux/arm64
|
||||||
|
|
||||||
workflows:
|
workflows:
|
||||||
build:
|
build:
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -6,13 +6,14 @@
|
|||||||
*.egg
|
*.egg
|
||||||
*.egg-info
|
*.egg-info
|
||||||
*.lock
|
*.lock
|
||||||
*.pyc
|
*.py[cod]
|
||||||
*.snap
|
*.snap
|
||||||
*.tac
|
*.tac
|
||||||
_trial_temp/
|
_trial_temp/
|
||||||
_trial_temp*/
|
_trial_temp*/
|
||||||
/out
|
/out
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
__pycache__/
|
||||||
|
|
||||||
# stuff that is likely to exist when you run a server locally
|
# stuff that is likely to exist when you run a server locally
|
||||||
/*.db
|
/*.db
|
||||||
|
|||||||
300
CHANGES.md
300
CHANGES.md
@@ -1,9 +1,305 @@
|
|||||||
|
Synapse 1.29.0 (2021-03-08)
|
||||||
|
===========================
|
||||||
|
|
||||||
|
Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see [UPGRADE.rst](UPGRADE.rst#upgrading-to-v1290) for more details on this change.
|
||||||
|
|
||||||
|
|
||||||
|
No significant changes.
|
||||||
|
|
||||||
|
|
||||||
|
Synapse 1.29.0rc1 (2021-03-04)
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Add rate limiters to cross-user key sharing requests. ([\#8957](https://github.com/matrix-org/synapse/issues/8957))
|
||||||
|
- Add `order_by` to the admin API `GET /_synapse/admin/v1/users/<user_id>/media`. Contributed by @dklimpel. ([\#8978](https://github.com/matrix-org/synapse/issues/8978))
|
||||||
|
- Add some configuration settings to make users' profile data more private. ([\#9203](https://github.com/matrix-org/synapse/issues/9203))
|
||||||
|
- The `no_proxy` and `NO_PROXY` environment variables are now respected in proxied HTTP clients with the lowercase form taking precedence if both are present. Additionally, the lowercase `https_proxy` environment variable is now respected in proxied HTTP clients on top of existing support for the uppercase `HTTPS_PROXY` form and takes precedence if both are present. Contributed by Timothy Leung. ([\#9372](https://github.com/matrix-org/synapse/issues/9372))
|
||||||
|
- Add a configuration option, `user_directory.prefer_local_users`, which when enabled will make it more likely for users on the same server as you to appear above other users. ([\#9383](https://github.com/matrix-org/synapse/issues/9383), [\#9385](https://github.com/matrix-org/synapse/issues/9385))
|
||||||
|
- Add support for regenerating thumbnails if they have been deleted but the original image is still stored. ([\#9438](https://github.com/matrix-org/synapse/issues/9438))
|
||||||
|
- Add support for `X-Forwarded-Proto` header when using a reverse proxy. ([\#9472](https://github.com/matrix-org/synapse/issues/9472), [\#9501](https://github.com/matrix-org/synapse/issues/9501), [\#9512](https://github.com/matrix-org/synapse/issues/9512), [\#9539](https://github.com/matrix-org/synapse/issues/9539))
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix a bug where users' pushers were not all deleted when they deactivated their account. ([\#9285](https://github.com/matrix-org/synapse/issues/9285), [\#9516](https://github.com/matrix-org/synapse/issues/9516))
|
||||||
|
- Fix a bug where a lot of unnecessary presence updates were sent when joining a room. ([\#9402](https://github.com/matrix-org/synapse/issues/9402))
|
||||||
|
- Fix a bug that caused multiple calls to the experimental `shared_rooms` endpoint to return stale results. ([\#9416](https://github.com/matrix-org/synapse/issues/9416))
|
||||||
|
- Fix a bug in single sign-on which could cause a "No session cookie found" error. ([\#9436](https://github.com/matrix-org/synapse/issues/9436))
|
||||||
|
- Fix bug introduced in v1.27.0 where allowing a user to choose their own username when logging in via single sign-on did not work unless an `idp_icon` was defined. ([\#9440](https://github.com/matrix-org/synapse/issues/9440))
|
||||||
|
- Fix a bug introduced in v1.26.0 where some sequences were not properly configured when running `synapse_port_db`. ([\#9449](https://github.com/matrix-org/synapse/issues/9449))
|
||||||
|
- Fix deleting pushers when using sharded pushers. ([\#9465](https://github.com/matrix-org/synapse/issues/9465), [\#9466](https://github.com/matrix-org/synapse/issues/9466), [\#9479](https://github.com/matrix-org/synapse/issues/9479), [\#9536](https://github.com/matrix-org/synapse/issues/9536))
|
||||||
|
- Fix missing startup checks for the consistency of certain PostgreSQL sequences. ([\#9470](https://github.com/matrix-org/synapse/issues/9470))
|
||||||
|
- Fix a long-standing bug where the media repository could leak file descriptors while previewing media. ([\#9497](https://github.com/matrix-org/synapse/issues/9497))
|
||||||
|
- Properly purge the event chain cover index when purging history. ([\#9498](https://github.com/matrix-org/synapse/issues/9498))
|
||||||
|
- Fix missing chain cover index due to a schema delta not being applied correctly. Only affected servers that ran development versions. ([\#9503](https://github.com/matrix-org/synapse/issues/9503))
|
||||||
|
- Fix a bug introduced in v1.25.0 where `/_synapse/admin/join/` would fail when given a room alias. ([\#9506](https://github.com/matrix-org/synapse/issues/9506))
|
||||||
|
- Prevent presence background jobs from running when presence is disabled. ([\#9530](https://github.com/matrix-org/synapse/issues/9530))
|
||||||
|
- Fix rare edge case that caused a background update to fail if the server had rejected an event that had duplicate auth events. ([\#9537](https://github.com/matrix-org/synapse/issues/9537))
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Update the example systemd config to propagate reloads to individual units. ([\#9463](https://github.com/matrix-org/synapse/issues/9463))
|
||||||
|
|
||||||
|
|
||||||
|
Internal Changes
|
||||||
|
----------------
|
||||||
|
|
||||||
|
- Add documentation and type hints to `parse_duration`. ([\#9432](https://github.com/matrix-org/synapse/issues/9432))
|
||||||
|
- Remove vestiges of `uploads_path` configuration setting. ([\#9462](https://github.com/matrix-org/synapse/issues/9462))
|
||||||
|
- Add a comment about systemd-python. ([\#9464](https://github.com/matrix-org/synapse/issues/9464))
|
||||||
|
- Test that we require validated email for email pushers. ([\#9496](https://github.com/matrix-org/synapse/issues/9496))
|
||||||
|
- Allow python to generate bytecode for synapse. ([\#9502](https://github.com/matrix-org/synapse/issues/9502))
|
||||||
|
- Fix incorrect type hints. ([\#9515](https://github.com/matrix-org/synapse/issues/9515), [\#9518](https://github.com/matrix-org/synapse/issues/9518))
|
||||||
|
- Add type hints to device and event report admin API. ([\#9519](https://github.com/matrix-org/synapse/issues/9519))
|
||||||
|
- Add type hints to user admin API. ([\#9521](https://github.com/matrix-org/synapse/issues/9521))
|
||||||
|
- Bump the versions of mypy and mypy-zope used for static type checking. ([\#9529](https://github.com/matrix-org/synapse/issues/9529))
|
||||||
|
|
||||||
|
|
||||||
|
Synapse 1.28.0 (2021-02-25)
|
||||||
|
===========================
|
||||||
|
|
||||||
|
Note that this release drops support for ARMv7 in the official Docker images, due to repeated problems building for ARMv7 (and the associated maintenance burden this entails).
|
||||||
|
|
||||||
|
This release also fixes the documentation included in v1.27.0 around the callback URI for SAML2 identity providers. If your server is configured to use single sign-on via a SAML2 IdP, you may need to make configuration changes. Please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
|
||||||
|
|
||||||
|
|
||||||
|
Internal Changes
|
||||||
|
----------------
|
||||||
|
|
||||||
|
- Revert change in v1.28.0rc1 to remove the deprecated SAML endpoint. ([\#9474](https://github.com/matrix-org/synapse/issues/9474))
|
||||||
|
|
||||||
|
|
||||||
|
Synapse 1.28.0rc1 (2021-02-19)
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Removal warning
|
||||||
|
---------------
|
||||||
|
|
||||||
|
The v1 list accounts API is deprecated and will be removed in a future release.
|
||||||
|
This API was undocumented and misleading. It can be replaced by the
|
||||||
|
[v2 list accounts API](https://github.com/matrix-org/synapse/blob/release-v1.28.0/docs/admin_api/user_admin_api.rst#list-accounts),
|
||||||
|
which has been available since Synapse 1.7.0 (2019-12-13).
|
||||||
|
|
||||||
|
Please check if you're using any scripts which use the admin API and replace
|
||||||
|
`GET /_synapse/admin/v1/users/<user_id>` with `GET /_synapse/admin/v2/users`.
|
||||||
|
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- New admin API to get the context of an event: `/_synapse/admin/rooms/{roomId}/context/{eventId}`. ([\#9150](https://github.com/matrix-org/synapse/issues/9150))
|
||||||
|
- Further improvements to the user experience of registration via single sign-on. ([\#9300](https://github.com/matrix-org/synapse/issues/9300), [\#9301](https://github.com/matrix-org/synapse/issues/9301))
|
||||||
|
- Add hook to spam checker modules that allow checking file uploads and remote downloads. ([\#9311](https://github.com/matrix-org/synapse/issues/9311))
|
||||||
|
- Add support for receiving OpenID Connect authentication responses via form `POST`s rather than `GET`s. ([\#9376](https://github.com/matrix-org/synapse/issues/9376))
|
||||||
|
- Add the shadow-banning status to the admin API for user info. ([\#9400](https://github.com/matrix-org/synapse/issues/9400))
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix long-standing bug where sending email notifications would fail for rooms that the server had since left. ([\#9257](https://github.com/matrix-org/synapse/issues/9257))
|
||||||
|
- Fix bug introduced in Synapse 1.27.0rc1 which meant the "session expired" error page during SSO registration was badly formatted. ([\#9296](https://github.com/matrix-org/synapse/issues/9296))
|
||||||
|
- Assert a maximum length for some parameters for spec compliance. ([\#9321](https://github.com/matrix-org/synapse/issues/9321), [\#9393](https://github.com/matrix-org/synapse/issues/9393))
|
||||||
|
- Fix additional errors when previewing URLs: "AttributeError 'NoneType' object has no attribute 'xpath'" and "ValueError: Unicode strings with encoding declaration are not supported. Please use bytes input or XML fragments without declaration.". ([\#9333](https://github.com/matrix-org/synapse/issues/9333))
|
||||||
|
- Fix a bug causing Synapse to impose the wrong type constraints on fields when processing responses from appservices to `/_matrix/app/v1/thirdparty/user/{protocol}`. ([\#9361](https://github.com/matrix-org/synapse/issues/9361))
|
||||||
|
- Fix bug where Synapse would occasionally stop reconnecting to Redis after the connection was lost. ([\#9391](https://github.com/matrix-org/synapse/issues/9391))
|
||||||
|
- Fix a long-standing bug when upgrading a room: "TypeError: '>' not supported between instances of 'NoneType' and 'int'". ([\#9395](https://github.com/matrix-org/synapse/issues/9395))
|
||||||
|
- Reduce the amount of memory used when generating the URL preview of a file that is larger than the `max_spider_size`. ([\#9421](https://github.com/matrix-org/synapse/issues/9421))
|
||||||
|
- Fix a long-standing bug in the deduplication of old presence, resulting in no deduplication. ([\#9425](https://github.com/matrix-org/synapse/issues/9425))
|
||||||
|
- The `ui_auth.session_timeout` config option can now be specified in terms of number of seconds/minutes/etc/. Contributed by Rishabh Arya. ([\#9426](https://github.com/matrix-org/synapse/issues/9426))
|
||||||
|
- Fix a bug introduced in v1.27.0: "TypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType'." related to the user directory. ([\#9428](https://github.com/matrix-org/synapse/issues/9428))
|
||||||
|
|
||||||
|
|
||||||
|
Updates to the Docker image
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
- Drop support for ARMv7 in Docker images. ([\#9433](https://github.com/matrix-org/synapse/issues/9433))
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Reorganize CHANGELOG.md. ([\#9281](https://github.com/matrix-org/synapse/issues/9281))
|
||||||
|
- Add note to `auto_join_rooms` config option explaining existing rooms must be publicly joinable. ([\#9291](https://github.com/matrix-org/synapse/issues/9291))
|
||||||
|
- Correct name of Synapse's service file in TURN howto. ([\#9308](https://github.com/matrix-org/synapse/issues/9308))
|
||||||
|
- Fix the braces in the `oidc_providers` section of the sample config. ([\#9317](https://github.com/matrix-org/synapse/issues/9317))
|
||||||
|
- Update installation instructions on Fedora. ([\#9322](https://github.com/matrix-org/synapse/issues/9322))
|
||||||
|
- Add HTTP/2 support to the nginx example configuration. Contributed by David Vo. ([\#9390](https://github.com/matrix-org/synapse/issues/9390))
|
||||||
|
- Update docs for using Gitea as OpenID provider. ([\#9404](https://github.com/matrix-org/synapse/issues/9404))
|
||||||
|
- Document that pusher instances are shardable. ([\#9407](https://github.com/matrix-org/synapse/issues/9407))
|
||||||
|
- Fix erroneous documentation from v1.27.0 about updating the SAML2 callback URL. ([\#9434](https://github.com/matrix-org/synapse/issues/9434))
|
||||||
|
|
||||||
|
|
||||||
|
Deprecations and Removals
|
||||||
|
-------------------------
|
||||||
|
|
||||||
|
- Deprecate old admin API `GET /_synapse/admin/v1/users/<user_id>`. ([\#9429](https://github.com/matrix-org/synapse/issues/9429))
|
||||||
|
|
||||||
|
|
||||||
|
Internal Changes
|
||||||
|
----------------
|
||||||
|
|
||||||
|
- Fix 'object name reserved for internal use' errors with recent versions of SQLite. ([\#9003](https://github.com/matrix-org/synapse/issues/9003))
|
||||||
|
- Add experimental support for running Synapse with PyPy. ([\#9123](https://github.com/matrix-org/synapse/issues/9123))
|
||||||
|
- Deny access to additional IP addresses by default. ([\#9240](https://github.com/matrix-org/synapse/issues/9240))
|
||||||
|
- Update the `Cursor` type hints to better match PEP 249. ([\#9299](https://github.com/matrix-org/synapse/issues/9299))
|
||||||
|
- Add debug logging for SRV lookups. Contributed by @Bubu. ([\#9305](https://github.com/matrix-org/synapse/issues/9305))
|
||||||
|
- Improve logging for OIDC login flow. ([\#9307](https://github.com/matrix-org/synapse/issues/9307))
|
||||||
|
- Share the code for handling required attributes between the CAS and SAML handlers. ([\#9326](https://github.com/matrix-org/synapse/issues/9326))
|
||||||
|
- Clean up the code to load the metadata for OpenID Connect identity providers. ([\#9362](https://github.com/matrix-org/synapse/issues/9362))
|
||||||
|
- Convert tests to use `HomeserverTestCase`. ([\#9377](https://github.com/matrix-org/synapse/issues/9377), [\#9396](https://github.com/matrix-org/synapse/issues/9396))
|
||||||
|
- Update the version of black used to 20.8b1. ([\#9381](https://github.com/matrix-org/synapse/issues/9381))
|
||||||
|
- Allow OIDC config to override discovered values. ([\#9384](https://github.com/matrix-org/synapse/issues/9384))
|
||||||
|
- Remove some dead code from the acceptance of room invites path. ([\#9394](https://github.com/matrix-org/synapse/issues/9394))
|
||||||
|
- Clean up an unused method in the presence handler code. ([\#9408](https://github.com/matrix-org/synapse/issues/9408))
|
||||||
|
|
||||||
|
|
||||||
|
Synapse 1.27.0 (2021-02-16)
|
||||||
|
===========================
|
||||||
|
|
||||||
|
Note that this release includes a change in Synapse to use Redis as a cache ─ as well as a pub/sub mechanism ─ if Redis support is enabled for workers. No action is needed by server administrators, and we do not expect resource usage of the Redis instance to change dramatically.
|
||||||
|
|
||||||
|
This release also changes the callback URI for OpenID Connect (OIDC) and SAML2 identity providers. If your server is configured to use single sign-on via an OIDC/OAuth2 or SAML2 IdP, you may need to make configuration changes. Please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
|
||||||
|
|
||||||
|
This release also changes escaping of variables in the HTML templates for SSO or email notifications. If you have customised these templates, please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix building Docker images for armv7. ([\#9405](https://github.com/matrix-org/synapse/issues/9405))
|
||||||
|
|
||||||
|
|
||||||
|
Synapse 1.27.0rc2 (2021-02-11)
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Further improvements to the user experience of registration via single sign-on. ([\#9297](https://github.com/matrix-org/synapse/issues/9297))
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix ratelimiting introduced in v1.27.0rc1 for invites to respect the `ratelimit` flag on application services. ([\#9302](https://github.com/matrix-org/synapse/issues/9302))
|
||||||
|
- Do not automatically calculate `public_baseurl` since it can be wrong in some situations. Reverts behaviour introduced in v1.26.0. ([\#9313](https://github.com/matrix-org/synapse/issues/9313))
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Clarify the sample configuration for changes made to the template loading code. ([\#9310](https://github.com/matrix-org/synapse/issues/9310))
|
||||||
|
|
||||||
|
|
||||||
|
Synapse 1.27.0rc1 (2021-02-02)
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Add an admin API for getting and deleting forward extremities for a room. ([\#9062](https://github.com/matrix-org/synapse/issues/9062))
|
||||||
|
- Add an admin API for retrieving the current room state of a room. ([\#9168](https://github.com/matrix-org/synapse/issues/9168))
|
||||||
|
- Add experimental support for allowing clients to pick an SSO Identity Provider ([MSC2858](https://github.com/matrix-org/matrix-doc/pull/2858)). ([\#9183](https://github.com/matrix-org/synapse/issues/9183), [\#9242](https://github.com/matrix-org/synapse/issues/9242))
|
||||||
|
- Add an admin API endpoint for shadow-banning users. ([\#9209](https://github.com/matrix-org/synapse/issues/9209))
|
||||||
|
- Add ratelimits to the 3PID `/requestToken` APIs. ([\#9238](https://github.com/matrix-org/synapse/issues/9238))
|
||||||
|
- Add support to the OpenID Connect integration for adding the user's email address. ([\#9245](https://github.com/matrix-org/synapse/issues/9245))
|
||||||
|
- Add ratelimits to invites in rooms and to specific users. ([\#9258](https://github.com/matrix-org/synapse/issues/9258))
|
||||||
|
- Improve the user experience of setting up an account via single-sign on. ([\#9262](https://github.com/matrix-org/synapse/issues/9262), [\#9272](https://github.com/matrix-org/synapse/issues/9272), [\#9275](https://github.com/matrix-org/synapse/issues/9275), [\#9276](https://github.com/matrix-org/synapse/issues/9276), [\#9277](https://github.com/matrix-org/synapse/issues/9277), [\#9286](https://github.com/matrix-org/synapse/issues/9286), [\#9287](https://github.com/matrix-org/synapse/issues/9287))
|
||||||
|
- Add phone home stats for encrypted messages. ([\#9283](https://github.com/matrix-org/synapse/issues/9283))
|
||||||
|
- Update the redirect URI for OIDC authentication. ([\#9288](https://github.com/matrix-org/synapse/issues/9288))
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix spurious errors in logs when deleting a non-existent pusher. ([\#9121](https://github.com/matrix-org/synapse/issues/9121))
|
||||||
|
- Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled). ([\#9163](https://github.com/matrix-org/synapse/issues/9163))
|
||||||
|
- Fix a long-standing bug where an internal server error was raised when attempting to preview an HTML document in an unknown character encoding. ([\#9164](https://github.com/matrix-org/synapse/issues/9164))
|
||||||
|
- Fix a long-standing bug where invalid data could cause errors when calculating the presentable room name for push. ([\#9165](https://github.com/matrix-org/synapse/issues/9165))
|
||||||
|
- Fix bug where we sometimes didn't detect that Redis connections had died, causing workers to not see new data. ([\#9218](https://github.com/matrix-org/synapse/issues/9218))
|
||||||
|
- Fix a bug where `None` was passed to Synapse modules instead of an empty dictionary if an empty module `config` block was provided in the homeserver config. ([\#9229](https://github.com/matrix-org/synapse/issues/9229))
|
||||||
|
- Fix a bug in the `make_room_admin` admin API where it failed if the admin with the greatest power level was not in the room. Contributed by Pankaj Yadav. ([\#9235](https://github.com/matrix-org/synapse/issues/9235))
|
||||||
|
- Prevent password hashes from getting dropped if a client failed threepid validation during a User Interactive Auth stage. Removes a workaround for an ancient bug in Riot Web <v0.7.4. ([\#9265](https://github.com/matrix-org/synapse/issues/9265))
|
||||||
|
- Fix single-sign-on when the endpoints are routed to synapse workers. ([\#9271](https://github.com/matrix-org/synapse/issues/9271))
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Add docs for using Gitea as OpenID provider. ([\#9134](https://github.com/matrix-org/synapse/issues/9134))
|
||||||
|
- Add link to Matrix VoIP tester for turn-howto. ([\#9135](https://github.com/matrix-org/synapse/issues/9135))
|
||||||
|
- Add notes on integrating with Facebook for SSO login. ([\#9244](https://github.com/matrix-org/synapse/issues/9244))
|
||||||
|
|
||||||
|
|
||||||
|
Deprecations and Removals
|
||||||
|
-------------------------
|
||||||
|
|
||||||
|
- The `service_url` parameter in `cas_config` is deprecated in favor of `public_baseurl`. ([\#9199](https://github.com/matrix-org/synapse/issues/9199))
|
||||||
|
- Add new endpoint `/_synapse/client/saml2` for SAML2 authentication callbacks, and deprecate the old endpoint `/_matrix/saml2`. ([\#9289](https://github.com/matrix-org/synapse/issues/9289))
|
||||||
|
|
||||||
|
|
||||||
|
Internal Changes
|
||||||
|
----------------
|
||||||
|
|
||||||
|
- Add tests to `test_user.UsersListTestCase` for List Users Admin API. ([\#9045](https://github.com/matrix-org/synapse/issues/9045))
|
||||||
|
- Various improvements to the federation client. ([\#9129](https://github.com/matrix-org/synapse/issues/9129))
|
||||||
|
- Speed up chain cover calculation when persisting a batch of state events at once. ([\#9176](https://github.com/matrix-org/synapse/issues/9176))
|
||||||
|
- Add a `long_description_type` to the package metadata. ([\#9180](https://github.com/matrix-org/synapse/issues/9180))
|
||||||
|
- Speed up batch insertion when using PostgreSQL. ([\#9181](https://github.com/matrix-org/synapse/issues/9181), [\#9188](https://github.com/matrix-org/synapse/issues/9188))
|
||||||
|
- Emit an error at startup if different Identity Providers are configured with the same `idp_id`. ([\#9184](https://github.com/matrix-org/synapse/issues/9184))
|
||||||
|
- Improve performance of concurrent use of `StreamIDGenerators`. ([\#9190](https://github.com/matrix-org/synapse/issues/9190))
|
||||||
|
- Add some missing source directories to the automatic linting script. ([\#9191](https://github.com/matrix-org/synapse/issues/9191))
|
||||||
|
- Precompute joined hosts and store in Redis. ([\#9198](https://github.com/matrix-org/synapse/issues/9198), [\#9227](https://github.com/matrix-org/synapse/issues/9227))
|
||||||
|
- Clean-up template loading code. ([\#9200](https://github.com/matrix-org/synapse/issues/9200))
|
||||||
|
- Fix the Python 3.5 old dependencies build. ([\#9217](https://github.com/matrix-org/synapse/issues/9217))
|
||||||
|
- Update `isort` to v5.7.0 to bypass a bug where it would disagree with `black` about formatting. ([\#9222](https://github.com/matrix-org/synapse/issues/9222))
|
||||||
|
- Add type hints to handlers code. ([\#9223](https://github.com/matrix-org/synapse/issues/9223), [\#9232](https://github.com/matrix-org/synapse/issues/9232))
|
||||||
|
- Fix Debian package building on Ubuntu 16.04 LTS (Xenial). ([\#9254](https://github.com/matrix-org/synapse/issues/9254))
|
||||||
|
- Minor performance improvement during TLS handshake. ([\#9255](https://github.com/matrix-org/synapse/issues/9255))
|
||||||
|
- Refactor the generation of summary text for email notifications. ([\#9260](https://github.com/matrix-org/synapse/issues/9260))
|
||||||
|
- Restore PyPy compatibility by not calling CPython-specific GC methods when under PyPy. ([\#9270](https://github.com/matrix-org/synapse/issues/9270))
|
||||||
|
|
||||||
|
|
||||||
|
Synapse 1.26.0 (2021-01-27)
|
||||||
|
===========================
|
||||||
|
|
||||||
|
This release brings a new schema version for Synapse and rolling back to a previous
|
||||||
|
version is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
|
||||||
|
on these changes and for general upgrade guidance.
|
||||||
|
|
||||||
|
No significant changes since 1.26.0rc2.
|
||||||
|
|
||||||
|
|
||||||
|
Synapse 1.26.0rc2 (2021-01-25)
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix receipts and account data not being sent down sync. Introduced in v1.26.0rc1. ([\#9193](https://github.com/matrix-org/synapse/issues/9193), [\#9195](https://github.com/matrix-org/synapse/issues/9195))
|
||||||
|
- Fix chain cover update to handle events with duplicate auth events. Introduced in v1.26.0rc1. ([\#9210](https://github.com/matrix-org/synapse/issues/9210))
|
||||||
|
|
||||||
|
|
||||||
|
Internal Changes
|
||||||
|
----------------
|
||||||
|
|
||||||
|
- Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration. ([\#9189](https://github.com/matrix-org/synapse/issues/9189))
|
||||||
|
- Bump minimum `psycopg2` version to v2.8. ([\#9204](https://github.com/matrix-org/synapse/issues/9204))
|
||||||
|
|
||||||
|
|
||||||
Synapse 1.26.0rc1 (2021-01-20)
|
Synapse 1.26.0rc1 (2021-01-20)
|
||||||
==============================
|
==============================
|
||||||
|
|
||||||
This release brings a new schema version for Synapse and rolling back to a previous
|
This release brings a new schema version for Synapse and rolling back to a previous
|
||||||
version is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
|
version is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
|
||||||
on these changes and for general upgrade guidance.
|
on these changes and for general upgrade guidance.
|
||||||
|
|
||||||
Features
|
Features
|
||||||
--------
|
--------
|
||||||
|
|||||||
271
CONTRIBUTING.md
271
CONTRIBUTING.md
@@ -1,4 +1,31 @@
|
|||||||
# Contributing code to Synapse
|
Welcome to Synapse
|
||||||
|
|
||||||
|
This document aims to get you started with contributing to this repo!
|
||||||
|
|
||||||
|
- [1. Who can contribute to Synapse?](#1-who-can-contribute-to-synapse)
|
||||||
|
- [2. What do I need?](#2-what-do-i-need)
|
||||||
|
- [3. Get the source.](#3-get-the-source)
|
||||||
|
- [4. Install the dependencies](#4-install-the-dependencies)
|
||||||
|
* [Under Unix (macOS, Linux, BSD, ...)](#under-unix-macos-linux-bsd-)
|
||||||
|
* [Under Windows](#under-windows)
|
||||||
|
- [5. Get in touch.](#5-get-in-touch)
|
||||||
|
- [6. Pick an issue.](#6-pick-an-issue)
|
||||||
|
- [7. Turn coffee and documentation into code and documentation!](#7-turn-coffee-and-documentation-into-code-and-documentation)
|
||||||
|
- [8. Test, test, test!](#8-test-test-test)
|
||||||
|
* [Run the linters.](#run-the-linters)
|
||||||
|
* [Run the unit tests.](#run-the-unit-tests)
|
||||||
|
* [Run the integration tests.](#run-the-integration-tests)
|
||||||
|
- [9. Submit your patch.](#9-submit-your-patch)
|
||||||
|
* [Changelog](#changelog)
|
||||||
|
+ [How do I know what to call the changelog file before I create the PR?](#how-do-i-know-what-to-call-the-changelog-file-before-i-create-the-pr)
|
||||||
|
+ [Debian changelog](#debian-changelog)
|
||||||
|
* [Sign off](#sign-off)
|
||||||
|
- [10. Turn feedback into better code.](#10-turn-feedback-into-better-code)
|
||||||
|
- [11. Find a new issue.](#11-find-a-new-issue)
|
||||||
|
- [Notes for maintainers on merging PRs etc](#notes-for-maintainers-on-merging-prs-etc)
|
||||||
|
- [Conclusion](#conclusion)
|
||||||
|
|
||||||
|
# 1. Who can contribute to Synapse?
|
||||||
|
|
||||||
Everyone is welcome to contribute code to [matrix.org
|
Everyone is welcome to contribute code to [matrix.org
|
||||||
projects](https://github.com/matrix-org), provided that they are willing to
|
projects](https://github.com/matrix-org), provided that they are willing to
|
||||||
@@ -9,70 +36,179 @@ license the code under the same terms as the project's overall 'outbound'
|
|||||||
license - in our case, this is almost always Apache Software License v2 (see
|
license - in our case, this is almost always Apache Software License v2 (see
|
||||||
[LICENSE](LICENSE)).
|
[LICENSE](LICENSE)).
|
||||||
|
|
||||||
## How to contribute
|
# 2. What do I need?
|
||||||
|
|
||||||
|
The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://wiki.python.org/moin/BeginnersGuide/Download).
|
||||||
|
|
||||||
|
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
|
||||||
|
|
||||||
|
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
|
||||||
|
|
||||||
|
|
||||||
|
# 3. Get the source.
|
||||||
|
|
||||||
The preferred and easiest way to contribute changes is to fork the relevant
|
The preferred and easiest way to contribute changes is to fork the relevant
|
||||||
project on github, and then [create a pull request](
|
project on GitHub, and then [create a pull request](
|
||||||
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
|
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
|
||||||
changes into our repo.
|
changes into our repo.
|
||||||
|
|
||||||
Some other points to follow:
|
Please base your changes on the `develop` branch.
|
||||||
|
|
||||||
* Please base your changes on the `develop` branch.
|
```sh
|
||||||
|
git clone git@github.com:YOUR_GITHUB_USER_NAME/synapse.git
|
||||||
|
git checkout develop
|
||||||
|
```
|
||||||
|
|
||||||
* Please follow the [code style requirements](#code-style).
|
If you need help getting started with git, this is beyond the scope of the document, but you
|
||||||
|
can find many good git tutorials on the web.
|
||||||
|
|
||||||
* Please include a [changelog entry](#changelog) with each PR.
|
# 4. Install the dependencies
|
||||||
|
|
||||||
* Please [sign off](#sign-off) your contribution.
|
## Under Unix (macOS, Linux, BSD, ...)
|
||||||
|
|
||||||
* Please keep an eye on the pull request for feedback from the [continuous
|
Once you have installed Python 3 and added the source, please open a terminal and
|
||||||
integration system](#continuous-integration-and-testing) and try to fix any
|
setup a *virtualenv*, as follows:
|
||||||
errors that come up.
|
|
||||||
|
|
||||||
* If you need to [update your PR](#updating-your-pull-request), just add new
|
```sh
|
||||||
commits to your branch rather than rebasing.
|
cd path/where/you/have/cloned/the/repository
|
||||||
|
python3 -m venv ./env
|
||||||
|
source ./env/bin/activate
|
||||||
|
pip install -e ".[all,lint,mypy,test]"
|
||||||
|
pip install tox
|
||||||
|
```
|
||||||
|
|
||||||
## Code style
|
This will install the developer dependencies for the project.
|
||||||
|
|
||||||
|
## Under Windows
|
||||||
|
|
||||||
|
TBD
|
||||||
|
|
||||||
|
|
||||||
|
# 5. Get in touch.
|
||||||
|
|
||||||
|
Join our developer community on Matrix: #synapse-dev:matrix.org !
|
||||||
|
|
||||||
|
|
||||||
|
# 6. Pick an issue.
|
||||||
|
|
||||||
|
Fix your favorite problem or perhaps find a [Good First Issue](https://github.com/matrix-org/synapse/issues?q=is%3Aopen+is%3Aissue+label%3A%22Good+First+Issue%22)
|
||||||
|
to work on.
|
||||||
|
|
||||||
|
|
||||||
|
# 7. Turn coffee and documentation into code and documentation!
|
||||||
|
|
||||||
Synapse's code style is documented [here](docs/code_style.md). Please follow
|
Synapse's code style is documented [here](docs/code_style.md). Please follow
|
||||||
it, including the conventions for the [sample configuration
|
it, including the conventions for the [sample configuration
|
||||||
file](docs/code_style.md#configuration-file-format).
|
file](docs/code_style.md#configuration-file-format).
|
||||||
|
|
||||||
Many of the conventions are enforced by scripts which are run as part of the
|
There is a growing amount of documentation located in the [docs](docs)
|
||||||
[continuous integration system](#continuous-integration-and-testing). To help
|
directory. This documentation is intended primarily for sysadmins running their
|
||||||
check if you have followed the code style, you can run `scripts-dev/lint.sh`
|
own Synapse instance, as well as developers interacting externally with
|
||||||
locally. You'll need python 3.6 or later, and to install a number of tools:
|
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
|
||||||
|
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
|
||||||
|
regarding Synapse's Admin API, which is used mostly by sysadmins and external
|
||||||
|
service developers.
|
||||||
|
|
||||||
```
|
If you add new files added to either of these folders, please use [GitHub-Flavoured
|
||||||
# Install the dependencies
|
Markdown](https://guides.github.com/features/mastering-markdown/).
|
||||||
pip install -e ".[lint,mypy]"
|
|
||||||
|
|
||||||
# Run the linter script
|
Some documentation also exists in [Synapse's GitHub
|
||||||
|
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
|
||||||
|
contributed to by community authors.
|
||||||
|
|
||||||
|
|
||||||
|
# 8. Test, test, test!
|
||||||
|
<a name="test-test-test"></a>
|
||||||
|
|
||||||
|
While you're developing and before submitting a patch, you'll
|
||||||
|
want to test your code.
|
||||||
|
|
||||||
|
## Run the linters.
|
||||||
|
|
||||||
|
The linters look at your code and do two things:
|
||||||
|
|
||||||
|
- ensure that your code follows the coding style adopted by the project;
|
||||||
|
- catch a number of errors in your code.
|
||||||
|
|
||||||
|
They're pretty fast, don't hesitate!
|
||||||
|
|
||||||
|
```sh
|
||||||
|
source ./env/bin/activate
|
||||||
./scripts-dev/lint.sh
|
./scripts-dev/lint.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
**Note that the script does not just test/check, but also reformats code, so you
|
Note that this script *will modify your files* to fix styling errors.
|
||||||
may wish to ensure any new code is committed first**.
|
Make sure that you have saved all your files.
|
||||||
|
|
||||||
By default, this script checks all files and can take some time; if you alter
|
If you wish to restrict the linters to only the files changed since the last commit
|
||||||
only certain files, you might wish to specify paths as arguments to reduce the
|
(much faster!), you can instead run:
|
||||||
run-time:
|
|
||||||
|
|
||||||
|
```sh
|
||||||
|
source ./env/bin/activate
|
||||||
|
./scripts-dev/lint.sh -d
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Or if you know exactly which files you wish to lint, you can instead run:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
source ./env/bin/activate
|
||||||
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
||||||
```
|
```
|
||||||
|
|
||||||
You can also provide the `-d` option, which will lint the files that have been
|
## Run the unit tests.
|
||||||
changed since the last git commit. This will often be significantly faster than
|
|
||||||
linting the whole codebase.
|
|
||||||
|
|
||||||
Before pushing new changes, ensure they don't produce linting errors. Commit any
|
The unit tests run parts of Synapse, including your changes, to see if anything
|
||||||
files that were corrected.
|
was broken. They are slower than the linters but will typically catch more errors.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
source ./env/bin/activate
|
||||||
|
trial tests
|
||||||
|
```
|
||||||
|
|
||||||
|
If you wish to only run *some* unit tests, you may specify
|
||||||
|
another module instead of `tests` - or a test class or a method:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
source ./env/bin/activate
|
||||||
|
trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
|
||||||
|
```
|
||||||
|
|
||||||
|
If your tests fail, you may wish to look at the logs:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
less _trial_temp/test.log
|
||||||
|
```
|
||||||
|
|
||||||
|
## Run the integration tests.
|
||||||
|
|
||||||
|
The integration tests are a more comprehensive suite of tests. They
|
||||||
|
run a full version of Synapse, including your changes, to check if
|
||||||
|
anything was broken. They are slower than the unit tests but will
|
||||||
|
typically catch more errors.
|
||||||
|
|
||||||
|
The following command will let you run the integration test with the most common
|
||||||
|
configuration:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:py37
|
||||||
|
```
|
||||||
|
|
||||||
|
This configuration should generally cover your needs. For more details about other configurations, see [documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
|
||||||
|
|
||||||
|
|
||||||
|
# 9. Submit your patch.
|
||||||
|
|
||||||
|
Once you're happy with your patch, it's time to prepare a Pull Request.
|
||||||
|
|
||||||
|
To prepare a Pull Request, please:
|
||||||
|
|
||||||
|
1. verify that [all the tests pass](#test-test-test), including the coding style;
|
||||||
|
2. [sign off](#sign-off) your contribution;
|
||||||
|
3. `git push` your commit to your fork of Synapse;
|
||||||
|
4. on GitHub, [create the Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request);
|
||||||
|
5. add a [changelog entry](#changelog) and push it to your Pull Request;
|
||||||
|
6. for most contributors, that's all - however, if you are a member of the organization `matrix-org`, on GitHub, please request a review from `matrix.org / Synapse Core`.
|
||||||
|
|
||||||
Please ensure your changes match the cosmetic style of the existing project,
|
|
||||||
and **never** mix cosmetic and functional changes in the same commit, as it
|
|
||||||
makes it horribly hard to review otherwise.
|
|
||||||
|
|
||||||
## Changelog
|
## Changelog
|
||||||
|
|
||||||
@@ -156,24 +292,6 @@ directory, you will need both a regular newsfragment *and* an entry in the
|
|||||||
debian changelog. (Though typically such changes should be submitted as two
|
debian changelog. (Though typically such changes should be submitted as two
|
||||||
separate pull requests.)
|
separate pull requests.)
|
||||||
|
|
||||||
## Documentation
|
|
||||||
|
|
||||||
There is a growing amount of documentation located in the [docs](docs)
|
|
||||||
directory. This documentation is intended primarily for sysadmins running their
|
|
||||||
own Synapse instance, as well as developers interacting externally with
|
|
||||||
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
|
|
||||||
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
|
|
||||||
regarding Synapse's Admin API, which is used mostly by sysadmins and external
|
|
||||||
service developers.
|
|
||||||
|
|
||||||
New files added to both folders should be written in [Github-Flavoured
|
|
||||||
Markdown](https://guides.github.com/features/mastering-markdown/), and attempts
|
|
||||||
should be made to migrate existing documents to markdown where possible.
|
|
||||||
|
|
||||||
Some documentation also exists in [Synapse's Github
|
|
||||||
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
|
|
||||||
contributed to by community authors.
|
|
||||||
|
|
||||||
## Sign off
|
## Sign off
|
||||||
|
|
||||||
In order to have a concrete record that your contribution is intentional
|
In order to have a concrete record that your contribution is intentional
|
||||||
@@ -240,47 +358,36 @@ Git allows you to add this signoff automatically when using the `-s`
|
|||||||
flag to `git commit`, which uses the name and email set in your
|
flag to `git commit`, which uses the name and email set in your
|
||||||
`user.name` and `user.email` git configs.
|
`user.name` and `user.email` git configs.
|
||||||
|
|
||||||
## Continuous integration and testing
|
|
||||||
|
|
||||||
[Buildkite](https://buildkite.com/matrix-dot-org/synapse) will automatically
|
# 10. Turn feedback into better code.
|
||||||
run a series of checks and tests against any PR which is opened against the
|
|
||||||
project; if your change breaks the build, this will be shown in GitHub, with
|
|
||||||
links to the build results. If your build fails, please try to fix the errors
|
|
||||||
and update your branch.
|
|
||||||
|
|
||||||
To run unit tests in a local development environment, you can use:
|
Once the Pull Request is opened, you will see a few things:
|
||||||
|
|
||||||
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
|
1. our automated CI (Continuous Integration) pipeline will run (again) the linters, the unit tests, the integration tests and more;
|
||||||
for SQLite-backed Synapse on Python 3.5.
|
2. one or more of the developers will take a look at your Pull Request and offer feedback.
|
||||||
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
|
||||||
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
|
|
||||||
(requires a running local PostgreSQL with access to create databases).
|
|
||||||
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
|
|
||||||
(requires Docker). Entirely self-contained, recommended if you don't want to
|
|
||||||
set up PostgreSQL yourself.
|
|
||||||
|
|
||||||
Docker images are available for running the integration tests (SyTest) locally,
|
From this point, you should:
|
||||||
see the [documentation in the SyTest repo](
|
|
||||||
https://github.com/matrix-org/sytest/blob/develop/docker/README.md) for more
|
|
||||||
information.
|
|
||||||
|
|
||||||
## Updating your pull request
|
1. Look at the results of the CI pipeline.
|
||||||
|
- If there is any error, fix the error.
|
||||||
|
2. If a developer has requested changes, make these changes and let us know if it is ready for a developer to review again.
|
||||||
|
3. Create a new commit with the changes.
|
||||||
|
- Please do NOT overwrite the history. New commits make the reviewer's life easier.
|
||||||
|
- Push this commits to your Pull Request.
|
||||||
|
4. Back to 1.
|
||||||
|
|
||||||
If you decide to make changes to your pull request - perhaps to address issues
|
Once both the CI and the developers are happy, the patch will be merged into Synapse and released shortly!
|
||||||
raised in a review, or to fix problems highlighted by [continuous
|
|
||||||
integration](#continuous-integration-and-testing) - just add new commits to your
|
|
||||||
branch, and push to GitHub. The pull request will automatically be updated.
|
|
||||||
|
|
||||||
Please **avoid** rebasing your branch, especially once the PR has been
|
# 11. Find a new issue.
|
||||||
reviewed: doing so makes it very difficult for a reviewer to see what has
|
|
||||||
changed since a previous review.
|
|
||||||
|
|
||||||
## Notes for maintainers on merging PRs etc
|
By now, you know the drill!
|
||||||
|
|
||||||
|
# Notes for maintainers on merging PRs etc
|
||||||
|
|
||||||
There are some notes for those with commit access to the project on how we
|
There are some notes for those with commit access to the project on how we
|
||||||
manage git [here](docs/dev/git.md).
|
manage git [here](docs/dev/git.md).
|
||||||
|
|
||||||
## Conclusion
|
# Conclusion
|
||||||
|
|
||||||
That's it! Matrix is a very open and collaborative project as you might expect
|
That's it! Matrix is a very open and collaborative project as you might expect
|
||||||
given our obsession with open communication. If we're going to successfully
|
given our obsession with open communication. If we're going to successfully
|
||||||
|
|||||||
20
INSTALL.md
20
INSTALL.md
@@ -151,29 +151,15 @@ sudo pacman -S base-devel python python-pip \
|
|||||||
|
|
||||||
##### CentOS/Fedora
|
##### CentOS/Fedora
|
||||||
|
|
||||||
Installing prerequisites on CentOS 8 or Fedora>26:
|
Installing prerequisites on CentOS or Fedora Linux:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||||
libwebp-devel tk-devel redhat-rpm-config \
|
libwebp-devel libxml2-devel libxslt-devel libpq-devel \
|
||||||
python3-virtualenv libffi-devel openssl-devel
|
python3-virtualenv libffi-devel openssl-devel python3-devel
|
||||||
sudo dnf groupinstall "Development Tools"
|
sudo dnf groupinstall "Development Tools"
|
||||||
```
|
```
|
||||||
|
|
||||||
Installing prerequisites on CentOS 7 or Fedora<=25:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
|
||||||
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
|
|
||||||
python3-virtualenv libffi-devel openssl-devel
|
|
||||||
sudo yum groupinstall "Development Tools"
|
|
||||||
```
|
|
||||||
|
|
||||||
Note that Synapse does not support versions of SQLite before 3.11, and CentOS 7
|
|
||||||
uses SQLite 3.7. You may be able to work around this by installing a more
|
|
||||||
recent SQLite version, but it is recommended that you instead use a Postgres
|
|
||||||
database: see [docs/postgres.md](docs/postgres.md).
|
|
||||||
|
|
||||||
##### macOS
|
##### macOS
|
||||||
|
|
||||||
Installing prerequisites on macOS:
|
Installing prerequisites on macOS:
|
||||||
|
|||||||
75
UPGRADE.rst
75
UPGRADE.rst
@@ -85,6 +85,79 @@ for example:
|
|||||||
wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
||||||
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
||||||
|
|
||||||
|
Upgrading to v1.29.0
|
||||||
|
====================
|
||||||
|
|
||||||
|
Requirement for X-Forwarded-Proto header
|
||||||
|
----------------------------------------
|
||||||
|
|
||||||
|
When using Synapse with a reverse proxy (in particular, when using the
|
||||||
|
`x_forwarded` option on an HTTP listener), Synapse now expects to receive an
|
||||||
|
`X-Forwarded-Proto` header on incoming HTTP requests. If it is not set, Synapse
|
||||||
|
will log a warning on each received request.
|
||||||
|
|
||||||
|
To avoid the warning, administrators using a reverse proxy should ensure that
|
||||||
|
the reverse proxy sets `X-Forwarded-Proto` header to `https` or `http` to
|
||||||
|
indicate the protocol used by the client. See the `reverse proxy documentation
|
||||||
|
<docs/reverse_proxy.md>`_, where the example configurations have been updated to
|
||||||
|
show how to set this header.
|
||||||
|
|
||||||
|
(Users of `Caddy <https://caddyserver.com/>`_ are unaffected, since we believe it
|
||||||
|
sets `X-Forwarded-Proto` by default.)
|
||||||
|
|
||||||
|
Upgrading to v1.27.0
|
||||||
|
====================
|
||||||
|
|
||||||
|
Changes to callback URI for OAuth2 / OpenID Connect and SAML2
|
||||||
|
-------------------------------------------------------------
|
||||||
|
|
||||||
|
This version changes the URI used for callbacks from OAuth2 and SAML2 identity providers:
|
||||||
|
|
||||||
|
* If your server is configured for single sign-on via an OpenID Connect or OAuth2 identity
|
||||||
|
provider, you will need to add ``[synapse public baseurl]/_synapse/client/oidc/callback``
|
||||||
|
to the list of permitted "redirect URIs" at the identity provider.
|
||||||
|
|
||||||
|
See `docs/openid.md <docs/openid.md>`_ for more information on setting up OpenID
|
||||||
|
Connect.
|
||||||
|
|
||||||
|
* If your server is configured for single sign-on via a SAML2 identity provider, you will
|
||||||
|
need to add ``[synapse public baseurl]/_synapse/client/saml2/authn_response`` as a permitted
|
||||||
|
"ACS location" (also known as "allowed callback URLs") at the identity provider.
|
||||||
|
|
||||||
|
Changes to HTML templates
|
||||||
|
-------------------------
|
||||||
|
|
||||||
|
The HTML templates for SSO and email notifications now have `Jinja2's autoescape <https://jinja.palletsprojects.com/en/2.11.x/api/#autoescaping>`_
|
||||||
|
enabled for files ending in ``.html``, ``.htm``, and ``.xml``. If you have customised
|
||||||
|
these templates and see issues when viewing them you might need to update them.
|
||||||
|
It is expected that most configurations will need no changes.
|
||||||
|
|
||||||
|
If you have customised the templates *names* for these templates, it is recommended
|
||||||
|
to verify they end in ``.html`` to ensure autoescape is enabled.
|
||||||
|
|
||||||
|
The above applies to the following templates:
|
||||||
|
|
||||||
|
* ``add_threepid.html``
|
||||||
|
* ``add_threepid_failure.html``
|
||||||
|
* ``add_threepid_success.html``
|
||||||
|
* ``notice_expiry.html``
|
||||||
|
* ``notice_expiry.html``
|
||||||
|
* ``notif_mail.html`` (which, by default, includes ``room.html`` and ``notif.html``)
|
||||||
|
* ``password_reset.html``
|
||||||
|
* ``password_reset_confirmation.html``
|
||||||
|
* ``password_reset_failure.html``
|
||||||
|
* ``password_reset_success.html``
|
||||||
|
* ``registration.html``
|
||||||
|
* ``registration_failure.html``
|
||||||
|
* ``registration_success.html``
|
||||||
|
* ``sso_account_deactivated.html``
|
||||||
|
* ``sso_auth_bad_user.html``
|
||||||
|
* ``sso_auth_confirm.html``
|
||||||
|
* ``sso_auth_success.html``
|
||||||
|
* ``sso_error.html``
|
||||||
|
* ``sso_login_idp_picker.html``
|
||||||
|
* ``sso_redirect_confirm.html``
|
||||||
|
|
||||||
Upgrading to v1.26.0
|
Upgrading to v1.26.0
|
||||||
====================
|
====================
|
||||||
|
|
||||||
@@ -198,7 +271,7 @@ shown below:
|
|||||||
|
|
||||||
return {"localpart": localpart}
|
return {"localpart": localpart}
|
||||||
|
|
||||||
Removal historical Synapse Admin API
|
Removal historical Synapse Admin API
|
||||||
------------------------------------
|
------------------------------------
|
||||||
|
|
||||||
Historically, the Synapse Admin API has been accessible under:
|
Historically, the Synapse Admin API has been accessible under:
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
Add tests to `test_user.UsersListTestCase` for List Users Admin API.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Various improvements to the federation client.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add link to Matrix VoIP tester for turn-howto.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled).
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Speed up chain cover calculation when persisting a batch of state events at once.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add a `long_description_type` to the package metadata.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Speed up batch insertion when using PostgreSQL.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Emit an error at startup if different Identity Providers are configured with the same `idp_id`.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Speed up batch insertion when using PostgreSQL.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Improve performance of concurrent use of `StreamIDGenerators`.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add some missing source directories to the automatic linting script.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.
|
|
||||||
@@ -92,7 +92,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
return self.config["user"].split(":")[1]
|
return self.config["user"].split(":")[1]
|
||||||
|
|
||||||
def do_config(self, line):
|
def do_config(self, line):
|
||||||
""" Show the config for this client: "config"
|
"""Show the config for this client: "config"
|
||||||
Edit a key value mapping: "config key value" e.g. "config token 1234"
|
Edit a key value mapping: "config key value" e.g. "config token 1234"
|
||||||
Config variables:
|
Config variables:
|
||||||
user: The username to auth with.
|
user: The username to auth with.
|
||||||
@@ -360,7 +360,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
print(e)
|
print(e)
|
||||||
|
|
||||||
def do_topic(self, line):
|
def do_topic(self, line):
|
||||||
""""topic [set|get] <roomid> [<newtopic>]"
|
""" "topic [set|get] <roomid> [<newtopic>]"
|
||||||
Set the topic for a room: topic set <roomid> <newtopic>
|
Set the topic for a room: topic set <roomid> <newtopic>
|
||||||
Get the topic for a room: topic get <roomid>
|
Get the topic for a room: topic get <roomid>
|
||||||
"""
|
"""
|
||||||
@@ -690,7 +690,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
self._do_presence_state(2, line)
|
self._do_presence_state(2, line)
|
||||||
|
|
||||||
def _parse(self, line, keys, force_keys=False):
|
def _parse(self, line, keys, force_keys=False):
|
||||||
""" Parses the given line.
|
"""Parses the given line.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
line : The line to parse
|
line : The line to parse
|
||||||
@@ -721,7 +721,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
query_params={"access_token": None},
|
query_params={"access_token": None},
|
||||||
alt_text=None,
|
alt_text=None,
|
||||||
):
|
):
|
||||||
""" Runs an HTTP request and pretty prints the output.
|
"""Runs an HTTP request and pretty prints the output.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
method: HTTP method
|
method: HTTP method
|
||||||
|
|||||||
@@ -23,11 +23,10 @@ from twisted.web.http_headers import Headers
|
|||||||
|
|
||||||
|
|
||||||
class HttpClient:
|
class HttpClient:
|
||||||
""" Interface for talking json over http
|
"""Interface for talking json over http"""
|
||||||
"""
|
|
||||||
|
|
||||||
def put_json(self, url, data):
|
def put_json(self, url, data):
|
||||||
""" Sends the specifed json data using PUT
|
"""Sends the specifed json data using PUT
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
url (str): The URL to PUT data to.
|
url (str): The URL to PUT data to.
|
||||||
@@ -41,7 +40,7 @@ class HttpClient:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def get_json(self, url, args=None):
|
def get_json(self, url, args=None):
|
||||||
""" Gets some json from the given host homeserver and path
|
"""Gets some json from the given host homeserver and path
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
url (str): The URL to GET data from.
|
url (str): The URL to GET data from.
|
||||||
@@ -58,7 +57,7 @@ class HttpClient:
|
|||||||
|
|
||||||
|
|
||||||
class TwistedHttpClient(HttpClient):
|
class TwistedHttpClient(HttpClient):
|
||||||
""" Wrapper around the twisted HTTP client api.
|
"""Wrapper around the twisted HTTP client api.
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
agent (twisted.web.client.Agent): The twisted Agent used to send the
|
agent (twisted.web.client.Agent): The twisted Agent used to send the
|
||||||
@@ -87,8 +86,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
defer.returnValue(json.loads(body))
|
defer.returnValue(json.loads(body))
|
||||||
|
|
||||||
def _create_put_request(self, url, json_data, headers_dict={}):
|
def _create_put_request(self, url, json_data, headers_dict={}):
|
||||||
""" Wrapper of _create_request to issue a PUT request
|
"""Wrapper of _create_request to issue a PUT request"""
|
||||||
"""
|
|
||||||
|
|
||||||
if "Content-Type" not in headers_dict:
|
if "Content-Type" not in headers_dict:
|
||||||
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
|
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
|
||||||
@@ -98,8 +96,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def _create_get_request(self, url, headers_dict={}):
|
def _create_get_request(self, url, headers_dict={}):
|
||||||
""" Wrapper of _create_request to issue a GET request
|
"""Wrapper of _create_request to issue a GET request"""
|
||||||
"""
|
|
||||||
return self._create_request("GET", url, headers_dict=headers_dict)
|
return self._create_request("GET", url, headers_dict=headers_dict)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@@ -127,8 +124,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _create_request(self, method, url, producer=None, headers_dict={}):
|
def _create_request(self, method, url, producer=None, headers_dict={}):
|
||||||
""" Creates and sends a request to the given url
|
"""Creates and sends a request to the given url"""
|
||||||
"""
|
|
||||||
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
||||||
|
|
||||||
retries_left = 5
|
retries_left = 5
|
||||||
@@ -185,8 +181,7 @@ class _RawProducer:
|
|||||||
|
|
||||||
|
|
||||||
class _JsonProducer:
|
class _JsonProducer:
|
||||||
""" Used by the twisted http client to create the HTTP body from json
|
"""Used by the twisted http client to create the HTTP body from json"""
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, jsn):
|
def __init__(self, jsn):
|
||||||
self.data = jsn
|
self.data = jsn
|
||||||
|
|||||||
@@ -63,8 +63,7 @@ class CursesStdIO:
|
|||||||
self.redraw()
|
self.redraw()
|
||||||
|
|
||||||
def redraw(self):
|
def redraw(self):
|
||||||
""" method for redisplaying lines
|
"""method for redisplaying lines based on internal list of lines"""
|
||||||
based on internal list of lines """
|
|
||||||
|
|
||||||
self.stdscr.clear()
|
self.stdscr.clear()
|
||||||
self.paintStatus(self.statusText)
|
self.paintStatus(self.statusText)
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ def excpetion_errback(failure):
|
|||||||
|
|
||||||
|
|
||||||
class InputOutput:
|
class InputOutput:
|
||||||
""" This is responsible for basic I/O so that a user can interact with
|
"""This is responsible for basic I/O so that a user can interact with
|
||||||
the example app.
|
the example app.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -68,8 +68,7 @@ class InputOutput:
|
|||||||
self.server = server
|
self.server = server
|
||||||
|
|
||||||
def on_line(self, line):
|
def on_line(self, line):
|
||||||
""" This is where we process commands.
|
"""This is where we process commands."""
|
||||||
"""
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
m = re.match(r"^join (\S+)$", line)
|
m = re.match(r"^join (\S+)$", line)
|
||||||
@@ -133,7 +132,7 @@ class IOLoggerHandler(logging.Handler):
|
|||||||
|
|
||||||
|
|
||||||
class Room:
|
class Room:
|
||||||
""" Used to store (in memory) the current membership state of a room, and
|
"""Used to store (in memory) the current membership state of a room, and
|
||||||
which home servers we should send PDUs associated with the room to.
|
which home servers we should send PDUs associated with the room to.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -148,8 +147,7 @@ class Room:
|
|||||||
self.have_got_metadata = False
|
self.have_got_metadata = False
|
||||||
|
|
||||||
def add_participant(self, participant):
|
def add_participant(self, participant):
|
||||||
""" Someone has joined the room
|
"""Someone has joined the room"""
|
||||||
"""
|
|
||||||
self.participants.add(participant)
|
self.participants.add(participant)
|
||||||
self.invited.discard(participant)
|
self.invited.discard(participant)
|
||||||
|
|
||||||
@@ -160,14 +158,13 @@ class Room:
|
|||||||
self.oldest_server = server
|
self.oldest_server = server
|
||||||
|
|
||||||
def add_invited(self, invitee):
|
def add_invited(self, invitee):
|
||||||
""" Someone has been invited to the room
|
"""Someone has been invited to the room"""
|
||||||
"""
|
|
||||||
self.invited.add(invitee)
|
self.invited.add(invitee)
|
||||||
self.servers.add(origin_from_ucid(invitee))
|
self.servers.add(origin_from_ucid(invitee))
|
||||||
|
|
||||||
|
|
||||||
class HomeServer(ReplicationHandler):
|
class HomeServer(ReplicationHandler):
|
||||||
""" A very basic home server implentation that allows people to join a
|
"""A very basic home server implentation that allows people to join a
|
||||||
room and then invite other people.
|
room and then invite other people.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -181,8 +178,7 @@ class HomeServer(ReplicationHandler):
|
|||||||
self.output = output
|
self.output = output
|
||||||
|
|
||||||
def on_receive_pdu(self, pdu):
|
def on_receive_pdu(self, pdu):
|
||||||
""" We just received a PDU
|
"""We just received a PDU"""
|
||||||
"""
|
|
||||||
pdu_type = pdu.pdu_type
|
pdu_type = pdu.pdu_type
|
||||||
|
|
||||||
if pdu_type == "sy.room.message":
|
if pdu_type == "sy.room.message":
|
||||||
@@ -199,23 +195,20 @@ class HomeServer(ReplicationHandler):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def _on_message(self, pdu):
|
def _on_message(self, pdu):
|
||||||
""" We received a message
|
"""We received a message"""
|
||||||
"""
|
|
||||||
self.output.print_line(
|
self.output.print_line(
|
||||||
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||||
)
|
)
|
||||||
|
|
||||||
def _on_join(self, context, joinee):
|
def _on_join(self, context, joinee):
|
||||||
""" Someone has joined a room, either a remote user or a local user
|
"""Someone has joined a room, either a remote user or a local user"""
|
||||||
"""
|
|
||||||
room = self._get_or_create_room(context)
|
room = self._get_or_create_room(context)
|
||||||
room.add_participant(joinee)
|
room.add_participant(joinee)
|
||||||
|
|
||||||
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
||||||
|
|
||||||
def _on_invite(self, origin, context, invitee):
|
def _on_invite(self, origin, context, invitee):
|
||||||
""" Someone has been invited
|
"""Someone has been invited"""
|
||||||
"""
|
|
||||||
room = self._get_or_create_room(context)
|
room = self._get_or_create_room(context)
|
||||||
room.add_invited(invitee)
|
room.add_invited(invitee)
|
||||||
|
|
||||||
@@ -228,8 +221,7 @@ class HomeServer(ReplicationHandler):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def send_message(self, room_name, sender, body):
|
def send_message(self, room_name, sender, body):
|
||||||
""" Send a message to a room!
|
"""Send a message to a room!"""
|
||||||
"""
|
|
||||||
destinations = yield self.get_servers_for_context(room_name)
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -247,8 +239,7 @@ class HomeServer(ReplicationHandler):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def join_room(self, room_name, sender, joinee):
|
def join_room(self, room_name, sender, joinee):
|
||||||
""" Join a room!
|
"""Join a room!"""
|
||||||
"""
|
|
||||||
self._on_join(room_name, joinee)
|
self._on_join(room_name, joinee)
|
||||||
|
|
||||||
destinations = yield self.get_servers_for_context(room_name)
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
@@ -269,8 +260,7 @@ class HomeServer(ReplicationHandler):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def invite_to_room(self, room_name, sender, invitee):
|
def invite_to_room(self, room_name, sender, invitee):
|
||||||
""" Invite someone to a room!
|
"""Invite someone to a room!"""
|
||||||
"""
|
|
||||||
self._on_invite(self.server_name, room_name, invitee)
|
self._on_invite(self.server_name, room_name, invitee)
|
||||||
|
|
||||||
destinations = yield self.get_servers_for_context(room_name)
|
destinations = yield self.get_servers_for_context(room_name)
|
||||||
|
|||||||
@@ -193,15 +193,12 @@ class TrivialXmppClient:
|
|||||||
time.sleep(7)
|
time.sleep(7)
|
||||||
print("SSRC spammer started")
|
print("SSRC spammer started")
|
||||||
while self.running:
|
while self.running:
|
||||||
ssrcMsg = (
|
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
|
||||||
"<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
|
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||||
% {
|
"nick": self.userId,
|
||||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
"assrc": self.ssrcs["audio"],
|
||||||
"nick": self.userId,
|
"vssrc": self.ssrcs["video"],
|
||||||
"assrc": self.ssrcs["audio"],
|
}
|
||||||
"vssrc": self.ssrcs["video"],
|
|
||||||
}
|
|
||||||
)
|
|
||||||
res = self.sendIq(ssrcMsg)
|
res = self.sendIq(ssrcMsg)
|
||||||
print("reply from ssrc announce: ", res)
|
print("reply from ssrc announce: ", res)
|
||||||
time.sleep(10)
|
time.sleep(10)
|
||||||
|
|||||||
10
debian/build_virtualenv
vendored
10
debian/build_virtualenv
vendored
@@ -33,11 +33,13 @@ esac
|
|||||||
# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
|
# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
|
||||||
# than the 2/3 compatible `virtualenv`.
|
# than the 2/3 compatible `virtualenv`.
|
||||||
|
|
||||||
|
# Pin pip to 20.3.4 to fix breakage in 21.0 on py3.5 (xenial)
|
||||||
|
|
||||||
dh_virtualenv \
|
dh_virtualenv \
|
||||||
--install-suffix "matrix-synapse" \
|
--install-suffix "matrix-synapse" \
|
||||||
--builtin-venv \
|
--builtin-venv \
|
||||||
--python "$SNAKE" \
|
--python "$SNAKE" \
|
||||||
--upgrade-pip \
|
--upgrade-pip-to="20.3.4" \
|
||||||
--preinstall="lxml" \
|
--preinstall="lxml" \
|
||||||
--preinstall="mock" \
|
--preinstall="mock" \
|
||||||
--extra-pip-arg="--no-cache-dir" \
|
--extra-pip-arg="--no-cache-dir" \
|
||||||
@@ -56,10 +58,10 @@ trap "rm -r $tmpdir" EXIT
|
|||||||
cp -r tests "$tmpdir"
|
cp -r tests "$tmpdir"
|
||||||
|
|
||||||
PYTHONPATH="$tmpdir" \
|
PYTHONPATH="$tmpdir" \
|
||||||
"${TARGET_PYTHON}" -B -m twisted.trial --reporter=text -j2 tests
|
"${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
|
||||||
|
|
||||||
# build the config file
|
# build the config file
|
||||||
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_config" \
|
"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_config" \
|
||||||
--config-dir="/etc/matrix-synapse" \
|
--config-dir="/etc/matrix-synapse" \
|
||||||
--data-dir="/var/lib/matrix-synapse" |
|
--data-dir="/var/lib/matrix-synapse" |
|
||||||
perl -pe '
|
perl -pe '
|
||||||
@@ -85,7 +87,7 @@ PYTHONPATH="$tmpdir" \
|
|||||||
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
|
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
|
||||||
|
|
||||||
# build the log config file
|
# build the log config file
|
||||||
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_log_config" \
|
"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_log_config" \
|
||||||
--output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
|
--output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
|
||||||
|
|
||||||
# add a dependency on the right version of python to substvars.
|
# add a dependency on the right version of python to substvars.
|
||||||
|
|||||||
34
debian/changelog
vendored
34
debian/changelog
vendored
@@ -1,8 +1,38 @@
|
|||||||
matrix-synapse-py3 (1.25.0ubuntu1) UNRELEASED; urgency=medium
|
matrix-synapse-py3 (1.29.0) stable; urgency=medium
|
||||||
|
|
||||||
|
[ Jonathan de Jong ]
|
||||||
|
* Remove the python -B flag (don't generate bytecode) in scripts and documentation.
|
||||||
|
|
||||||
|
[ Synapse Packaging team ]
|
||||||
|
* New synapse release 1.29.0.
|
||||||
|
|
||||||
|
-- Synapse Packaging team <packages@matrix.org> Mon, 08 Mar 2021 13:51:50 +0000
|
||||||
|
|
||||||
|
matrix-synapse-py3 (1.28.0) stable; urgency=medium
|
||||||
|
|
||||||
|
* New synapse release 1.28.0.
|
||||||
|
|
||||||
|
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Feb 2021 10:21:57 +0000
|
||||||
|
|
||||||
|
matrix-synapse-py3 (1.27.0) stable; urgency=medium
|
||||||
|
|
||||||
|
[ Dan Callahan ]
|
||||||
|
* Fix build on Ubuntu 16.04 LTS (Xenial).
|
||||||
|
|
||||||
|
[ Synapse Packaging team ]
|
||||||
|
* New synapse release 1.27.0.
|
||||||
|
|
||||||
|
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Feb 2021 13:11:28 +0000
|
||||||
|
|
||||||
|
matrix-synapse-py3 (1.26.0) stable; urgency=medium
|
||||||
|
|
||||||
|
[ Richard van der Hoff ]
|
||||||
* Remove dependency on `python3-distutils`.
|
* Remove dependency on `python3-distutils`.
|
||||||
|
|
||||||
-- Richard van der Hoff <richard@matrix.org> Fri, 15 Jan 2021 12:44:19 +0000
|
[ Synapse Packaging team ]
|
||||||
|
* New synapse release 1.26.0.
|
||||||
|
|
||||||
|
-- Synapse Packaging team <packages@matrix.org> Wed, 27 Jan 2021 12:43:35 -0500
|
||||||
|
|
||||||
matrix-synapse-py3 (1.25.0) stable; urgency=medium
|
matrix-synapse-py3 (1.25.0) stable; urgency=medium
|
||||||
|
|
||||||
|
|||||||
2
debian/synctl.1
vendored
2
debian/synctl.1
vendored
@@ -44,7 +44,7 @@ Configuration file may be generated as follows:
|
|||||||
.
|
.
|
||||||
.nf
|
.nf
|
||||||
|
|
||||||
$ python \-B \-m synapse\.app\.homeserver \-c config\.yaml \-\-generate\-config \-\-server\-name=<server name>
|
$ python \-m synapse\.app\.homeserver \-c config\.yaml \-\-generate\-config \-\-server\-name=<server name>
|
||||||
.
|
.
|
||||||
.fi
|
.fi
|
||||||
.
|
.
|
||||||
|
|||||||
2
debian/synctl.ronn
vendored
2
debian/synctl.ronn
vendored
@@ -41,7 +41,7 @@ process.
|
|||||||
|
|
||||||
Configuration file may be generated as follows:
|
Configuration file may be generated as follows:
|
||||||
|
|
||||||
$ python -B -m synapse.app.homeserver -c config.yaml --generate-config --server-name=<server name>
|
$ python -m synapse.app.homeserver -c config.yaml --generate-config --server-name=<server name>
|
||||||
|
|
||||||
## ENVIRONMENT
|
## ENVIRONMENT
|
||||||
|
|
||||||
|
|||||||
@@ -28,11 +28,13 @@ RUN apt-get update && apt-get install -y \
|
|||||||
libwebp-dev \
|
libwebp-dev \
|
||||||
libxml++2.6-dev \
|
libxml++2.6-dev \
|
||||||
libxslt1-dev \
|
libxslt1-dev \
|
||||||
|
rustc \
|
||||||
zlib1g-dev \
|
zlib1g-dev \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# Build dependencies that are not available as wheels, to speed up rebuilds
|
# Build dependencies that are not available as wheels, to speed up rebuilds
|
||||||
RUN pip install --prefix="/install" --no-warn-script-location \
|
RUN pip install --prefix="/install" --no-warn-script-location \
|
||||||
|
cryptography \
|
||||||
frozendict \
|
frozendict \
|
||||||
jaeger-client \
|
jaeger-client \
|
||||||
opentracing \
|
opentracing \
|
||||||
|
|||||||
@@ -27,6 +27,7 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
|
|||||||
wget
|
wget
|
||||||
|
|
||||||
# fetch and unpack the package
|
# fetch and unpack the package
|
||||||
|
# TODO: Upgrade to 1.2.2 once xenial is dropped
|
||||||
RUN mkdir /dh-virtualenv
|
RUN mkdir /dh-virtualenv
|
||||||
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/ac6e1b1.tar.gz
|
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/ac6e1b1.tar.gz
|
||||||
RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
|
RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
|
||||||
|
|||||||
@@ -11,7 +11,6 @@ The image also does *not* provide a TURN server.
|
|||||||
By default, the image expects a single volume, located at ``/data``, that will hold:
|
By default, the image expects a single volume, located at ``/data``, that will hold:
|
||||||
|
|
||||||
* configuration files;
|
* configuration files;
|
||||||
* temporary files during uploads;
|
|
||||||
* uploaded media and thumbnails;
|
* uploaded media and thumbnails;
|
||||||
* the SQLite database if you do not configure postgres;
|
* the SQLite database if you do not configure postgres;
|
||||||
* the appservices configuration.
|
* the appservices configuration.
|
||||||
|
|||||||
@@ -89,7 +89,6 @@ federation_rc_concurrent: 3
|
|||||||
## Files ##
|
## Files ##
|
||||||
|
|
||||||
media_store_path: "/data/media"
|
media_store_path: "/data/media"
|
||||||
uploads_path: "/data/uploads"
|
|
||||||
max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "50M" }}"
|
max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "50M" }}"
|
||||||
max_image_pixels: "32M"
|
max_image_pixels: "32M"
|
||||||
dynamic_thumbnails: false
|
dynamic_thumbnails: false
|
||||||
|
|||||||
@@ -9,6 +9,8 @@
|
|||||||
* [Response](#response)
|
* [Response](#response)
|
||||||
* [Undoing room shutdowns](#undoing-room-shutdowns)
|
* [Undoing room shutdowns](#undoing-room-shutdowns)
|
||||||
- [Make Room Admin API](#make-room-admin-api)
|
- [Make Room Admin API](#make-room-admin-api)
|
||||||
|
- [Forward Extremities Admin API](#forward-extremities-admin-api)
|
||||||
|
- [Event Context API](#event-context-api)
|
||||||
|
|
||||||
# List Room API
|
# List Room API
|
||||||
|
|
||||||
@@ -367,6 +369,36 @@ Response:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
# Room State API
|
||||||
|
|
||||||
|
The Room State admin API allows server admins to get a list of all state events in a room.
|
||||||
|
|
||||||
|
The response includes the following fields:
|
||||||
|
|
||||||
|
* `state` - The current state of the room at the time of request.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
A standard request:
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /_synapse/admin/v1/rooms/<room_id>/state
|
||||||
|
|
||||||
|
{}
|
||||||
|
```
|
||||||
|
|
||||||
|
Response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"state": [
|
||||||
|
{"type": "m.room.create", "state_key": "", "etc": true},
|
||||||
|
{"type": "m.room.power_levels", "state_key": "", "etc": true},
|
||||||
|
{"type": "m.room.name", "state_key": "", "etc": true}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
# Delete Room API
|
# Delete Room API
|
||||||
|
|
||||||
The Delete Room admin API allows server admins to remove rooms from server
|
The Delete Room admin API allows server admins to remove rooms from server
|
||||||
@@ -511,3 +543,173 @@ optionally be specified, e.g.:
|
|||||||
"user_id": "@foo:example.com"
|
"user_id": "@foo:example.com"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
# Forward Extremities Admin API
|
||||||
|
|
||||||
|
Enables querying and deleting forward extremities from rooms. When a lot of forward
|
||||||
|
extremities accumulate in a room, performance can become degraded. For details, see
|
||||||
|
[#1760](https://github.com/matrix-org/synapse/issues/1760).
|
||||||
|
|
||||||
|
## Check for forward extremities
|
||||||
|
|
||||||
|
To check the status of forward extremities for a room:
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /_synapse/admin/v1/rooms/<room_id_or_alias>/forward_extremities
|
||||||
|
```
|
||||||
|
|
||||||
|
A response as follows will be returned:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"count": 1,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"event_id": "$M5SP266vsnxctfwFgFLNceaCo3ujhRtg_NiiHabcdefgh",
|
||||||
|
"state_group": 439,
|
||||||
|
"depth": 123,
|
||||||
|
"received_ts": 1611263016761
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Deleting forward extremities
|
||||||
|
|
||||||
|
**WARNING**: Please ensure you know what you're doing and have read
|
||||||
|
the related issue [#1760](https://github.com/matrix-org/synapse/issues/1760).
|
||||||
|
Under no situations should this API be executed as an automated maintenance task!
|
||||||
|
|
||||||
|
If a room has lots of forward extremities, the extra can be
|
||||||
|
deleted as follows:
|
||||||
|
|
||||||
|
```
|
||||||
|
DELETE /_synapse/admin/v1/rooms/<room_id_or_alias>/forward_extremities
|
||||||
|
```
|
||||||
|
|
||||||
|
A response as follows will be returned, indicating the amount of forward extremities
|
||||||
|
that were deleted.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"deleted": 1
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
# Event Context API
|
||||||
|
|
||||||
|
This API lets a client find the context of an event. This is designed primarily to investigate abuse reports.
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /_synapse/admin/v1/rooms/<room_id>/context/<event_id>
|
||||||
|
```
|
||||||
|
|
||||||
|
This API mimmicks [GET /_matrix/client/r0/rooms/{roomId}/context/{eventId}](https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-rooms-roomid-context-eventid). Please refer to the link for all details on parameters and reseponse.
|
||||||
|
|
||||||
|
Example response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"end": "t29-57_2_0_2",
|
||||||
|
"events_after": [
|
||||||
|
{
|
||||||
|
"content": {
|
||||||
|
"body": "This is an example text message",
|
||||||
|
"msgtype": "m.text",
|
||||||
|
"format": "org.matrix.custom.html",
|
||||||
|
"formatted_body": "<b>This is an example text message</b>"
|
||||||
|
},
|
||||||
|
"type": "m.room.message",
|
||||||
|
"event_id": "$143273582443PhrSn:example.org",
|
||||||
|
"room_id": "!636q39766251:example.com",
|
||||||
|
"sender": "@example:example.org",
|
||||||
|
"origin_server_ts": 1432735824653,
|
||||||
|
"unsigned": {
|
||||||
|
"age": 1234
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"event": {
|
||||||
|
"content": {
|
||||||
|
"body": "filename.jpg",
|
||||||
|
"info": {
|
||||||
|
"h": 398,
|
||||||
|
"w": 394,
|
||||||
|
"mimetype": "image/jpeg",
|
||||||
|
"size": 31037
|
||||||
|
},
|
||||||
|
"url": "mxc://example.org/JWEIFJgwEIhweiWJE",
|
||||||
|
"msgtype": "m.image"
|
||||||
|
},
|
||||||
|
"type": "m.room.message",
|
||||||
|
"event_id": "$f3h4d129462ha:example.com",
|
||||||
|
"room_id": "!636q39766251:example.com",
|
||||||
|
"sender": "@example:example.org",
|
||||||
|
"origin_server_ts": 1432735824653,
|
||||||
|
"unsigned": {
|
||||||
|
"age": 1234
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"events_before": [
|
||||||
|
{
|
||||||
|
"content": {
|
||||||
|
"body": "something-important.doc",
|
||||||
|
"filename": "something-important.doc",
|
||||||
|
"info": {
|
||||||
|
"mimetype": "application/msword",
|
||||||
|
"size": 46144
|
||||||
|
},
|
||||||
|
"msgtype": "m.file",
|
||||||
|
"url": "mxc://example.org/FHyPlCeYUSFFxlgbQYZmoEoe"
|
||||||
|
},
|
||||||
|
"type": "m.room.message",
|
||||||
|
"event_id": "$143273582443PhrSn:example.org",
|
||||||
|
"room_id": "!636q39766251:example.com",
|
||||||
|
"sender": "@example:example.org",
|
||||||
|
"origin_server_ts": 1432735824653,
|
||||||
|
"unsigned": {
|
||||||
|
"age": 1234
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"start": "t27-54_2_0_2",
|
||||||
|
"state": [
|
||||||
|
{
|
||||||
|
"content": {
|
||||||
|
"creator": "@example:example.org",
|
||||||
|
"room_version": "1",
|
||||||
|
"m.federate": true,
|
||||||
|
"predecessor": {
|
||||||
|
"event_id": "$something:example.org",
|
||||||
|
"room_id": "!oldroom:example.org"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"type": "m.room.create",
|
||||||
|
"event_id": "$143273582443PhrSn:example.org",
|
||||||
|
"room_id": "!636q39766251:example.com",
|
||||||
|
"sender": "@example:example.org",
|
||||||
|
"origin_server_ts": 1432735824653,
|
||||||
|
"unsigned": {
|
||||||
|
"age": 1234
|
||||||
|
},
|
||||||
|
"state_key": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"content": {
|
||||||
|
"membership": "join",
|
||||||
|
"avatar_url": "mxc://example.org/SEsfnsuifSDFSSEF",
|
||||||
|
"displayname": "Alice Margatroid"
|
||||||
|
},
|
||||||
|
"type": "m.room.member",
|
||||||
|
"event_id": "$143273582443PhrSn:example.org",
|
||||||
|
"room_id": "!636q39766251:example.com",
|
||||||
|
"sender": "@example:example.org",
|
||||||
|
"origin_server_ts": 1432735824653,
|
||||||
|
"unsigned": {
|
||||||
|
"age": 1234
|
||||||
|
},
|
||||||
|
"state_key": "@alice:example.org"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|||||||
@@ -29,8 +29,9 @@ It returns a JSON body like the following:
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"avatar_url": "<avatar_url>",
|
"avatar_url": "<avatar_url>",
|
||||||
"admin": false,
|
"admin": 0,
|
||||||
"deactivated": false,
|
"deactivated": 0,
|
||||||
|
"shadow_banned": 0,
|
||||||
"password_hash": "$2b$12$p9B4GkqYdRTPGD",
|
"password_hash": "$2b$12$p9B4GkqYdRTPGD",
|
||||||
"creation_ts": 1560432506,
|
"creation_ts": 1560432506,
|
||||||
"appservice_id": null,
|
"appservice_id": null,
|
||||||
@@ -150,6 +151,7 @@ A JSON body is returned with the following shape:
|
|||||||
"admin": 0,
|
"admin": 0,
|
||||||
"user_type": null,
|
"user_type": null,
|
||||||
"deactivated": 0,
|
"deactivated": 0,
|
||||||
|
"shadow_banned": 0,
|
||||||
"displayname": "<User One>",
|
"displayname": "<User One>",
|
||||||
"avatar_url": null
|
"avatar_url": null
|
||||||
}, {
|
}, {
|
||||||
@@ -158,6 +160,7 @@ A JSON body is returned with the following shape:
|
|||||||
"admin": 1,
|
"admin": 1,
|
||||||
"user_type": null,
|
"user_type": null,
|
||||||
"deactivated": 0,
|
"deactivated": 0,
|
||||||
|
"shadow_banned": 0,
|
||||||
"displayname": "<User Two>",
|
"displayname": "<User Two>",
|
||||||
"avatar_url": "<avatar_url>"
|
"avatar_url": "<avatar_url>"
|
||||||
}
|
}
|
||||||
@@ -262,7 +265,7 @@ The following actions are performed when deactivating an user:
|
|||||||
- Reject all pending invites
|
- Reject all pending invites
|
||||||
- Remove all account validity information related to the user
|
- Remove all account validity information related to the user
|
||||||
|
|
||||||
The following additional actions are performed during deactivation if``erase``
|
The following additional actions are performed during deactivation if ``erase``
|
||||||
is set to ``true``:
|
is set to ``true``:
|
||||||
|
|
||||||
- Remove the user's display name
|
- Remove the user's display name
|
||||||
@@ -376,11 +379,12 @@ The following fields are returned in the JSON response body:
|
|||||||
- ``total`` - Number of rooms.
|
- ``total`` - Number of rooms.
|
||||||
|
|
||||||
|
|
||||||
List media of an user
|
List media of a user
|
||||||
================================
|
====================
|
||||||
Gets a list of all local media that a specific ``user_id`` has created.
|
Gets a list of all local media that a specific ``user_id`` has created.
|
||||||
The response is ordered by creation date descending and media ID descending.
|
By default, the response is ordered by descending creation date and ascending media ID.
|
||||||
The newest media is on top.
|
The newest media is on top. You can change the order with parameters
|
||||||
|
``order_by`` and ``dir``.
|
||||||
|
|
||||||
The API is::
|
The API is::
|
||||||
|
|
||||||
@@ -437,6 +441,35 @@ The following parameters should be set in the URL:
|
|||||||
denoting the offset in the returned results. This should be treated as an opaque value and
|
denoting the offset in the returned results. This should be treated as an opaque value and
|
||||||
not explicitly set to anything other than the return value of ``next_token`` from a previous call.
|
not explicitly set to anything other than the return value of ``next_token`` from a previous call.
|
||||||
Defaults to ``0``.
|
Defaults to ``0``.
|
||||||
|
- ``order_by`` - The method by which to sort the returned list of media.
|
||||||
|
If the ordered field has duplicates, the second order is always by ascending ``media_id``,
|
||||||
|
which guarantees a stable ordering. Valid values are:
|
||||||
|
|
||||||
|
- ``media_id`` - Media are ordered alphabetically by ``media_id``.
|
||||||
|
- ``upload_name`` - Media are ordered alphabetically by name the media was uploaded with.
|
||||||
|
- ``created_ts`` - Media are ordered by when the content was uploaded in ms.
|
||||||
|
Smallest to largest. This is the default.
|
||||||
|
- ``last_access_ts`` - Media are ordered by when the content was last accessed in ms.
|
||||||
|
Smallest to largest.
|
||||||
|
- ``media_length`` - Media are ordered by length of the media in bytes.
|
||||||
|
Smallest to largest.
|
||||||
|
- ``media_type`` - Media are ordered alphabetically by MIME-type.
|
||||||
|
- ``quarantined_by`` - Media are ordered alphabetically by the user ID that
|
||||||
|
initiated the quarantine request for this media.
|
||||||
|
- ``safe_from_quarantine`` - Media are ordered by the status if this media is safe
|
||||||
|
from quarantining.
|
||||||
|
|
||||||
|
- ``dir`` - Direction of media order. Either ``f`` for forwards or ``b`` for backwards.
|
||||||
|
Setting this value to ``b`` will reverse the above sort order. Defaults to ``f``.
|
||||||
|
|
||||||
|
If neither ``order_by`` nor ``dir`` is set, the default order is newest media on top
|
||||||
|
(corresponds to ``order_by`` = ``created_ts`` and ``dir`` = ``b``).
|
||||||
|
|
||||||
|
Caution. The database only has indexes on the columns ``media_id``,
|
||||||
|
``user_id`` and ``created_ts``. This means that if a different sort order is used
|
||||||
|
(``upload_name``, ``last_access_ts``, ``media_length``, ``media_type``,
|
||||||
|
``quarantined_by`` or ``safe_from_quarantine``), this can cause a large load on the
|
||||||
|
database, especially for large environments.
|
||||||
|
|
||||||
**Response**
|
**Response**
|
||||||
|
|
||||||
@@ -760,3 +793,33 @@ The following fields are returned in the JSON response body:
|
|||||||
- ``total`` - integer - Number of pushers.
|
- ``total`` - integer - Number of pushers.
|
||||||
|
|
||||||
See also `Client-Server API Spec <https://matrix.org/docs/spec/client_server/latest#get-matrix-client-r0-pushers>`_
|
See also `Client-Server API Spec <https://matrix.org/docs/spec/client_server/latest#get-matrix-client-r0-pushers>`_
|
||||||
|
|
||||||
|
Shadow-banning users
|
||||||
|
====================
|
||||||
|
|
||||||
|
Shadow-banning is a useful tool for moderating malicious or egregiously abusive users.
|
||||||
|
A shadow-banned users receives successful responses to their client-server API requests,
|
||||||
|
but the events are not propagated into rooms. This can be an effective tool as it
|
||||||
|
(hopefully) takes longer for the user to realise they are being moderated before
|
||||||
|
pivoting to another account.
|
||||||
|
|
||||||
|
Shadow-banning a user should be used as a tool of last resort and may lead to confusing
|
||||||
|
or broken behaviour for the client. A shadow-banned user will not receive any
|
||||||
|
notification and it is generally more appropriate to ban or kick abusive users.
|
||||||
|
A shadow-banned user will be unable to contact anyone on the server.
|
||||||
|
|
||||||
|
The API is::
|
||||||
|
|
||||||
|
POST /_synapse/admin/v1/users/<user_id>/shadow_ban
|
||||||
|
|
||||||
|
To use it, you will need to authenticate by providing an ``access_token`` for a
|
||||||
|
server admin: see `README.rst <README.rst>`_.
|
||||||
|
|
||||||
|
An empty JSON dict is returned.
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
The following parameters should be set in the URL:
|
||||||
|
|
||||||
|
- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
|
||||||
|
be local.
|
||||||
|
|||||||
@@ -8,16 +8,16 @@ errors in code.
|
|||||||
|
|
||||||
The necessary tools are detailed below.
|
The necessary tools are detailed below.
|
||||||
|
|
||||||
|
First install them with:
|
||||||
|
|
||||||
|
pip install -e ".[lint,mypy]"
|
||||||
|
|
||||||
- **black**
|
- **black**
|
||||||
|
|
||||||
The Synapse codebase uses [black](https://pypi.org/project/black/)
|
The Synapse codebase uses [black](https://pypi.org/project/black/)
|
||||||
as an opinionated code formatter, ensuring all comitted code is
|
as an opinionated code formatter, ensuring all comitted code is
|
||||||
properly formatted.
|
properly formatted.
|
||||||
|
|
||||||
First install `black` with:
|
|
||||||
|
|
||||||
pip install --upgrade black
|
|
||||||
|
|
||||||
Have `black` auto-format your code (it shouldn't change any
|
Have `black` auto-format your code (it shouldn't change any
|
||||||
functionality) with:
|
functionality) with:
|
||||||
|
|
||||||
@@ -28,10 +28,6 @@ The necessary tools are detailed below.
|
|||||||
`flake8` is a code checking tool. We require code to pass `flake8`
|
`flake8` is a code checking tool. We require code to pass `flake8`
|
||||||
before being merged into the codebase.
|
before being merged into the codebase.
|
||||||
|
|
||||||
Install `flake8` with:
|
|
||||||
|
|
||||||
pip install --upgrade flake8 flake8-comprehensions
|
|
||||||
|
|
||||||
Check all application and test code with:
|
Check all application and test code with:
|
||||||
|
|
||||||
flake8 synapse tests
|
flake8 synapse tests
|
||||||
@@ -41,10 +37,6 @@ The necessary tools are detailed below.
|
|||||||
`isort` ensures imports are nicely formatted, and can suggest and
|
`isort` ensures imports are nicely formatted, and can suggest and
|
||||||
auto-fix issues such as double-importing.
|
auto-fix issues such as double-importing.
|
||||||
|
|
||||||
Install `isort` with:
|
|
||||||
|
|
||||||
pip install --upgrade isort
|
|
||||||
|
|
||||||
Auto-fix imports with:
|
Auto-fix imports with:
|
||||||
|
|
||||||
isort -rc synapse tests
|
isort -rc synapse tests
|
||||||
|
|||||||
129
docs/openid.md
129
docs/openid.md
@@ -44,7 +44,7 @@ as follows:
|
|||||||
|
|
||||||
To enable the OpenID integration, you should then add a section to the `oidc_providers`
|
To enable the OpenID integration, you should then add a section to the `oidc_providers`
|
||||||
setting in your configuration file (or uncomment one of the existing examples).
|
setting in your configuration file (or uncomment one of the existing examples).
|
||||||
See [sample_config.yaml](./sample_config.yaml) for some sample settings, as well as
|
See [sample_config.yaml](./sample_config.yaml) for some sample settings, as well as
|
||||||
the text below for example configurations for specific providers.
|
the text below for example configurations for specific providers.
|
||||||
|
|
||||||
## Sample configs
|
## Sample configs
|
||||||
@@ -52,11 +52,12 @@ the text below for example configurations for specific providers.
|
|||||||
Here are a few configs for providers that should work with Synapse.
|
Here are a few configs for providers that should work with Synapse.
|
||||||
|
|
||||||
### Microsoft Azure Active Directory
|
### Microsoft Azure Active Directory
|
||||||
Azure AD can act as an OpenID Connect Provider. Register a new application under
|
Azure AD can act as an OpenID Connect Provider. Register a new application under
|
||||||
*App registrations* in the Azure AD management console. The RedirectURI for your
|
*App registrations* in the Azure AD management console. The RedirectURI for your
|
||||||
application should point to your matrix server: `[synapse public baseurl]/_synapse/oidc/callback`
|
application should point to your matrix server:
|
||||||
|
`[synapse public baseurl]/_synapse/client/oidc/callback`
|
||||||
|
|
||||||
Go to *Certificates & secrets* and register a new client secret. Make note of your
|
Go to *Certificates & secrets* and register a new client secret. Make note of your
|
||||||
Directory (tenant) ID as it will be used in the Azure links.
|
Directory (tenant) ID as it will be used in the Azure links.
|
||||||
Edit your Synapse config file and change the `oidc_config` section:
|
Edit your Synapse config file and change the `oidc_config` section:
|
||||||
|
|
||||||
@@ -94,7 +95,7 @@ staticClients:
|
|||||||
- id: synapse
|
- id: synapse
|
||||||
secret: secret
|
secret: secret
|
||||||
redirectURIs:
|
redirectURIs:
|
||||||
- '[synapse public baseurl]/_synapse/oidc/callback'
|
- '[synapse public baseurl]/_synapse/client/oidc/callback'
|
||||||
name: 'Synapse'
|
name: 'Synapse'
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -118,7 +119,7 @@ oidc_providers:
|
|||||||
```
|
```
|
||||||
### [Keycloak][keycloak-idp]
|
### [Keycloak][keycloak-idp]
|
||||||
|
|
||||||
[Keycloak][keycloak-idp] is an open-source IdP maintained by Red Hat.
|
[Keycloak][keycloak-idp] is an open-source IdP maintained by Red Hat.
|
||||||
|
|
||||||
Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm.
|
Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm.
|
||||||
|
|
||||||
@@ -140,7 +141,7 @@ Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to
|
|||||||
| Enabled | `On` |
|
| Enabled | `On` |
|
||||||
| Client Protocol | `openid-connect` |
|
| Client Protocol | `openid-connect` |
|
||||||
| Access Type | `confidential` |
|
| Access Type | `confidential` |
|
||||||
| Valid Redirect URIs | `[synapse public baseurl]/_synapse/oidc/callback` |
|
| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` |
|
||||||
|
|
||||||
5. Click `Save`
|
5. Click `Save`
|
||||||
6. On the Credentials tab, update the fields:
|
6. On the Credentials tab, update the fields:
|
||||||
@@ -168,7 +169,7 @@ oidc_providers:
|
|||||||
### [Auth0][auth0]
|
### [Auth0][auth0]
|
||||||
|
|
||||||
1. Create a regular web application for Synapse
|
1. Create a regular web application for Synapse
|
||||||
2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/oidc/callback`
|
2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback`
|
||||||
3. Add a rule to add the `preferred_username` claim.
|
3. Add a rule to add the `preferred_username` claim.
|
||||||
<details>
|
<details>
|
||||||
<summary>Code sample</summary>
|
<summary>Code sample</summary>
|
||||||
@@ -194,7 +195,7 @@ Synapse config:
|
|||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
oidc_providers:
|
oidc_providers:
|
||||||
- idp_id: auth0
|
- idp_id: auth0
|
||||||
idp_name: Auth0
|
idp_name: Auth0
|
||||||
issuer: "https://your-tier.eu.auth0.com/" # TO BE FILLED
|
issuer: "https://your-tier.eu.auth0.com/" # TO BE FILLED
|
||||||
client_id: "your-client-id" # TO BE FILLED
|
client_id: "your-client-id" # TO BE FILLED
|
||||||
@@ -217,7 +218,7 @@ login mechanism needs an attribute to uniquely identify users, and that endpoint
|
|||||||
does not return a `sub` property, an alternative `subject_claim` has to be set.
|
does not return a `sub` property, an alternative `subject_claim` has to be set.
|
||||||
|
|
||||||
1. Create a new OAuth application: https://github.com/settings/applications/new.
|
1. Create a new OAuth application: https://github.com/settings/applications/new.
|
||||||
2. Set the callback URL to `[synapse public baseurl]/_synapse/oidc/callback`.
|
2. Set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`.
|
||||||
|
|
||||||
Synapse config:
|
Synapse config:
|
||||||
|
|
||||||
@@ -225,6 +226,7 @@ Synapse config:
|
|||||||
oidc_providers:
|
oidc_providers:
|
||||||
- idp_id: github
|
- idp_id: github
|
||||||
idp_name: Github
|
idp_name: Github
|
||||||
|
idp_brand: "org.matrix.github" # optional: styling hint for clients
|
||||||
discover: false
|
discover: false
|
||||||
issuer: "https://github.com/"
|
issuer: "https://github.com/"
|
||||||
client_id: "your-client-id" # TO BE FILLED
|
client_id: "your-client-id" # TO BE FILLED
|
||||||
@@ -250,6 +252,7 @@ oidc_providers:
|
|||||||
oidc_providers:
|
oidc_providers:
|
||||||
- idp_id: google
|
- idp_id: google
|
||||||
idp_name: Google
|
idp_name: Google
|
||||||
|
idp_brand: "org.matrix.google" # optional: styling hint for clients
|
||||||
issuer: "https://accounts.google.com/"
|
issuer: "https://accounts.google.com/"
|
||||||
client_id: "your-client-id" # TO BE FILLED
|
client_id: "your-client-id" # TO BE FILLED
|
||||||
client_secret: "your-client-secret" # TO BE FILLED
|
client_secret: "your-client-secret" # TO BE FILLED
|
||||||
@@ -260,13 +263,13 @@ oidc_providers:
|
|||||||
display_name_template: "{{ user.name }}"
|
display_name_template: "{{ user.name }}"
|
||||||
```
|
```
|
||||||
4. Back in the Google console, add this Authorized redirect URI: `[synapse
|
4. Back in the Google console, add this Authorized redirect URI: `[synapse
|
||||||
public baseurl]/_synapse/oidc/callback`.
|
public baseurl]/_synapse/client/oidc/callback`.
|
||||||
|
|
||||||
### Twitch
|
### Twitch
|
||||||
|
|
||||||
1. Setup a developer account on [Twitch](https://dev.twitch.tv/)
|
1. Setup a developer account on [Twitch](https://dev.twitch.tv/)
|
||||||
2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/)
|
2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/)
|
||||||
3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/oidc/callback`
|
3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
|
||||||
|
|
||||||
Synapse config:
|
Synapse config:
|
||||||
|
|
||||||
@@ -288,7 +291,7 @@ oidc_providers:
|
|||||||
|
|
||||||
1. Create a [new application](https://gitlab.com/profile/applications).
|
1. Create a [new application](https://gitlab.com/profile/applications).
|
||||||
2. Add the `read_user` and `openid` scopes.
|
2. Add the `read_user` and `openid` scopes.
|
||||||
3. Add this Callback URL: `[synapse public baseurl]/_synapse/oidc/callback`
|
3. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
|
||||||
|
|
||||||
Synapse config:
|
Synapse config:
|
||||||
|
|
||||||
@@ -296,6 +299,7 @@ Synapse config:
|
|||||||
oidc_providers:
|
oidc_providers:
|
||||||
- idp_id: gitlab
|
- idp_id: gitlab
|
||||||
idp_name: Gitlab
|
idp_name: Gitlab
|
||||||
|
idp_brand: "org.matrix.gitlab" # optional: styling hint for clients
|
||||||
issuer: "https://gitlab.com/"
|
issuer: "https://gitlab.com/"
|
||||||
client_id: "your-client-id" # TO BE FILLED
|
client_id: "your-client-id" # TO BE FILLED
|
||||||
client_secret: "your-client-secret" # TO BE FILLED
|
client_secret: "your-client-secret" # TO BE FILLED
|
||||||
@@ -307,3 +311,102 @@ oidc_providers:
|
|||||||
localpart_template: '{{ user.nickname }}'
|
localpart_template: '{{ user.nickname }}'
|
||||||
display_name_template: '{{ user.name }}'
|
display_name_template: '{{ user.name }}'
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Facebook
|
||||||
|
|
||||||
|
Like Github, Facebook provides a custom OAuth2 API rather than an OIDC-compliant
|
||||||
|
one so requires a little more configuration.
|
||||||
|
|
||||||
|
0. You will need a Facebook developer account. You can register for one
|
||||||
|
[here](https://developers.facebook.com/async/registration/).
|
||||||
|
1. On the [apps](https://developers.facebook.com/apps/) page of the developer
|
||||||
|
console, "Create App", and choose "Build Connected Experiences".
|
||||||
|
2. Once the app is created, add "Facebook Login" and choose "Web". You don't
|
||||||
|
need to go through the whole form here.
|
||||||
|
3. In the left-hand menu, open "Products"/"Facebook Login"/"Settings".
|
||||||
|
* Add `[synapse public baseurl]/_synapse/client/oidc/callback` as an OAuth Redirect
|
||||||
|
URL.
|
||||||
|
4. In the left-hand menu, open "Settings/Basic". Here you can copy the "App ID"
|
||||||
|
and "App Secret" for use below.
|
||||||
|
|
||||||
|
Synapse config:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- idp_id: facebook
|
||||||
|
idp_name: Facebook
|
||||||
|
idp_brand: "org.matrix.facebook" # optional: styling hint for clients
|
||||||
|
discover: false
|
||||||
|
issuer: "https://facebook.com"
|
||||||
|
client_id: "your-client-id" # TO BE FILLED
|
||||||
|
client_secret: "your-client-secret" # TO BE FILLED
|
||||||
|
scopes: ["openid", "email"]
|
||||||
|
authorization_endpoint: https://facebook.com/dialog/oauth
|
||||||
|
token_endpoint: https://graph.facebook.com/v9.0/oauth/access_token
|
||||||
|
user_profile_method: "userinfo_endpoint"
|
||||||
|
userinfo_endpoint: "https://graph.facebook.com/v9.0/me?fields=id,name,email,picture"
|
||||||
|
user_mapping_provider:
|
||||||
|
config:
|
||||||
|
subject_claim: "id"
|
||||||
|
display_name_template: "{{ user.name }}"
|
||||||
|
```
|
||||||
|
|
||||||
|
Relevant documents:
|
||||||
|
* https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow
|
||||||
|
* Using Facebook's Graph API: https://developers.facebook.com/docs/graph-api/using-graph-api/
|
||||||
|
* Reference to the User endpoint: https://developers.facebook.com/docs/graph-api/reference/user
|
||||||
|
|
||||||
|
### Gitea
|
||||||
|
|
||||||
|
Gitea is, like Github, not an OpenID provider, but just an OAuth2 provider.
|
||||||
|
|
||||||
|
The [`/user` API endpoint](https://try.gitea.io/api/swagger#/user/userGetCurrent)
|
||||||
|
can be used to retrieve information on the authenticated user. As the Synapse
|
||||||
|
login mechanism needs an attribute to uniquely identify users, and that endpoint
|
||||||
|
does not return a `sub` property, an alternative `subject_claim` has to be set.
|
||||||
|
|
||||||
|
1. Create a new application.
|
||||||
|
2. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
|
||||||
|
|
||||||
|
Synapse config:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
oidc_providers:
|
||||||
|
- idp_id: gitea
|
||||||
|
idp_name: Gitea
|
||||||
|
discover: false
|
||||||
|
issuer: "https://your-gitea.com/"
|
||||||
|
client_id: "your-client-id" # TO BE FILLED
|
||||||
|
client_secret: "your-client-secret" # TO BE FILLED
|
||||||
|
client_auth_method: client_secret_post
|
||||||
|
scopes: [] # Gitea doesn't support Scopes
|
||||||
|
authorization_endpoint: "https://your-gitea.com/login/oauth/authorize"
|
||||||
|
token_endpoint: "https://your-gitea.com/login/oauth/access_token"
|
||||||
|
userinfo_endpoint: "https://your-gitea.com/api/v1/user"
|
||||||
|
user_mapping_provider:
|
||||||
|
config:
|
||||||
|
subject_claim: "id"
|
||||||
|
localpart_template: "{{ user.login }}"
|
||||||
|
display_name_template: "{{ user.full_name }}"
|
||||||
|
```
|
||||||
|
|
||||||
|
### XWiki
|
||||||
|
|
||||||
|
Install [OpenID Connect Provider](https://extensions.xwiki.org/xwiki/bin/view/Extension/OpenID%20Connect/OpenID%20Connect%20Provider/) extension in your [XWiki](https://www.xwiki.org) instance.
|
||||||
|
|
||||||
|
Synapse config:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
oidc_providers:
|
||||||
|
- idp_id: xwiki
|
||||||
|
idp_name: "XWiki"
|
||||||
|
issuer: "https://myxwikihost/xwiki/oidc/"
|
||||||
|
client_id: "your-client-id" # TO BE FILLED
|
||||||
|
# Needed until https://github.com/matrix-org/synapse/issues/9212 is fixed
|
||||||
|
client_secret: "dontcare"
|
||||||
|
scopes: ["openid", "profile"]
|
||||||
|
user_profile_method: "userinfo_endpoint"
|
||||||
|
user_mapping_provider:
|
||||||
|
config:
|
||||||
|
localpart_template: "{{ user.preferred_username }}"
|
||||||
|
display_name_template: "{{ user.name }}"
|
||||||
|
```
|
||||||
|
|||||||
@@ -9,23 +9,23 @@ of doing so is that it means that you can expose the default https port
|
|||||||
(443) to Matrix clients without needing to run Synapse with root
|
(443) to Matrix clients without needing to run Synapse with root
|
||||||
privileges.
|
privileges.
|
||||||
|
|
||||||
**NOTE**: Your reverse proxy must not `canonicalise` or `normalise`
|
You should configure your reverse proxy to forward requests to `/_matrix` or
|
||||||
the requested URI in any way (for example, by decoding `%xx` escapes).
|
`/_synapse/client` to Synapse, and have it set the `X-Forwarded-For` and
|
||||||
Beware that Apache *will* canonicalise URIs unless you specify
|
`X-Forwarded-Proto` request headers.
|
||||||
`nocanon`.
|
|
||||||
|
|
||||||
When setting up a reverse proxy, remember that Matrix clients and other
|
You should remember that Matrix clients and other Matrix servers do not
|
||||||
Matrix servers do not necessarily need to connect to your server via the
|
necessarily need to connect to your server via the same server name or
|
||||||
same server name or port. Indeed, clients will use port 443 by default,
|
port. Indeed, clients will use port 443 by default, whereas servers default to
|
||||||
whereas servers default to port 8448. Where these are different, we
|
port 8448. Where these are different, we refer to the 'client port' and the
|
||||||
refer to the 'client port' and the 'federation port'. See [the Matrix
|
'federation port'. See [the Matrix
|
||||||
specification](https://matrix.org/docs/spec/server_server/latest#resolving-server-names)
|
specification](https://matrix.org/docs/spec/server_server/latest#resolving-server-names)
|
||||||
for more details of the algorithm used for federation connections, and
|
for more details of the algorithm used for federation connections, and
|
||||||
[delegate.md](<delegate.md>) for instructions on setting up delegation.
|
[delegate.md](<delegate.md>) for instructions on setting up delegation.
|
||||||
|
|
||||||
Endpoints that are part of the standardised Matrix specification are
|
**NOTE**: Your reverse proxy must not `canonicalise` or `normalise`
|
||||||
located under `/_matrix`, whereas endpoints specific to Synapse are
|
the requested URI in any way (for example, by decoding `%xx` escapes).
|
||||||
located under `/_synapse/client`.
|
Beware that Apache *will* canonicalise URIs unless you specify
|
||||||
|
`nocanon`.
|
||||||
|
|
||||||
Let's assume that we expect clients to connect to our server at
|
Let's assume that we expect clients to connect to our server at
|
||||||
`https://matrix.example.com`, and other servers to connect at
|
`https://matrix.example.com`, and other servers to connect at
|
||||||
@@ -40,18 +40,21 @@ the reverse proxy and the homeserver.
|
|||||||
|
|
||||||
```
|
```
|
||||||
server {
|
server {
|
||||||
listen 443 ssl;
|
listen 443 ssl http2;
|
||||||
listen [::]:443 ssl;
|
listen [::]:443 ssl http2;
|
||||||
|
|
||||||
# For the federation port
|
# For the federation port
|
||||||
listen 8448 ssl default_server;
|
listen 8448 ssl http2 default_server;
|
||||||
listen [::]:8448 ssl default_server;
|
listen [::]:8448 ssl http2 default_server;
|
||||||
|
|
||||||
server_name matrix.example.com;
|
server_name matrix.example.com;
|
||||||
|
|
||||||
location ~* ^(\/_matrix|\/_synapse\/client) {
|
location ~* ^(\/_matrix|\/_synapse\/client) {
|
||||||
proxy_pass http://localhost:8008;
|
proxy_pass http://localhost:8008;
|
||||||
proxy_set_header X-Forwarded-For $remote_addr;
|
proxy_set_header X-Forwarded-For $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
|
||||||
# Nginx by default only allows file uploads up to 1M in size
|
# Nginx by default only allows file uploads up to 1M in size
|
||||||
# Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
|
# Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
|
||||||
client_max_body_size 50M;
|
client_max_body_size 50M;
|
||||||
@@ -102,6 +105,7 @@ example.com:8448 {
|
|||||||
SSLEngine on
|
SSLEngine on
|
||||||
ServerName matrix.example.com;
|
ServerName matrix.example.com;
|
||||||
|
|
||||||
|
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
|
||||||
AllowEncodedSlashes NoDecode
|
AllowEncodedSlashes NoDecode
|
||||||
ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon
|
ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon
|
||||||
ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix
|
ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix
|
||||||
@@ -113,6 +117,7 @@ example.com:8448 {
|
|||||||
SSLEngine on
|
SSLEngine on
|
||||||
ServerName example.com;
|
ServerName example.com;
|
||||||
|
|
||||||
|
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
|
||||||
AllowEncodedSlashes NoDecode
|
AllowEncodedSlashes NoDecode
|
||||||
ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon
|
ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon
|
||||||
ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix
|
ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix
|
||||||
@@ -134,6 +139,9 @@ example.com:8448 {
|
|||||||
```
|
```
|
||||||
frontend https
|
frontend https
|
||||||
bind :::443 v4v6 ssl crt /etc/ssl/haproxy/ strict-sni alpn h2,http/1.1
|
bind :::443 v4v6 ssl crt /etc/ssl/haproxy/ strict-sni alpn h2,http/1.1
|
||||||
|
http-request set-header X-Forwarded-Proto https if { ssl_fc }
|
||||||
|
http-request set-header X-Forwarded-Proto http if !{ ssl_fc }
|
||||||
|
http-request set-header X-Forwarded-For %[src]
|
||||||
|
|
||||||
# Matrix client traffic
|
# Matrix client traffic
|
||||||
acl matrix-host hdr(host) -i matrix.example.com
|
acl matrix-host hdr(host) -i matrix.example.com
|
||||||
@@ -144,6 +152,10 @@ frontend https
|
|||||||
|
|
||||||
frontend matrix-federation
|
frontend matrix-federation
|
||||||
bind :::8448 v4v6 ssl crt /etc/ssl/haproxy/synapse.pem alpn h2,http/1.1
|
bind :::8448 v4v6 ssl crt /etc/ssl/haproxy/synapse.pem alpn h2,http/1.1
|
||||||
|
http-request set-header X-Forwarded-Proto https if { ssl_fc }
|
||||||
|
http-request set-header X-Forwarded-Proto http if !{ ssl_fc }
|
||||||
|
http-request set-header X-Forwarded-For %[src]
|
||||||
|
|
||||||
default_backend matrix
|
default_backend matrix
|
||||||
|
|
||||||
backend matrix
|
backend matrix
|
||||||
|
|||||||
@@ -74,10 +74,6 @@ pid_file: DATADIR/homeserver.pid
|
|||||||
# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
|
# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
|
||||||
# 'listeners' below).
|
# 'listeners' below).
|
||||||
#
|
#
|
||||||
# If this is left unset, it defaults to 'https://<server_name>/'. (Note that
|
|
||||||
# that will not work unless you configure Synapse or a reverse-proxy to listen
|
|
||||||
# on port 443.)
|
|
||||||
#
|
|
||||||
#public_baseurl: https://example.com/
|
#public_baseurl: https://example.com/
|
||||||
|
|
||||||
# Set the soft limit on the number of file descriptors synapse can use
|
# Set the soft limit on the number of file descriptors synapse can use
|
||||||
@@ -105,6 +101,14 @@ pid_file: DATADIR/homeserver.pid
|
|||||||
#
|
#
|
||||||
#limit_profile_requests_to_users_who_share_rooms: true
|
#limit_profile_requests_to_users_who_share_rooms: true
|
||||||
|
|
||||||
|
# Uncomment to prevent a user's profile data from being retrieved and
|
||||||
|
# displayed in a room until they have joined it. By default, a user's
|
||||||
|
# profile data is included in an invite event, regardless of the values
|
||||||
|
# of the above two settings, and whether or not the users share a server.
|
||||||
|
# Defaults to 'true'.
|
||||||
|
#
|
||||||
|
#include_profile_data_on_invite: false
|
||||||
|
|
||||||
# If set to 'true', removes the need for authentication to access the server's
|
# If set to 'true', removes the need for authentication to access the server's
|
||||||
# public rooms directory through the client API, meaning that anyone can
|
# public rooms directory through the client API, meaning that anyone can
|
||||||
# query the room directory. Defaults to 'false'.
|
# query the room directory. Defaults to 'false'.
|
||||||
@@ -169,6 +173,7 @@ pid_file: DATADIR/homeserver.pid
|
|||||||
# - '100.64.0.0/10'
|
# - '100.64.0.0/10'
|
||||||
# - '192.0.0.0/24'
|
# - '192.0.0.0/24'
|
||||||
# - '169.254.0.0/16'
|
# - '169.254.0.0/16'
|
||||||
|
# - '192.88.99.0/24'
|
||||||
# - '198.18.0.0/15'
|
# - '198.18.0.0/15'
|
||||||
# - '192.0.2.0/24'
|
# - '192.0.2.0/24'
|
||||||
# - '198.51.100.0/24'
|
# - '198.51.100.0/24'
|
||||||
@@ -177,6 +182,9 @@ pid_file: DATADIR/homeserver.pid
|
|||||||
# - '::1/128'
|
# - '::1/128'
|
||||||
# - 'fe80::/10'
|
# - 'fe80::/10'
|
||||||
# - 'fc00::/7'
|
# - 'fc00::/7'
|
||||||
|
# - '2001:db8::/32'
|
||||||
|
# - 'ff00::/8'
|
||||||
|
# - 'fec0::/10'
|
||||||
|
|
||||||
# List of IP address CIDR ranges that should be allowed for federation,
|
# List of IP address CIDR ranges that should be allowed for federation,
|
||||||
# identity servers, push servers, and for checking key validity for
|
# identity servers, push servers, and for checking key validity for
|
||||||
@@ -699,6 +707,12 @@ acme:
|
|||||||
# - matrix.org
|
# - matrix.org
|
||||||
# - example.com
|
# - example.com
|
||||||
|
|
||||||
|
# Uncomment to disable profile lookup over federation. By default, the
|
||||||
|
# Federation API allows other homeservers to obtain profile data of any user
|
||||||
|
# on this homeserver. Defaults to 'true'.
|
||||||
|
#
|
||||||
|
#allow_profile_lookup_over_federation: false
|
||||||
|
|
||||||
|
|
||||||
## Caching ##
|
## Caching ##
|
||||||
|
|
||||||
@@ -824,6 +838,9 @@ log_config: "CONFDIR/SERVERNAME.log.config"
|
|||||||
# users are joining rooms the server is already in (this is cheap) vs
|
# users are joining rooms the server is already in (this is cheap) vs
|
||||||
# "remote" for when users are trying to join rooms not on the server (which
|
# "remote" for when users are trying to join rooms not on the server (which
|
||||||
# can be more expensive)
|
# can be more expensive)
|
||||||
|
# - one for ratelimiting how often a user or IP can attempt to validate a 3PID.
|
||||||
|
# - two for ratelimiting how often invites can be sent in a room or to a
|
||||||
|
# specific user.
|
||||||
#
|
#
|
||||||
# The defaults are as shown below.
|
# The defaults are as shown below.
|
||||||
#
|
#
|
||||||
@@ -857,7 +874,18 @@ log_config: "CONFDIR/SERVERNAME.log.config"
|
|||||||
# remote:
|
# remote:
|
||||||
# per_second: 0.01
|
# per_second: 0.01
|
||||||
# burst_count: 3
|
# burst_count: 3
|
||||||
|
#
|
||||||
|
#rc_3pid_validation:
|
||||||
|
# per_second: 0.003
|
||||||
|
# burst_count: 5
|
||||||
|
#
|
||||||
|
#rc_invites:
|
||||||
|
# per_room:
|
||||||
|
# per_second: 0.3
|
||||||
|
# burst_count: 10
|
||||||
|
# per_user:
|
||||||
|
# per_second: 0.003
|
||||||
|
# burst_count: 5
|
||||||
|
|
||||||
# Ratelimiting settings for incoming federation
|
# Ratelimiting settings for incoming federation
|
||||||
#
|
#
|
||||||
@@ -980,6 +1008,7 @@ media_store_path: "DATADIR/media_store"
|
|||||||
# - '100.64.0.0/10'
|
# - '100.64.0.0/10'
|
||||||
# - '192.0.0.0/24'
|
# - '192.0.0.0/24'
|
||||||
# - '169.254.0.0/16'
|
# - '169.254.0.0/16'
|
||||||
|
# - '192.88.99.0/24'
|
||||||
# - '198.18.0.0/15'
|
# - '198.18.0.0/15'
|
||||||
# - '192.0.2.0/24'
|
# - '192.0.2.0/24'
|
||||||
# - '198.51.100.0/24'
|
# - '198.51.100.0/24'
|
||||||
@@ -988,6 +1017,9 @@ media_store_path: "DATADIR/media_store"
|
|||||||
# - '::1/128'
|
# - '::1/128'
|
||||||
# - 'fe80::/10'
|
# - 'fe80::/10'
|
||||||
# - 'fc00::/7'
|
# - 'fc00::/7'
|
||||||
|
# - '2001:db8::/32'
|
||||||
|
# - 'ff00::/8'
|
||||||
|
# - 'fec0::/10'
|
||||||
|
|
||||||
# List of IP address CIDR ranges that the URL preview spider is allowed
|
# List of IP address CIDR ranges that the URL preview spider is allowed
|
||||||
# to access even if they are specified in url_preview_ip_range_blacklist.
|
# to access even if they are specified in url_preview_ip_range_blacklist.
|
||||||
@@ -1155,9 +1187,8 @@ account_validity:
|
|||||||
# send an email to the account's email address with a renewal link. By
|
# send an email to the account's email address with a renewal link. By
|
||||||
# default, no such emails are sent.
|
# default, no such emails are sent.
|
||||||
#
|
#
|
||||||
# If you enable this setting, you will also need to fill out the 'email'
|
# If you enable this setting, you will also need to fill out the 'email' and
|
||||||
# configuration section. You should also check that 'public_baseurl' is set
|
# 'public_baseurl' configuration sections.
|
||||||
# correctly.
|
|
||||||
#
|
#
|
||||||
#renew_at: 1w
|
#renew_at: 1w
|
||||||
|
|
||||||
@@ -1248,7 +1279,8 @@ account_validity:
|
|||||||
# The identity server which we suggest that clients should use when users log
|
# The identity server which we suggest that clients should use when users log
|
||||||
# in on this server.
|
# in on this server.
|
||||||
#
|
#
|
||||||
# (By default, no suggestion is made, so it is left up to the client.)
|
# (By default, no suggestion is made, so it is left up to the client.
|
||||||
|
# This setting is ignored unless public_baseurl is also set.)
|
||||||
#
|
#
|
||||||
#default_identity_server: https://matrix.org
|
#default_identity_server: https://matrix.org
|
||||||
|
|
||||||
@@ -1273,6 +1305,8 @@ account_validity:
|
|||||||
# by the Matrix Identity Service API specification:
|
# by the Matrix Identity Service API specification:
|
||||||
# https://matrix.org/docs/spec/identity_service/latest
|
# https://matrix.org/docs/spec/identity_service/latest
|
||||||
#
|
#
|
||||||
|
# If a delegate is specified, the config option public_baseurl must also be filled out.
|
||||||
|
#
|
||||||
account_threepid_delegates:
|
account_threepid_delegates:
|
||||||
#email: https://example.com # Delegate email sending to example.com
|
#email: https://example.com # Delegate email sending to example.com
|
||||||
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process
|
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process
|
||||||
@@ -1306,6 +1340,8 @@ account_threepid_delegates:
|
|||||||
# By default, any room aliases included in this list will be created
|
# By default, any room aliases included in this list will be created
|
||||||
# as a publicly joinable room when the first user registers for the
|
# as a publicly joinable room when the first user registers for the
|
||||||
# homeserver. This behaviour can be customised with the settings below.
|
# homeserver. This behaviour can be customised with the settings below.
|
||||||
|
# If the room already exists, make certain it is a publicly joinable
|
||||||
|
# room. The join rule of the room must be set to 'public'.
|
||||||
#
|
#
|
||||||
#auto_join_rooms:
|
#auto_join_rooms:
|
||||||
# - "#example:example.com"
|
# - "#example:example.com"
|
||||||
@@ -1552,10 +1588,10 @@ trusted_key_servers:
|
|||||||
# enable SAML login.
|
# enable SAML login.
|
||||||
#
|
#
|
||||||
# Once SAML support is enabled, a metadata file will be exposed at
|
# Once SAML support is enabled, a metadata file will be exposed at
|
||||||
# https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
|
# https://<server>:<port>/_synapse/client/saml2/metadata.xml, which you may be able to
|
||||||
# use to configure your SAML IdP with. Alternatively, you can manually configure
|
# use to configure your SAML IdP with. Alternatively, you can manually configure
|
||||||
# the IdP to use an ACS location of
|
# the IdP to use an ACS location of
|
||||||
# https://<server>:<port>/_matrix/saml2/authn_response.
|
# https://<server>:<port>/_synapse/client/saml2/authn_response.
|
||||||
#
|
#
|
||||||
saml2_config:
|
saml2_config:
|
||||||
# `sp_config` is the configuration for the pysaml2 Service Provider.
|
# `sp_config` is the configuration for the pysaml2 Service Provider.
|
||||||
@@ -1727,10 +1763,14 @@ saml2_config:
|
|||||||
# offer the user a choice of login mechanisms.
|
# offer the user a choice of login mechanisms.
|
||||||
#
|
#
|
||||||
# idp_icon: An optional icon for this identity provider, which is presented
|
# idp_icon: An optional icon for this identity provider, which is presented
|
||||||
# by identity picker pages. If given, must be an MXC URI of the format
|
# by clients and Synapse's own IdP picker page. If given, must be an
|
||||||
# mxc://<server-name>/<media-id>. (An easy way to obtain such an MXC URI
|
# MXC URI of the format mxc://<server-name>/<media-id>. (An easy way to
|
||||||
# is to upload an image to an (unencrypted) room and then copy the "url"
|
# obtain such an MXC URI is to upload an image to an (unencrypted) room
|
||||||
# from the source of the event.)
|
# and then copy the "url" from the source of the event.)
|
||||||
|
#
|
||||||
|
# idp_brand: An optional brand for this identity provider, allowing clients
|
||||||
|
# to style the login flow according to the identity provider in question.
|
||||||
|
# See the spec for possible options here.
|
||||||
#
|
#
|
||||||
# discover: set to 'false' to disable the use of the OIDC discovery mechanism
|
# discover: set to 'false' to disable the use of the OIDC discovery mechanism
|
||||||
# to discover endpoints. Defaults to true.
|
# to discover endpoints. Defaults to true.
|
||||||
@@ -1791,17 +1831,21 @@ saml2_config:
|
|||||||
#
|
#
|
||||||
# For the default provider, the following settings are available:
|
# For the default provider, the following settings are available:
|
||||||
#
|
#
|
||||||
# sub: name of the claim containing a unique identifier for the
|
# subject_claim: name of the claim containing a unique identifier
|
||||||
# user. Defaults to 'sub', which OpenID Connect compliant
|
# for the user. Defaults to 'sub', which OpenID Connect
|
||||||
# providers should provide.
|
# compliant providers should provide.
|
||||||
#
|
#
|
||||||
# localpart_template: Jinja2 template for the localpart of the MXID.
|
# localpart_template: Jinja2 template for the localpart of the MXID.
|
||||||
# If this is not set, the user will be prompted to choose their
|
# If this is not set, the user will be prompted to choose their
|
||||||
# own username.
|
# own username (see 'sso_auth_account_details.html' in the 'sso'
|
||||||
|
# section of this file).
|
||||||
#
|
#
|
||||||
# display_name_template: Jinja2 template for the display name to set
|
# display_name_template: Jinja2 template for the display name to set
|
||||||
# on first login. If unset, no displayname will be set.
|
# on first login. If unset, no displayname will be set.
|
||||||
#
|
#
|
||||||
|
# email_template: Jinja2 template for the email address of the user.
|
||||||
|
# If unset, no email address will be added to the account.
|
||||||
|
#
|
||||||
# extra_attributes: a map of Jinja2 templates for extra attributes
|
# extra_attributes: a map of Jinja2 templates for extra attributes
|
||||||
# to send back to the client during login.
|
# to send back to the client during login.
|
||||||
# Note that these are non-standard and clients will ignore them
|
# Note that these are non-standard and clients will ignore them
|
||||||
@@ -1837,6 +1881,12 @@ oidc_providers:
|
|||||||
# userinfo_endpoint: "https://accounts.example.com/userinfo"
|
# userinfo_endpoint: "https://accounts.example.com/userinfo"
|
||||||
# jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
|
# jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
|
||||||
# skip_verification: true
|
# skip_verification: true
|
||||||
|
# user_mapping_provider:
|
||||||
|
# config:
|
||||||
|
# subject_claim: "id"
|
||||||
|
# localpart_template: "{{ user.login }}"
|
||||||
|
# display_name_template: "{{ user.name }}"
|
||||||
|
# email_template: "{{ user.email }}"
|
||||||
|
|
||||||
# For use with Keycloak
|
# For use with Keycloak
|
||||||
#
|
#
|
||||||
@@ -1851,6 +1901,7 @@ oidc_providers:
|
|||||||
#
|
#
|
||||||
#- idp_id: github
|
#- idp_id: github
|
||||||
# idp_name: Github
|
# idp_name: Github
|
||||||
|
# idp_brand: org.matrix.github
|
||||||
# discover: false
|
# discover: false
|
||||||
# issuer: "https://github.com/"
|
# issuer: "https://github.com/"
|
||||||
# client_id: "your-client-id" # TO BE FILLED
|
# client_id: "your-client-id" # TO BE FILLED
|
||||||
@@ -1862,8 +1913,8 @@ oidc_providers:
|
|||||||
# user_mapping_provider:
|
# user_mapping_provider:
|
||||||
# config:
|
# config:
|
||||||
# subject_claim: "id"
|
# subject_claim: "id"
|
||||||
# localpart_template: "{ user.login }"
|
# localpart_template: "{{ user.login }}"
|
||||||
# display_name_template: "{ user.name }"
|
# display_name_template: "{{ user.name }}"
|
||||||
|
|
||||||
|
|
||||||
# Enable Central Authentication Service (CAS) for registration and login.
|
# Enable Central Authentication Service (CAS) for registration and login.
|
||||||
@@ -1878,10 +1929,6 @@ cas_config:
|
|||||||
#
|
#
|
||||||
#server_url: "https://cas-server.com"
|
#server_url: "https://cas-server.com"
|
||||||
|
|
||||||
# The public URL of the homeserver.
|
|
||||||
#
|
|
||||||
#service_url: "https://homeserver.domain.com:8448"
|
|
||||||
|
|
||||||
# The attribute of the CAS response to use as the display name.
|
# The attribute of the CAS response to use as the display name.
|
||||||
#
|
#
|
||||||
# If unset, no displayname will be set.
|
# If unset, no displayname will be set.
|
||||||
@@ -1913,9 +1960,9 @@ sso:
|
|||||||
# phishing attacks from evil.site. To avoid this, include a slash after the
|
# phishing attacks from evil.site. To avoid this, include a slash after the
|
||||||
# hostname: "https://my.client/".
|
# hostname: "https://my.client/".
|
||||||
#
|
#
|
||||||
# The login fallback page (used by clients that don't natively support the
|
# If public_baseurl is set, then the login fallback page (used by clients
|
||||||
# required login flows) is automatically whitelisted in addition to any URLs
|
# that don't natively support the required login flows) is whitelisted in
|
||||||
# in this list.
|
# addition to any URLs in this list.
|
||||||
#
|
#
|
||||||
# By default, this list is empty.
|
# By default, this list is empty.
|
||||||
#
|
#
|
||||||
@@ -1936,15 +1983,19 @@ sso:
|
|||||||
#
|
#
|
||||||
# When rendering, this template is given the following variables:
|
# When rendering, this template is given the following variables:
|
||||||
# * redirect_url: the URL that the user will be redirected to after
|
# * redirect_url: the URL that the user will be redirected to after
|
||||||
# login. Needs manual escaping (see
|
# login.
|
||||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
|
||||||
#
|
#
|
||||||
# * server_name: the homeserver's name.
|
# * server_name: the homeserver's name.
|
||||||
#
|
#
|
||||||
# * providers: a list of available Identity Providers. Each element is
|
# * providers: a list of available Identity Providers. Each element is
|
||||||
# an object with the following attributes:
|
# an object with the following attributes:
|
||||||
|
#
|
||||||
# * idp_id: unique identifier for the IdP
|
# * idp_id: unique identifier for the IdP
|
||||||
# * idp_name: user-facing name for the IdP
|
# * idp_name: user-facing name for the IdP
|
||||||
|
# * idp_icon: if specified in the IdP config, an MXC URI for an icon
|
||||||
|
# for the IdP
|
||||||
|
# * idp_brand: if specified in the IdP config, a textual identifier
|
||||||
|
# for the brand of the IdP
|
||||||
#
|
#
|
||||||
# The rendered HTML page should contain a form which submits its results
|
# The rendered HTML page should contain a form which submits its results
|
||||||
# back as a GET request, with the following query parameters:
|
# back as a GET request, with the following query parameters:
|
||||||
@@ -1954,33 +2005,101 @@ sso:
|
|||||||
#
|
#
|
||||||
# * idp: the 'idp_id' of the chosen IDP.
|
# * idp: the 'idp_id' of the chosen IDP.
|
||||||
#
|
#
|
||||||
|
# * HTML page to prompt new users to enter a userid and confirm other
|
||||||
|
# details: 'sso_auth_account_details.html'. This is only shown if the
|
||||||
|
# SSO implementation (with any user_mapping_provider) does not return
|
||||||
|
# a localpart.
|
||||||
|
#
|
||||||
|
# When rendering, this template is given the following variables:
|
||||||
|
#
|
||||||
|
# * server_name: the homeserver's name.
|
||||||
|
#
|
||||||
|
# * idp: details of the SSO Identity Provider that the user logged in
|
||||||
|
# with: an object with the following attributes:
|
||||||
|
#
|
||||||
|
# * idp_id: unique identifier for the IdP
|
||||||
|
# * idp_name: user-facing name for the IdP
|
||||||
|
# * idp_icon: if specified in the IdP config, an MXC URI for an icon
|
||||||
|
# for the IdP
|
||||||
|
# * idp_brand: if specified in the IdP config, a textual identifier
|
||||||
|
# for the brand of the IdP
|
||||||
|
#
|
||||||
|
# * user_attributes: an object containing details about the user that
|
||||||
|
# we received from the IdP. May have the following attributes:
|
||||||
|
#
|
||||||
|
# * display_name: the user's display_name
|
||||||
|
# * emails: a list of email addresses
|
||||||
|
#
|
||||||
|
# The template should render a form which submits the following fields:
|
||||||
|
#
|
||||||
|
# * username: the localpart of the user's chosen user id
|
||||||
|
#
|
||||||
|
# * HTML page allowing the user to consent to the server's terms and
|
||||||
|
# conditions. This is only shown for new users, and only if
|
||||||
|
# `user_consent.require_at_registration` is set.
|
||||||
|
#
|
||||||
|
# When rendering, this template is given the following variables:
|
||||||
|
#
|
||||||
|
# * server_name: the homeserver's name.
|
||||||
|
#
|
||||||
|
# * user_id: the user's matrix proposed ID.
|
||||||
|
#
|
||||||
|
# * user_profile.display_name: the user's proposed display name, if any.
|
||||||
|
#
|
||||||
|
# * consent_version: the version of the terms that the user will be
|
||||||
|
# shown
|
||||||
|
#
|
||||||
|
# * terms_url: a link to the page showing the terms.
|
||||||
|
#
|
||||||
|
# The template should render a form which submits the following fields:
|
||||||
|
#
|
||||||
|
# * accepted_version: the version of the terms accepted by the user
|
||||||
|
# (ie, 'consent_version' from the input variables).
|
||||||
|
#
|
||||||
# * HTML page for a confirmation step before redirecting back to the client
|
# * HTML page for a confirmation step before redirecting back to the client
|
||||||
# with the login token: 'sso_redirect_confirm.html'.
|
# with the login token: 'sso_redirect_confirm.html'.
|
||||||
#
|
#
|
||||||
# When rendering, this template is given three variables:
|
# When rendering, this template is given the following variables:
|
||||||
# * redirect_url: the URL the user is about to be redirected to. Needs
|
#
|
||||||
# manual escaping (see
|
# * redirect_url: the URL the user is about to be redirected to.
|
||||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
|
||||||
#
|
#
|
||||||
# * display_url: the same as `redirect_url`, but with the query
|
# * display_url: the same as `redirect_url`, but with the query
|
||||||
# parameters stripped. The intention is to have a
|
# parameters stripped. The intention is to have a
|
||||||
# human-readable URL to show to users, not to use it as
|
# human-readable URL to show to users, not to use it as
|
||||||
# the final address to redirect to. Needs manual escaping
|
# the final address to redirect to.
|
||||||
# (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
|
||||||
#
|
#
|
||||||
# * server_name: the homeserver's name.
|
# * server_name: the homeserver's name.
|
||||||
#
|
#
|
||||||
|
# * new_user: a boolean indicating whether this is the user's first time
|
||||||
|
# logging in.
|
||||||
|
#
|
||||||
|
# * user_id: the user's matrix ID.
|
||||||
|
#
|
||||||
|
# * user_profile.avatar_url: an MXC URI for the user's avatar, if any.
|
||||||
|
# None if the user has not set an avatar.
|
||||||
|
#
|
||||||
|
# * user_profile.display_name: the user's display name. None if the user
|
||||||
|
# has not set a display name.
|
||||||
|
#
|
||||||
# * HTML page which notifies the user that they are authenticating to confirm
|
# * HTML page which notifies the user that they are authenticating to confirm
|
||||||
# an operation on their account during the user interactive authentication
|
# an operation on their account during the user interactive authentication
|
||||||
# process: 'sso_auth_confirm.html'.
|
# process: 'sso_auth_confirm.html'.
|
||||||
#
|
#
|
||||||
# When rendering, this template is given the following variables:
|
# When rendering, this template is given the following variables:
|
||||||
# * redirect_url: the URL the user is about to be redirected to. Needs
|
# * redirect_url: the URL the user is about to be redirected to.
|
||||||
# manual escaping (see
|
|
||||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
|
||||||
#
|
#
|
||||||
# * description: the operation which the user is being asked to confirm
|
# * description: the operation which the user is being asked to confirm
|
||||||
#
|
#
|
||||||
|
# * idp: details of the Identity Provider that we will use to confirm
|
||||||
|
# the user's identity: an object with the following attributes:
|
||||||
|
#
|
||||||
|
# * idp_id: unique identifier for the IdP
|
||||||
|
# * idp_name: user-facing name for the IdP
|
||||||
|
# * idp_icon: if specified in the IdP config, an MXC URI for an icon
|
||||||
|
# for the IdP
|
||||||
|
# * idp_brand: if specified in the IdP config, a textual identifier
|
||||||
|
# for the brand of the IdP
|
||||||
|
#
|
||||||
# * HTML page shown after a successful user interactive authentication session:
|
# * HTML page shown after a successful user interactive authentication session:
|
||||||
# 'sso_auth_success.html'.
|
# 'sso_auth_success.html'.
|
||||||
#
|
#
|
||||||
@@ -2123,11 +2242,11 @@ password_config:
|
|||||||
#require_uppercase: true
|
#require_uppercase: true
|
||||||
|
|
||||||
ui_auth:
|
ui_auth:
|
||||||
# The number of milliseconds to allow a user-interactive authentication
|
# The amount of time to allow a user-interactive authentication session
|
||||||
# session to be active.
|
# to be active.
|
||||||
#
|
#
|
||||||
# This defaults to 0, meaning the user is queried for their credentials
|
# This defaults to 0, meaning the user is queried for their credentials
|
||||||
# before every action, but this can be overridden to alow a single
|
# before every action, but this can be overridden to allow a single
|
||||||
# validation to be re-used. This weakens the protections afforded by
|
# validation to be re-used. This weakens the protections afforded by
|
||||||
# the user-interactive authentication process, by allowing for multiple
|
# the user-interactive authentication process, by allowing for multiple
|
||||||
# (and potentially different) operations to use the same validation session.
|
# (and potentially different) operations to use the same validation session.
|
||||||
@@ -2135,7 +2254,7 @@ ui_auth:
|
|||||||
# Uncomment below to allow for credential validation to last for 15
|
# Uncomment below to allow for credential validation to last for 15
|
||||||
# seconds.
|
# seconds.
|
||||||
#
|
#
|
||||||
#session_timeout: 15000
|
#session_timeout: "15s"
|
||||||
|
|
||||||
|
|
||||||
# Configuration for sending emails from Synapse.
|
# Configuration for sending emails from Synapse.
|
||||||
@@ -2425,19 +2544,35 @@ spam_checker:
|
|||||||
|
|
||||||
# User Directory configuration
|
# User Directory configuration
|
||||||
#
|
#
|
||||||
# 'enabled' defines whether users can search the user directory. If
|
user_directory:
|
||||||
# false then empty responses are returned to all queries. Defaults to
|
# Defines whether users can search the user directory. If false then
|
||||||
# true.
|
# empty responses are returned to all queries. Defaults to true.
|
||||||
#
|
#
|
||||||
# 'search_all_users' defines whether to search all users visible to your HS
|
# Uncomment to disable the user directory.
|
||||||
# when searching the user directory, rather than limiting to users visible
|
#
|
||||||
# in public rooms. Defaults to false. If you set it True, you'll have to
|
#enabled: false
|
||||||
# rebuild the user_directory search indexes, see
|
|
||||||
# https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
|
# Defines whether to search all users visible to your HS when searching
|
||||||
#
|
# the user directory, rather than limiting to users visible in public
|
||||||
#user_directory:
|
# rooms. Defaults to false.
|
||||||
# enabled: true
|
#
|
||||||
# search_all_users: false
|
# If you set it true, you'll have to rebuild the user_directory search
|
||||||
|
# indexes, see:
|
||||||
|
# https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
|
||||||
|
#
|
||||||
|
# Uncomment to return search results containing all known users, even if that
|
||||||
|
# user does not share a room with the requester.
|
||||||
|
#
|
||||||
|
#search_all_users: true
|
||||||
|
|
||||||
|
# Defines whether to prefer local users in search query results.
|
||||||
|
# If True, local users are more likely to appear above remote users
|
||||||
|
# when searching the user directory. Defaults to false.
|
||||||
|
#
|
||||||
|
# Uncomment to prefer local over remote users in user directory search
|
||||||
|
# results.
|
||||||
|
#
|
||||||
|
#prefer_local_users: true
|
||||||
|
|
||||||
|
|
||||||
# User Consent configuration
|
# User Consent configuration
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ well as some specific methods:
|
|||||||
* `check_username_for_spam`
|
* `check_username_for_spam`
|
||||||
* `check_registration_for_spam`
|
* `check_registration_for_spam`
|
||||||
|
|
||||||
The details of the each of these methods (as well as their inputs and outputs)
|
The details of each of these methods (as well as their inputs and outputs)
|
||||||
are documented in the `synapse.events.spamcheck.SpamChecker` class.
|
are documented in the `synapse.events.spamcheck.SpamChecker` class.
|
||||||
|
|
||||||
The `ModuleApi` class provides a way for the custom spam checker class to
|
The `ModuleApi` class provides a way for the custom spam checker class to
|
||||||
@@ -61,6 +61,9 @@ class ExampleSpamChecker:
|
|||||||
|
|
||||||
async def check_registration_for_spam(self, email_threepid, username, request_info):
|
async def check_registration_for_spam(self, email_threepid, username, request_info):
|
||||||
return RegistrationBehaviour.ALLOW # allow all registrations
|
return RegistrationBehaviour.ALLOW # allow all registrations
|
||||||
|
|
||||||
|
async def check_media_file_for_spam(self, file_wrapper, file_info):
|
||||||
|
return False # allow all media
|
||||||
```
|
```
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ AssertPathExists=/etc/matrix-synapse/workers/%i.yaml
|
|||||||
|
|
||||||
# This service should be restarted when the synapse target is restarted.
|
# This service should be restarted when the synapse target is restarted.
|
||||||
PartOf=matrix-synapse.target
|
PartOf=matrix-synapse.target
|
||||||
|
ReloadPropagatedFrom=matrix-synapse.target
|
||||||
|
|
||||||
# if this is started at the same time as the main, let the main process start
|
# if this is started at the same time as the main, let the main process start
|
||||||
# first, to initialise the database schema.
|
# first, to initialise the database schema.
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ Description=Synapse master
|
|||||||
|
|
||||||
# This service should be restarted when the synapse target is restarted.
|
# This service should be restarted when the synapse target is restarted.
|
||||||
PartOf=matrix-synapse.target
|
PartOf=matrix-synapse.target
|
||||||
|
ReloadPropagatedFrom=matrix-synapse.target
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
Type=notify
|
Type=notify
|
||||||
|
|||||||
@@ -220,10 +220,6 @@ Asks the server for the current position of all streams.
|
|||||||
|
|
||||||
Acknowledge receipt of some federation data
|
Acknowledge receipt of some federation data
|
||||||
|
|
||||||
#### REMOVE_PUSHER (C)
|
|
||||||
|
|
||||||
Inform the server a pusher should be removed
|
|
||||||
|
|
||||||
### REMOTE_SERVER_UP (S, C)
|
### REMOTE_SERVER_UP (S, C)
|
||||||
|
|
||||||
Inform other processes that a remote server may have come back online.
|
Inform other processes that a remote server may have come back online.
|
||||||
|
|||||||
@@ -187,7 +187,7 @@ After updating the homeserver configuration, you must restart synapse:
|
|||||||
```
|
```
|
||||||
* If you use systemd:
|
* If you use systemd:
|
||||||
```
|
```
|
||||||
systemctl restart synapse.service
|
systemctl restart matrix-synapse.service
|
||||||
```
|
```
|
||||||
... and then reload any clients (or wait an hour for them to refresh their
|
... and then reload any clients (or wait an hour for them to refresh their
|
||||||
settings).
|
settings).
|
||||||
|
|||||||
@@ -40,6 +40,9 @@ which relays replication commands between processes. This can give a significant
|
|||||||
cpu saving on the main process and will be a prerequisite for upcoming
|
cpu saving on the main process and will be a prerequisite for upcoming
|
||||||
performance improvements.
|
performance improvements.
|
||||||
|
|
||||||
|
If Redis support is enabled Synapse will use it as a shared cache, as well as a
|
||||||
|
pub/sub mechanism.
|
||||||
|
|
||||||
See the [Architectural diagram](#architectural-diagram) section at the end for
|
See the [Architectural diagram](#architectural-diagram) section at the end for
|
||||||
a visualisation of what this looks like.
|
a visualisation of what this looks like.
|
||||||
|
|
||||||
@@ -225,7 +228,6 @@ expressions:
|
|||||||
^/_matrix/client/(api/v1|r0|unstable)/joined_groups$
|
^/_matrix/client/(api/v1|r0|unstable)/joined_groups$
|
||||||
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups$
|
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups$
|
||||||
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups/
|
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups/
|
||||||
^/_synapse/client/password_reset/email/submit_token$
|
|
||||||
|
|
||||||
# Registration/login requests
|
# Registration/login requests
|
||||||
^/_matrix/client/(api/v1|r0|unstable)/login$
|
^/_matrix/client/(api/v1|r0|unstable)/login$
|
||||||
@@ -256,25 +258,30 @@ Additionally, the following endpoints should be included if Synapse is configure
|
|||||||
to use SSO (you only need to include the ones for whichever SSO provider you're
|
to use SSO (you only need to include the ones for whichever SSO provider you're
|
||||||
using):
|
using):
|
||||||
|
|
||||||
|
# for all SSO providers
|
||||||
|
^/_matrix/client/(api/v1|r0|unstable)/login/sso/redirect
|
||||||
|
^/_synapse/client/pick_idp$
|
||||||
|
^/_synapse/client/pick_username
|
||||||
|
^/_synapse/client/new_user_consent$
|
||||||
|
^/_synapse/client/sso_register$
|
||||||
|
|
||||||
# OpenID Connect requests.
|
# OpenID Connect requests.
|
||||||
^/_matrix/client/(api/v1|r0|unstable)/login/sso/redirect$
|
^/_synapse/client/oidc/callback$
|
||||||
^/_synapse/oidc/callback$
|
|
||||||
|
|
||||||
# SAML requests.
|
# SAML requests.
|
||||||
^/_matrix/client/(api/v1|r0|unstable)/login/sso/redirect$
|
^/_synapse/client/saml2/authn_response$
|
||||||
^/_matrix/saml2/authn_response$
|
|
||||||
|
|
||||||
# CAS requests.
|
# CAS requests.
|
||||||
^/_matrix/client/(api/v1|r0|unstable)/login/(cas|sso)/redirect$
|
|
||||||
^/_matrix/client/(api/v1|r0|unstable)/login/cas/ticket$
|
^/_matrix/client/(api/v1|r0|unstable)/login/cas/ticket$
|
||||||
|
|
||||||
|
Ensure that all SSO logins go to a single process.
|
||||||
|
For multiple workers not handling the SSO endpoints properly, see
|
||||||
|
[#7530](https://github.com/matrix-org/synapse/issues/7530) and
|
||||||
|
[#9427](https://github.com/matrix-org/synapse/issues/9427).
|
||||||
|
|
||||||
Note that a HTTP listener with `client` and `federation` resources must be
|
Note that a HTTP listener with `client` and `federation` resources must be
|
||||||
configured in the `worker_listeners` option in the worker config.
|
configured in the `worker_listeners` option in the worker config.
|
||||||
|
|
||||||
Ensure that all SSO logins go to a single process (usually the main process).
|
|
||||||
For multiple workers not handling the SSO endpoints properly, see
|
|
||||||
[#7530](https://github.com/matrix-org/synapse/issues/7530).
|
|
||||||
|
|
||||||
#### Load balancing
|
#### Load balancing
|
||||||
|
|
||||||
It is possible to run multiple instances of this worker app, with incoming requests
|
It is possible to run multiple instances of this worker app, with incoming requests
|
||||||
@@ -367,7 +374,15 @@ Handles sending push notifications to sygnal and email. Doesn't handle any
|
|||||||
REST endpoints itself, but you should set `start_pushers: False` in the
|
REST endpoints itself, but you should set `start_pushers: False` in the
|
||||||
shared configuration file to stop the main synapse sending push notifications.
|
shared configuration file to stop the main synapse sending push notifications.
|
||||||
|
|
||||||
Note this worker cannot be load-balanced: only one instance should be active.
|
To run multiple instances at once the `pusher_instances` option should list all
|
||||||
|
pusher instances by their worker name, e.g.:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
pusher_instances:
|
||||||
|
- pusher_worker1
|
||||||
|
- pusher_worker2
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### `synapse.app.appservice`
|
### `synapse.app.appservice`
|
||||||
|
|
||||||
|
|||||||
41
mypy.ini
41
mypy.ini
@@ -23,39 +23,8 @@ files =
|
|||||||
synapse/events/validator.py,
|
synapse/events/validator.py,
|
||||||
synapse/events/spamcheck.py,
|
synapse/events/spamcheck.py,
|
||||||
synapse/federation,
|
synapse/federation,
|
||||||
synapse/handlers/_base.py,
|
synapse/groups,
|
||||||
synapse/handlers/account_data.py,
|
synapse/handlers,
|
||||||
synapse/handlers/account_validity.py,
|
|
||||||
synapse/handlers/admin.py,
|
|
||||||
synapse/handlers/appservice.py,
|
|
||||||
synapse/handlers/auth.py,
|
|
||||||
synapse/handlers/cas_handler.py,
|
|
||||||
synapse/handlers/deactivate_account.py,
|
|
||||||
synapse/handlers/device.py,
|
|
||||||
synapse/handlers/devicemessage.py,
|
|
||||||
synapse/handlers/directory.py,
|
|
||||||
synapse/handlers/events.py,
|
|
||||||
synapse/handlers/federation.py,
|
|
||||||
synapse/handlers/identity.py,
|
|
||||||
synapse/handlers/initial_sync.py,
|
|
||||||
synapse/handlers/message.py,
|
|
||||||
synapse/handlers/oidc_handler.py,
|
|
||||||
synapse/handlers/pagination.py,
|
|
||||||
synapse/handlers/password_policy.py,
|
|
||||||
synapse/handlers/presence.py,
|
|
||||||
synapse/handlers/profile.py,
|
|
||||||
synapse/handlers/read_marker.py,
|
|
||||||
synapse/handlers/receipts.py,
|
|
||||||
synapse/handlers/register.py,
|
|
||||||
synapse/handlers/room.py,
|
|
||||||
synapse/handlers/room_list.py,
|
|
||||||
synapse/handlers/room_member.py,
|
|
||||||
synapse/handlers/room_member_worker.py,
|
|
||||||
synapse/handlers/saml_handler.py,
|
|
||||||
synapse/handlers/sso.py,
|
|
||||||
synapse/handlers/sync.py,
|
|
||||||
synapse/handlers/user_directory.py,
|
|
||||||
synapse/handlers/ui_auth,
|
|
||||||
synapse/http/client.py,
|
synapse/http/client.py,
|
||||||
synapse/http/federation/matrix_federation_agent.py,
|
synapse/http/federation/matrix_federation_agent.py,
|
||||||
synapse/http/federation/well_known_resolver.py,
|
synapse/http/federation/well_known_resolver.py,
|
||||||
@@ -194,3 +163,9 @@ ignore_missing_imports = True
|
|||||||
|
|
||||||
[mypy-hiredis]
|
[mypy-hiredis]
|
||||||
ignore_missing_imports = True
|
ignore_missing_imports = True
|
||||||
|
|
||||||
|
[mypy-josepy.*]
|
||||||
|
ignore_missing_imports = True
|
||||||
|
|
||||||
|
[mypy-txacme.*]
|
||||||
|
ignore_missing_imports = True
|
||||||
|
|||||||
@@ -162,12 +162,23 @@ else
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
# Delete schema_version, applied_schema_deltas and applied_module_schemas tables
|
# Delete schema_version, applied_schema_deltas and applied_module_schemas tables
|
||||||
|
# Also delete any shadow tables from fts4
|
||||||
# This needs to be done after synapse_port_db is run
|
# This needs to be done after synapse_port_db is run
|
||||||
echo "Dropping unwanted db tables..."
|
echo "Dropping unwanted db tables..."
|
||||||
SQL="
|
SQL="
|
||||||
DROP TABLE schema_version;
|
DROP TABLE schema_version;
|
||||||
DROP TABLE applied_schema_deltas;
|
DROP TABLE applied_schema_deltas;
|
||||||
DROP TABLE applied_module_schemas;
|
DROP TABLE applied_module_schemas;
|
||||||
|
DROP TABLE event_search_content;
|
||||||
|
DROP TABLE event_search_segments;
|
||||||
|
DROP TABLE event_search_segdir;
|
||||||
|
DROP TABLE event_search_docsize;
|
||||||
|
DROP TABLE event_search_stat;
|
||||||
|
DROP TABLE user_directory_search_content;
|
||||||
|
DROP TABLE user_directory_search_segments;
|
||||||
|
DROP TABLE user_directory_search_segdir;
|
||||||
|
DROP TABLE user_directory_search_docsize;
|
||||||
|
DROP TABLE user_directory_search_stat;
|
||||||
"
|
"
|
||||||
sqlite3 "$SQLITE_DB" <<< "$SQL"
|
sqlite3 "$SQLITE_DB" <<< "$SQL"
|
||||||
psql $POSTGRES_DB_NAME -U "$POSTGRES_USERNAME" -w <<< "$SQL"
|
psql $POSTGRES_DB_NAME -U "$POSTGRES_USERNAME" -w <<< "$SQL"
|
||||||
|
|||||||
@@ -87,7 +87,9 @@ def cached_function_method_signature(ctx: MethodSigContext) -> CallableType:
|
|||||||
arg_kinds.append(ARG_NAMED_OPT) # Arg is an optional kwarg.
|
arg_kinds.append(ARG_NAMED_OPT) # Arg is an optional kwarg.
|
||||||
|
|
||||||
signature = signature.copy_modified(
|
signature = signature.copy_modified(
|
||||||
arg_types=arg_types, arg_names=arg_names, arg_kinds=arg_kinds,
|
arg_types=arg_types,
|
||||||
|
arg_names=arg_names,
|
||||||
|
arg_kinds=arg_kinds,
|
||||||
)
|
)
|
||||||
|
|
||||||
return signature
|
return signature
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ import logging
|
|||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
import traceback
|
import traceback
|
||||||
from typing import Dict, Optional, Set
|
from typing import Dict, Iterable, Optional, Set
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
@@ -47,6 +47,7 @@ from synapse.storage.databases.main.events_bg_updates import (
|
|||||||
from synapse.storage.databases.main.media_repository import (
|
from synapse.storage.databases.main.media_repository import (
|
||||||
MediaRepositoryBackgroundUpdateStore,
|
MediaRepositoryBackgroundUpdateStore,
|
||||||
)
|
)
|
||||||
|
from synapse.storage.databases.main.pusher import PusherWorkerStore
|
||||||
from synapse.storage.databases.main.registration import (
|
from synapse.storage.databases.main.registration import (
|
||||||
RegistrationBackgroundUpdateStore,
|
RegistrationBackgroundUpdateStore,
|
||||||
find_max_generated_user_id_localpart,
|
find_max_generated_user_id_localpart,
|
||||||
@@ -177,6 +178,7 @@ class Store(
|
|||||||
UserDirectoryBackgroundUpdateStore,
|
UserDirectoryBackgroundUpdateStore,
|
||||||
EndToEndKeyBackgroundStore,
|
EndToEndKeyBackgroundStore,
|
||||||
StatsStore,
|
StatsStore,
|
||||||
|
PusherWorkerStore,
|
||||||
):
|
):
|
||||||
def execute(self, f, *args, **kwargs):
|
def execute(self, f, *args, **kwargs):
|
||||||
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
|
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
|
||||||
@@ -629,7 +631,13 @@ class Porter(object):
|
|||||||
await self._setup_state_group_id_seq()
|
await self._setup_state_group_id_seq()
|
||||||
await self._setup_user_id_seq()
|
await self._setup_user_id_seq()
|
||||||
await self._setup_events_stream_seqs()
|
await self._setup_events_stream_seqs()
|
||||||
await self._setup_device_inbox_seq()
|
await self._setup_sequence(
|
||||||
|
"device_inbox_sequence", ("device_inbox", "device_federation_outbox")
|
||||||
|
)
|
||||||
|
await self._setup_sequence(
|
||||||
|
"account_data_sequence", ("room_account_data", "room_tags_revisions", "account_data"))
|
||||||
|
await self._setup_sequence("receipts_sequence", ("receipts_linearized", ))
|
||||||
|
await self._setup_auth_chain_sequence()
|
||||||
|
|
||||||
# Step 3. Get tables.
|
# Step 3. Get tables.
|
||||||
self.progress.set_state("Fetching tables")
|
self.progress.set_state("Fetching tables")
|
||||||
@@ -854,7 +862,7 @@ class Porter(object):
|
|||||||
|
|
||||||
return done, remaining + done
|
return done, remaining + done
|
||||||
|
|
||||||
async def _setup_state_group_id_seq(self):
|
async def _setup_state_group_id_seq(self) -> None:
|
||||||
curr_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
curr_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||||
table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
|
table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
|
||||||
)
|
)
|
||||||
@@ -868,7 +876,7 @@ class Porter(object):
|
|||||||
|
|
||||||
await self.postgres_store.db_pool.runInteraction("setup_state_group_id_seq", r)
|
await self.postgres_store.db_pool.runInteraction("setup_state_group_id_seq", r)
|
||||||
|
|
||||||
async def _setup_user_id_seq(self):
|
async def _setup_user_id_seq(self) -> None:
|
||||||
curr_id = await self.sqlite_store.db_pool.runInteraction(
|
curr_id = await self.sqlite_store.db_pool.runInteraction(
|
||||||
"setup_user_id_seq", find_max_generated_user_id_localpart
|
"setup_user_id_seq", find_max_generated_user_id_localpart
|
||||||
)
|
)
|
||||||
@@ -877,9 +885,9 @@ class Porter(object):
|
|||||||
next_id = curr_id + 1
|
next_id = curr_id + 1
|
||||||
txn.execute("ALTER SEQUENCE user_id_seq RESTART WITH %s", (next_id,))
|
txn.execute("ALTER SEQUENCE user_id_seq RESTART WITH %s", (next_id,))
|
||||||
|
|
||||||
return self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)
|
await self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)
|
||||||
|
|
||||||
async def _setup_events_stream_seqs(self):
|
async def _setup_events_stream_seqs(self) -> None:
|
||||||
"""Set the event stream sequences to the correct values.
|
"""Set the event stream sequences to the correct values.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -908,35 +916,46 @@ class Porter(object):
|
|||||||
(curr_backward_id + 1,),
|
(curr_backward_id + 1,),
|
||||||
)
|
)
|
||||||
|
|
||||||
return await self.postgres_store.db_pool.runInteraction(
|
await self.postgres_store.db_pool.runInteraction(
|
||||||
"_setup_events_stream_seqs", _setup_events_stream_seqs_set_pos,
|
"_setup_events_stream_seqs", _setup_events_stream_seqs_set_pos,
|
||||||
)
|
)
|
||||||
|
|
||||||
async def _setup_device_inbox_seq(self):
|
async def _setup_sequence(self, sequence_name: str, stream_id_tables: Iterable[str]) -> None:
|
||||||
"""Set the device inbox sequence to the correct value.
|
"""Set a sequence to the correct value.
|
||||||
"""
|
"""
|
||||||
curr_local_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
current_stream_ids = []
|
||||||
table="device_inbox",
|
for stream_id_table in stream_id_tables:
|
||||||
keyvalues={},
|
max_stream_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||||
retcol="COALESCE(MAX(stream_id), 1)",
|
table=stream_id_table,
|
||||||
allow_none=True,
|
keyvalues={},
|
||||||
)
|
retcol="COALESCE(MAX(stream_id), 1)",
|
||||||
|
allow_none=True,
|
||||||
|
)
|
||||||
|
current_stream_ids.append(max_stream_id)
|
||||||
|
|
||||||
curr_federation_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
next_id = max(current_stream_ids) + 1
|
||||||
table="device_federation_outbox",
|
|
||||||
keyvalues={},
|
|
||||||
retcol="COALESCE(MAX(stream_id), 1)",
|
|
||||||
allow_none=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
next_id = max(curr_local_id, curr_federation_id) + 1
|
def r(txn):
|
||||||
|
sql = "ALTER SEQUENCE %s RESTART WITH" % (sequence_name, )
|
||||||
|
txn.execute(sql + " %s", (next_id, ))
|
||||||
|
|
||||||
|
await self.postgres_store.db_pool.runInteraction("_setup_%s" % (sequence_name,), r)
|
||||||
|
|
||||||
|
async def _setup_auth_chain_sequence(self) -> None:
|
||||||
|
curr_chain_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||||
|
table="event_auth_chains", keyvalues={}, retcol="MAX(chain_id)", allow_none=True
|
||||||
|
)
|
||||||
|
|
||||||
def r(txn):
|
def r(txn):
|
||||||
txn.execute(
|
txn.execute(
|
||||||
"ALTER SEQUENCE device_inbox_sequence RESTART WITH %s", (next_id,)
|
"ALTER SEQUENCE event_auth_chain_id RESTART WITH %s",
|
||||||
|
(curr_chain_id,),
|
||||||
)
|
)
|
||||||
|
|
||||||
return self.postgres_store.db_pool.runInteraction("_setup_device_inbox_seq", r)
|
await self.postgres_store.db_pool.runInteraction(
|
||||||
|
"_setup_event_auth_chain_id", r,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
##############################################
|
##############################################
|
||||||
|
|||||||
6
setup.py
6
setup.py
@@ -96,13 +96,13 @@ CONDITIONAL_REQUIREMENTS["all"] = list(ALL_OPTIONAL_REQUIREMENTS)
|
|||||||
#
|
#
|
||||||
# We pin black so that our tests don't start failing on new releases.
|
# We pin black so that our tests don't start failing on new releases.
|
||||||
CONDITIONAL_REQUIREMENTS["lint"] = [
|
CONDITIONAL_REQUIREMENTS["lint"] = [
|
||||||
"isort==5.0.3",
|
"isort==5.7.0",
|
||||||
"black==19.10b0",
|
"black==20.8b1",
|
||||||
"flake8-comprehensions",
|
"flake8-comprehensions",
|
||||||
"flake8",
|
"flake8",
|
||||||
]
|
]
|
||||||
|
|
||||||
CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.790", "mypy-zope==0.2.8"]
|
CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.11"]
|
||||||
|
|
||||||
# Dependencies which are exclusively required by unit test code. This is
|
# Dependencies which are exclusively required by unit test code. This is
|
||||||
# NOT a list of all modules that are necessary to run the unit tests.
|
# NOT a list of all modules that are necessary to run the unit tests.
|
||||||
|
|||||||
@@ -89,12 +89,16 @@ class SortedDict(Dict[_KT, _VT]):
|
|||||||
def __reduce__(
|
def __reduce__(
|
||||||
self,
|
self,
|
||||||
) -> Tuple[
|
) -> Tuple[
|
||||||
Type[SortedDict[_KT, _VT]], Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]],
|
Type[SortedDict[_KT, _VT]],
|
||||||
|
Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]],
|
||||||
]: ...
|
]: ...
|
||||||
def __repr__(self) -> str: ...
|
def __repr__(self) -> str: ...
|
||||||
def _check(self) -> None: ...
|
def _check(self) -> None: ...
|
||||||
def islice(
|
def islice(
|
||||||
self, start: Optional[int] = ..., stop: Optional[int] = ..., reverse=bool,
|
self,
|
||||||
|
start: Optional[int] = ...,
|
||||||
|
stop: Optional[int] = ...,
|
||||||
|
reverse=bool,
|
||||||
) -> Iterator[_KT]: ...
|
) -> Iterator[_KT]: ...
|
||||||
def bisect_left(self, value: _KT) -> int: ...
|
def bisect_left(self, value: _KT) -> int: ...
|
||||||
def bisect_right(self, value: _KT) -> int: ...
|
def bisect_right(self, value: _KT) -> int: ...
|
||||||
|
|||||||
@@ -31,7 +31,9 @@ class SortedList(MutableSequence[_T]):
|
|||||||
|
|
||||||
DEFAULT_LOAD_FACTOR: int = ...
|
DEFAULT_LOAD_FACTOR: int = ...
|
||||||
def __init__(
|
def __init__(
|
||||||
self, iterable: Optional[Iterable[_T]] = ..., key: Optional[_Key[_T]] = ...,
|
self,
|
||||||
|
iterable: Optional[Iterable[_T]] = ...,
|
||||||
|
key: Optional[_Key[_T]] = ...,
|
||||||
): ...
|
): ...
|
||||||
# NB: currently mypy does not honour return type, see mypy #3307
|
# NB: currently mypy does not honour return type, see mypy #3307
|
||||||
@overload
|
@overload
|
||||||
@@ -76,10 +78,18 @@ class SortedList(MutableSequence[_T]):
|
|||||||
def __len__(self) -> int: ...
|
def __len__(self) -> int: ...
|
||||||
def reverse(self) -> None: ...
|
def reverse(self) -> None: ...
|
||||||
def islice(
|
def islice(
|
||||||
self, start: Optional[int] = ..., stop: Optional[int] = ..., reverse=bool,
|
self,
|
||||||
|
start: Optional[int] = ...,
|
||||||
|
stop: Optional[int] = ...,
|
||||||
|
reverse=bool,
|
||||||
) -> Iterator[_T]: ...
|
) -> Iterator[_T]: ...
|
||||||
def _islice(
|
def _islice(
|
||||||
self, min_pos: int, min_idx: int, max_pos: int, max_idx: int, reverse: bool,
|
self,
|
||||||
|
min_pos: int,
|
||||||
|
min_idx: int,
|
||||||
|
max_pos: int,
|
||||||
|
max_idx: int,
|
||||||
|
reverse: bool,
|
||||||
) -> Iterator[_T]: ...
|
) -> Iterator[_T]: ...
|
||||||
def irange(
|
def irange(
|
||||||
self,
|
self,
|
||||||
|
|||||||
@@ -15,13 +15,23 @@
|
|||||||
|
|
||||||
"""Contains *incomplete* type hints for txredisapi.
|
"""Contains *incomplete* type hints for txredisapi.
|
||||||
"""
|
"""
|
||||||
|
from typing import Any, List, Optional, Type, Union
|
||||||
from typing import List, Optional, Type, Union
|
|
||||||
|
|
||||||
class RedisProtocol:
|
class RedisProtocol:
|
||||||
def publish(self, channel: str, message: bytes): ...
|
def publish(self, channel: str, message: bytes): ...
|
||||||
|
async def ping(self) -> None: ...
|
||||||
|
async def set(
|
||||||
|
self,
|
||||||
|
key: str,
|
||||||
|
value: Any,
|
||||||
|
expire: Optional[int] = None,
|
||||||
|
pexpire: Optional[int] = None,
|
||||||
|
only_if_not_exists: bool = False,
|
||||||
|
only_if_exists: bool = False,
|
||||||
|
) -> None: ...
|
||||||
|
async def get(self, key: str) -> Any: ...
|
||||||
|
|
||||||
class SubscriberProtocol:
|
class SubscriberProtocol(RedisProtocol):
|
||||||
def __init__(self, *args, **kwargs): ...
|
def __init__(self, *args, **kwargs): ...
|
||||||
password: Optional[str]
|
password: Optional[str]
|
||||||
def subscribe(self, channels: Union[str, List[str]]): ...
|
def subscribe(self, channels: Union[str, List[str]]): ...
|
||||||
@@ -40,14 +50,13 @@ def lazyConnection(
|
|||||||
convertNumbers: bool = ...,
|
convertNumbers: bool = ...,
|
||||||
) -> RedisProtocol: ...
|
) -> RedisProtocol: ...
|
||||||
|
|
||||||
class SubscriberFactory:
|
|
||||||
def buildProtocol(self, addr): ...
|
|
||||||
|
|
||||||
class ConnectionHandler: ...
|
class ConnectionHandler: ...
|
||||||
|
|
||||||
class RedisFactory:
|
class RedisFactory:
|
||||||
continueTrying: bool
|
continueTrying: bool
|
||||||
handler: RedisProtocol
|
handler: RedisProtocol
|
||||||
|
pool: List[RedisProtocol]
|
||||||
|
replyTimeout: Optional[int]
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
uuid: str,
|
uuid: str,
|
||||||
@@ -60,3 +69,7 @@ class RedisFactory:
|
|||||||
replyTimeout: Optional[int] = None,
|
replyTimeout: Optional[int] = None,
|
||||||
convertNumbers: Optional[int] = True,
|
convertNumbers: Optional[int] = True,
|
||||||
): ...
|
): ...
|
||||||
|
def buildProtocol(self, addr) -> RedisProtocol: ...
|
||||||
|
|
||||||
|
class SubscriberFactory(RedisFactory):
|
||||||
|
def __init__(self): ...
|
||||||
|
|||||||
@@ -48,7 +48,7 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
__version__ = "1.26.0rc1"
|
__version__ = "1.29.0"
|
||||||
|
|
||||||
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
|
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
|
||||||
# We import here so that we don't have to install a bunch of deps when
|
# We import here so that we don't have to install a bunch of deps when
|
||||||
|
|||||||
@@ -168,7 +168,7 @@ class Auth:
|
|||||||
rights: str = "access",
|
rights: str = "access",
|
||||||
allow_expired: bool = False,
|
allow_expired: bool = False,
|
||||||
) -> synapse.types.Requester:
|
) -> synapse.types.Requester:
|
||||||
""" Get a registered user's ID.
|
"""Get a registered user's ID.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request: An HTTP request with an access_token query parameter.
|
request: An HTTP request with an access_token query parameter.
|
||||||
@@ -294,9 +294,12 @@ class Auth:
|
|||||||
return user_id, app_service
|
return user_id, app_service
|
||||||
|
|
||||||
async def get_user_by_access_token(
|
async def get_user_by_access_token(
|
||||||
self, token: str, rights: str = "access", allow_expired: bool = False,
|
self,
|
||||||
|
token: str,
|
||||||
|
rights: str = "access",
|
||||||
|
allow_expired: bool = False,
|
||||||
) -> TokenLookupResult:
|
) -> TokenLookupResult:
|
||||||
""" Validate access token and get user_id from it
|
"""Validate access token and get user_id from it
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
token: The access token to get the user by
|
token: The access token to get the user by
|
||||||
@@ -489,7 +492,7 @@ class Auth:
|
|||||||
return service
|
return service
|
||||||
|
|
||||||
async def is_server_admin(self, user: UserID) -> bool:
|
async def is_server_admin(self, user: UserID) -> bool:
|
||||||
""" Check if the given user is a local server admin.
|
"""Check if the given user is a local server admin.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
user: user to check
|
user: user to check
|
||||||
@@ -500,7 +503,10 @@ class Auth:
|
|||||||
return await self.store.is_server_admin(user)
|
return await self.store.is_server_admin(user)
|
||||||
|
|
||||||
def compute_auth_events(
|
def compute_auth_events(
|
||||||
self, event, current_state_ids: StateMap[str], for_verification: bool = False,
|
self,
|
||||||
|
event,
|
||||||
|
current_state_ids: StateMap[str],
|
||||||
|
for_verification: bool = False,
|
||||||
) -> List[str]:
|
) -> List[str]:
|
||||||
"""Given an event and current state return the list of event IDs used
|
"""Given an event and current state return the list of event IDs used
|
||||||
to auth an event.
|
to auth an event.
|
||||||
|
|||||||
@@ -27,6 +27,11 @@ MAX_ALIAS_LENGTH = 255
|
|||||||
# the maximum length for a user id is 255 characters
|
# the maximum length for a user id is 255 characters
|
||||||
MAX_USERID_LENGTH = 255
|
MAX_USERID_LENGTH = 255
|
||||||
|
|
||||||
|
# The maximum length for a group id is 255 characters
|
||||||
|
MAX_GROUPID_LENGTH = 255
|
||||||
|
MAX_GROUP_CATEGORYID_LENGTH = 255
|
||||||
|
MAX_GROUP_ROLEID_LENGTH = 255
|
||||||
|
|
||||||
|
|
||||||
class Membership:
|
class Membership:
|
||||||
|
|
||||||
@@ -93,11 +98,14 @@ class EventTypes:
|
|||||||
|
|
||||||
Retention = "m.room.retention"
|
Retention = "m.room.retention"
|
||||||
|
|
||||||
Presence = "m.presence"
|
|
||||||
|
|
||||||
Dummy = "org.matrix.dummy_event"
|
Dummy = "org.matrix.dummy_event"
|
||||||
|
|
||||||
|
|
||||||
|
class EduTypes:
|
||||||
|
Presence = "m.presence"
|
||||||
|
RoomKeyRequest = "m.room_key_request"
|
||||||
|
|
||||||
|
|
||||||
class RejectedReason:
|
class RejectedReason:
|
||||||
AUTH_ERROR = "auth_error"
|
AUTH_ERROR = "auth_error"
|
||||||
|
|
||||||
@@ -128,8 +136,7 @@ class UserTypes:
|
|||||||
|
|
||||||
|
|
||||||
class RelationTypes:
|
class RelationTypes:
|
||||||
"""The types of relations known to this server.
|
"""The types of relations known to this server."""
|
||||||
"""
|
|
||||||
|
|
||||||
ANNOTATION = "m.annotation"
|
ANNOTATION = "m.annotation"
|
||||||
REPLACE = "m.replace"
|
REPLACE = "m.replace"
|
||||||
|
|||||||
@@ -390,8 +390,7 @@ class InvalidCaptchaError(SynapseError):
|
|||||||
|
|
||||||
|
|
||||||
class LimitExceededError(SynapseError):
|
class LimitExceededError(SynapseError):
|
||||||
"""A client has sent too many requests and is being throttled.
|
"""A client has sent too many requests and is being throttled."""
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@@ -408,8 +407,7 @@ class LimitExceededError(SynapseError):
|
|||||||
|
|
||||||
|
|
||||||
class RoomKeysVersionError(SynapseError):
|
class RoomKeysVersionError(SynapseError):
|
||||||
"""A client has tried to upload to a non-current version of the room_keys store
|
"""A client has tried to upload to a non-current version of the room_keys store"""
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, current_version: str):
|
def __init__(self, current_version: str):
|
||||||
"""
|
"""
|
||||||
@@ -426,7 +424,9 @@ class UnsupportedRoomVersionError(SynapseError):
|
|||||||
|
|
||||||
def __init__(self, msg: str = "Homeserver does not support this room version"):
|
def __init__(self, msg: str = "Homeserver does not support this room version"):
|
||||||
super().__init__(
|
super().__init__(
|
||||||
code=400, msg=msg, errcode=Codes.UNSUPPORTED_ROOM_VERSION,
|
code=400,
|
||||||
|
msg=msg,
|
||||||
|
errcode=Codes.UNSUPPORTED_ROOM_VERSION,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -461,8 +461,7 @@ class IncompatibleRoomVersionError(SynapseError):
|
|||||||
|
|
||||||
|
|
||||||
class PasswordRefusedError(SynapseError):
|
class PasswordRefusedError(SynapseError):
|
||||||
"""A password has been refused, either during password reset/change or registration.
|
"""A password has been refused, either during password reset/change or registration."""
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@@ -470,7 +469,9 @@ class PasswordRefusedError(SynapseError):
|
|||||||
errcode: str = Codes.WEAK_PASSWORD,
|
errcode: str = Codes.WEAK_PASSWORD,
|
||||||
):
|
):
|
||||||
super().__init__(
|
super().__init__(
|
||||||
code=400, msg=msg, errcode=errcode,
|
code=400,
|
||||||
|
msg=msg,
|
||||||
|
errcode=errcode,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -493,7 +494,7 @@ class RequestSendFailed(RuntimeError):
|
|||||||
|
|
||||||
|
|
||||||
def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs):
|
def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs):
|
||||||
""" Utility method for constructing an error response for client-server
|
"""Utility method for constructing an error response for client-server
|
||||||
interactions.
|
interactions.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -510,7 +511,7 @@ def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs):
|
|||||||
|
|
||||||
|
|
||||||
class FederationError(RuntimeError):
|
class FederationError(RuntimeError):
|
||||||
""" This class is used to inform remote homeservers about erroneous
|
"""This class is used to inform remote homeservers about erroneous
|
||||||
PDUs they sent us.
|
PDUs they sent us.
|
||||||
|
|
||||||
FATAL: The remote server could not interpret the source event.
|
FATAL: The remote server could not interpret the source event.
|
||||||
|
|||||||
@@ -56,8 +56,7 @@ class UserPresenceState(
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def default(cls, user_id):
|
def default(cls, user_id):
|
||||||
"""Returns a default presence state.
|
"""Returns a default presence state."""
|
||||||
"""
|
|
||||||
return cls(
|
return cls(
|
||||||
user_id=user_id,
|
user_id=user_id,
|
||||||
state=PresenceState.OFFLINE,
|
state=PresenceState.OFFLINE,
|
||||||
|
|||||||
@@ -14,7 +14,7 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from typing import Any, Optional, Tuple
|
from typing import Hashable, Optional, Tuple
|
||||||
|
|
||||||
from synapse.api.errors import LimitExceededError
|
from synapse.api.errors import LimitExceededError
|
||||||
from synapse.types import Requester
|
from synapse.types import Requester
|
||||||
@@ -42,7 +42,9 @@ class Ratelimiter:
|
|||||||
# * How many times an action has occurred since a point in time
|
# * How many times an action has occurred since a point in time
|
||||||
# * The point in time
|
# * The point in time
|
||||||
# * The rate_hz of this particular entry. This can vary per request
|
# * The rate_hz of this particular entry. This can vary per request
|
||||||
self.actions = OrderedDict() # type: OrderedDict[Any, Tuple[float, int, float]]
|
self.actions = (
|
||||||
|
OrderedDict()
|
||||||
|
) # type: OrderedDict[Hashable, Tuple[float, int, float]]
|
||||||
|
|
||||||
def can_requester_do_action(
|
def can_requester_do_action(
|
||||||
self,
|
self,
|
||||||
@@ -82,7 +84,7 @@ class Ratelimiter:
|
|||||||
|
|
||||||
def can_do_action(
|
def can_do_action(
|
||||||
self,
|
self,
|
||||||
key: Any,
|
key: Hashable,
|
||||||
rate_hz: Optional[float] = None,
|
rate_hz: Optional[float] = None,
|
||||||
burst_count: Optional[int] = None,
|
burst_count: Optional[int] = None,
|
||||||
update: bool = True,
|
update: bool = True,
|
||||||
@@ -175,7 +177,7 @@ class Ratelimiter:
|
|||||||
|
|
||||||
def ratelimit(
|
def ratelimit(
|
||||||
self,
|
self,
|
||||||
key: Any,
|
key: Hashable,
|
||||||
rate_hz: Optional[float] = None,
|
rate_hz: Optional[float] = None,
|
||||||
burst_count: Optional[int] = None,
|
burst_count: Optional[int] = None,
|
||||||
update: bool = True,
|
update: bool = True,
|
||||||
|
|||||||
@@ -42,6 +42,8 @@ class ConsentURIBuilder:
|
|||||||
"""
|
"""
|
||||||
if hs_config.form_secret is None:
|
if hs_config.form_secret is None:
|
||||||
raise ConfigError("form_secret not set in config")
|
raise ConfigError("form_secret not set in config")
|
||||||
|
if hs_config.public_baseurl is None:
|
||||||
|
raise ConfigError("public_baseurl not set in config")
|
||||||
|
|
||||||
self._hmac_secret = hs_config.form_secret.encode("utf-8")
|
self._hmac_secret = hs_config.form_secret.encode("utf-8")
|
||||||
self._public_baseurl = hs_config.public_baseurl
|
self._public_baseurl = hs_config.public_baseurl
|
||||||
|
|||||||
@@ -17,8 +17,6 @@ import sys
|
|||||||
|
|
||||||
from synapse import python_dependencies # noqa: E402
|
from synapse import python_dependencies # noqa: E402
|
||||||
|
|
||||||
sys.dont_write_bytecode = True
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|||||||
@@ -16,6 +16,7 @@
|
|||||||
import gc
|
import gc
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import platform
|
||||||
import signal
|
import signal
|
||||||
import socket
|
import socket
|
||||||
import sys
|
import sys
|
||||||
@@ -57,7 +58,7 @@ def register_sighup(func, *args, **kwargs):
|
|||||||
|
|
||||||
|
|
||||||
def start_worker_reactor(appname, config, run_command=reactor.run):
|
def start_worker_reactor(appname, config, run_command=reactor.run):
|
||||||
""" Run the reactor in the main process
|
"""Run the reactor in the main process
|
||||||
|
|
||||||
Daemonizes if necessary, and then configures some resources, before starting
|
Daemonizes if necessary, and then configures some resources, before starting
|
||||||
the reactor. Pulls configuration from the 'worker' settings in 'config'.
|
the reactor. Pulls configuration from the 'worker' settings in 'config'.
|
||||||
@@ -92,7 +93,7 @@ def start_reactor(
|
|||||||
logger,
|
logger,
|
||||||
run_command=reactor.run,
|
run_command=reactor.run,
|
||||||
):
|
):
|
||||||
""" Run the reactor in the main process
|
"""Run the reactor in the main process
|
||||||
|
|
||||||
Daemonizes if necessary, and then configures some resources, before starting
|
Daemonizes if necessary, and then configures some resources, before starting
|
||||||
the reactor
|
the reactor
|
||||||
@@ -312,9 +313,7 @@ async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerCon
|
|||||||
refresh_certificate(hs)
|
refresh_certificate(hs)
|
||||||
|
|
||||||
# Start the tracer
|
# Start the tracer
|
||||||
synapse.logging.opentracing.init_tracer( # type: ignore[attr-defined] # noqa
|
synapse.logging.opentracing.init_tracer(hs) # type: ignore[attr-defined] # noqa
|
||||||
hs
|
|
||||||
)
|
|
||||||
|
|
||||||
# It is now safe to start your Synapse.
|
# It is now safe to start your Synapse.
|
||||||
hs.start_listening(listeners)
|
hs.start_listening(listeners)
|
||||||
@@ -339,7 +338,7 @@ async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerCon
|
|||||||
# rest of time. Doing so means less work each GC (hopefully).
|
# rest of time. Doing so means less work each GC (hopefully).
|
||||||
#
|
#
|
||||||
# This only works on Python 3.7
|
# This only works on Python 3.7
|
||||||
if sys.version_info >= (3, 7):
|
if platform.python_implementation() == "CPython" and sys.version_info >= (3, 7):
|
||||||
gc.collect()
|
gc.collect()
|
||||||
gc.freeze()
|
gc.freeze()
|
||||||
|
|
||||||
@@ -369,8 +368,7 @@ def setup_sentry(hs):
|
|||||||
|
|
||||||
|
|
||||||
def setup_sdnotify(hs):
|
def setup_sdnotify(hs):
|
||||||
"""Adds process state hooks to tell systemd what we are up to.
|
"""Adds process state hooks to tell systemd what we are up to."""
|
||||||
"""
|
|
||||||
|
|
||||||
# Tell systemd our state, if we're using it. This will silently fail if
|
# Tell systemd our state, if we're using it. This will silently fail if
|
||||||
# we're not using systemd.
|
# we're not using systemd.
|
||||||
@@ -404,8 +402,7 @@ def install_dns_limiter(reactor, max_dns_requests_in_flight=100):
|
|||||||
|
|
||||||
|
|
||||||
class _LimitedHostnameResolver:
|
class _LimitedHostnameResolver:
|
||||||
"""Wraps a IHostnameResolver, limiting the number of in-flight DNS lookups.
|
"""Wraps a IHostnameResolver, limiting the number of in-flight DNS lookups."""
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, resolver, max_dns_requests_in_flight):
|
def __init__(self, resolver, max_dns_requests_in_flight):
|
||||||
self._resolver = resolver
|
self._resolver = resolver
|
||||||
|
|||||||
@@ -210,7 +210,9 @@ def start(config_options):
|
|||||||
config.update_user_directory = False
|
config.update_user_directory = False
|
||||||
config.run_background_tasks = False
|
config.run_background_tasks = False
|
||||||
config.start_pushers = False
|
config.start_pushers = False
|
||||||
|
config.pusher_shard_config.instances = []
|
||||||
config.send_federation = False
|
config.send_federation = False
|
||||||
|
config.federation_shard_config.instances = []
|
||||||
|
|
||||||
synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||||
|
|
||||||
|
|||||||
@@ -22,6 +22,8 @@ from typing import Dict, Iterable, Optional, Set
|
|||||||
from typing_extensions import ContextManager
|
from typing_extensions import ContextManager
|
||||||
|
|
||||||
from twisted.internet import address
|
from twisted.internet import address
|
||||||
|
from twisted.web.resource import IResource
|
||||||
|
from twisted.web.server import Request
|
||||||
|
|
||||||
import synapse
|
import synapse
|
||||||
import synapse.events
|
import synapse.events
|
||||||
@@ -90,9 +92,8 @@ from synapse.replication.tcp.streams import (
|
|||||||
ToDeviceStream,
|
ToDeviceStream,
|
||||||
)
|
)
|
||||||
from synapse.rest.admin import register_servlets_for_media_repo
|
from synapse.rest.admin import register_servlets_for_media_repo
|
||||||
from synapse.rest.client.v1 import events, room
|
from synapse.rest.client.v1 import events, login, room
|
||||||
from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet
|
from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet
|
||||||
from synapse.rest.client.v1.login import LoginRestServlet
|
|
||||||
from synapse.rest.client.v1.profile import (
|
from synapse.rest.client.v1.profile import (
|
||||||
ProfileAvatarURLRestServlet,
|
ProfileAvatarURLRestServlet,
|
||||||
ProfileDisplaynameRestServlet,
|
ProfileDisplaynameRestServlet,
|
||||||
@@ -127,6 +128,7 @@ from synapse.rest.client.v2_alpha.sendtodevice import SendToDeviceRestServlet
|
|||||||
from synapse.rest.client.versions import VersionsRestServlet
|
from synapse.rest.client.versions import VersionsRestServlet
|
||||||
from synapse.rest.health import HealthResource
|
from synapse.rest.health import HealthResource
|
||||||
from synapse.rest.key.v2 import KeyApiV2Resource
|
from synapse.rest.key.v2 import KeyApiV2Resource
|
||||||
|
from synapse.rest.synapse.client import build_synapse_client_resource_tree
|
||||||
from synapse.server import HomeServer, cache_in_self
|
from synapse.server import HomeServer, cache_in_self
|
||||||
from synapse.storage.databases.main.censor_events import CensorEventsStore
|
from synapse.storage.databases.main.censor_events import CensorEventsStore
|
||||||
from synapse.storage.databases.main.client_ips import ClientIpWorkerStore
|
from synapse.storage.databases.main.client_ips import ClientIpWorkerStore
|
||||||
@@ -189,7 +191,7 @@ class KeyUploadServlet(RestServlet):
|
|||||||
self.http_client = hs.get_simple_http_client()
|
self.http_client = hs.get_simple_http_client()
|
||||||
self.main_uri = hs.config.worker_main_http_uri
|
self.main_uri = hs.config.worker_main_http_uri
|
||||||
|
|
||||||
async def on_POST(self, request, device_id):
|
async def on_POST(self, request: Request, device_id: Optional[str]):
|
||||||
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
requester = await self.auth.get_user_by_req(request, allow_guest=True)
|
||||||
user_id = requester.user.to_string()
|
user_id = requester.user.to_string()
|
||||||
body = parse_json_object_from_request(request)
|
body = parse_json_object_from_request(request)
|
||||||
@@ -222,10 +224,12 @@ class KeyUploadServlet(RestServlet):
|
|||||||
header: request.requestHeaders.getRawHeaders(header, [])
|
header: request.requestHeaders.getRawHeaders(header, [])
|
||||||
for header in (b"Authorization", b"User-Agent")
|
for header in (b"Authorization", b"User-Agent")
|
||||||
}
|
}
|
||||||
# Add the previous hop the the X-Forwarded-For header.
|
# Add the previous hop to the X-Forwarded-For header.
|
||||||
x_forwarded_for = request.requestHeaders.getRawHeaders(
|
x_forwarded_for = request.requestHeaders.getRawHeaders(
|
||||||
b"X-Forwarded-For", []
|
b"X-Forwarded-For", []
|
||||||
)
|
)
|
||||||
|
# we use request.client here, since we want the previous hop, not the
|
||||||
|
# original client (as returned by request.getClientAddress()).
|
||||||
if isinstance(request.client, (address.IPv4Address, address.IPv6Address)):
|
if isinstance(request.client, (address.IPv4Address, address.IPv6Address)):
|
||||||
previous_host = request.client.host.encode("ascii")
|
previous_host = request.client.host.encode("ascii")
|
||||||
# If the header exists, add to the comma-separated list of the first
|
# If the header exists, add to the comma-separated list of the first
|
||||||
@@ -238,6 +242,14 @@ class KeyUploadServlet(RestServlet):
|
|||||||
x_forwarded_for = [previous_host]
|
x_forwarded_for = [previous_host]
|
||||||
headers[b"X-Forwarded-For"] = x_forwarded_for
|
headers[b"X-Forwarded-For"] = x_forwarded_for
|
||||||
|
|
||||||
|
# Replicate the original X-Forwarded-Proto header. Note that
|
||||||
|
# XForwardedForRequest overrides isSecure() to give us the original protocol
|
||||||
|
# used by the client, as opposed to the protocol used by our upstream proxy
|
||||||
|
# - which is what we want here.
|
||||||
|
headers[b"X-Forwarded-Proto"] = [
|
||||||
|
b"https" if request.isSecure() else b"http"
|
||||||
|
]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = await self.http_client.post_json_get_json(
|
result = await self.http_client.post_json_get_json(
|
||||||
self.main_uri + request.uri.decode("ascii"), body, headers=headers
|
self.main_uri + request.uri.decode("ascii"), body, headers=headers
|
||||||
@@ -420,8 +432,7 @@ class GenericWorkerPresence(BasePresenceHandler):
|
|||||||
]
|
]
|
||||||
|
|
||||||
async def set_state(self, target_user, state, ignore_status_msg=False):
|
async def set_state(self, target_user, state, ignore_status_msg=False):
|
||||||
"""Set the presence state of the user.
|
"""Set the presence state of the user."""
|
||||||
"""
|
|
||||||
presence = state["presence"]
|
presence = state["presence"]
|
||||||
|
|
||||||
valid_presence = (
|
valid_presence = (
|
||||||
@@ -507,7 +518,7 @@ class GenericWorkerServer(HomeServer):
|
|||||||
site_tag = port
|
site_tag = port
|
||||||
|
|
||||||
# We always include a health resource.
|
# We always include a health resource.
|
||||||
resources = {"/health": HealthResource()}
|
resources = {"/health": HealthResource()} # type: Dict[str, IResource]
|
||||||
|
|
||||||
for res in listener_config.http_options.resources:
|
for res in listener_config.http_options.resources:
|
||||||
for name in res.names:
|
for name in res.names:
|
||||||
@@ -517,7 +528,7 @@ class GenericWorkerServer(HomeServer):
|
|||||||
resource = JsonResource(self, canonical_json=False)
|
resource = JsonResource(self, canonical_json=False)
|
||||||
|
|
||||||
RegisterRestServlet(self).register(resource)
|
RegisterRestServlet(self).register(resource)
|
||||||
LoginRestServlet(self).register(resource)
|
login.register_servlets(self, resource)
|
||||||
ThreepidRestServlet(self).register(resource)
|
ThreepidRestServlet(self).register(resource)
|
||||||
DevicesRestServlet(self).register(resource)
|
DevicesRestServlet(self).register(resource)
|
||||||
KeyQueryServlet(self).register(resource)
|
KeyQueryServlet(self).register(resource)
|
||||||
@@ -557,6 +568,8 @@ class GenericWorkerServer(HomeServer):
|
|||||||
groups.register_servlets(self, resource)
|
groups.register_servlets(self, resource)
|
||||||
|
|
||||||
resources.update({CLIENT_API_PREFIX: resource})
|
resources.update({CLIENT_API_PREFIX: resource})
|
||||||
|
|
||||||
|
resources.update(build_synapse_client_resource_tree(self))
|
||||||
elif name == "federation":
|
elif name == "federation":
|
||||||
resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
|
resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
|
||||||
elif name == "media":
|
elif name == "media":
|
||||||
@@ -643,9 +656,6 @@ class GenericWorkerServer(HomeServer):
|
|||||||
|
|
||||||
self.get_tcp_replication().start_replication(self)
|
self.get_tcp_replication().start_replication(self)
|
||||||
|
|
||||||
async def remove_pusher(self, app_id, push_key, user_id):
|
|
||||||
self.get_tcp_replication().send_remove_pusher(app_id, push_key, user_id)
|
|
||||||
|
|
||||||
@cache_in_self
|
@cache_in_self
|
||||||
def get_replication_data_handler(self):
|
def get_replication_data_handler(self):
|
||||||
return GenericWorkerReplicationHandler(self)
|
return GenericWorkerReplicationHandler(self)
|
||||||
@@ -920,22 +930,6 @@ def start(config_options):
|
|||||||
# For other worker types we force this to off.
|
# For other worker types we force this to off.
|
||||||
config.appservice.notify_appservices = False
|
config.appservice.notify_appservices = False
|
||||||
|
|
||||||
if config.worker_app == "synapse.app.pusher":
|
|
||||||
if config.server.start_pushers:
|
|
||||||
sys.stderr.write(
|
|
||||||
"\nThe pushers must be disabled in the main synapse process"
|
|
||||||
"\nbefore they can be run in a separate worker."
|
|
||||||
"\nPlease add ``start_pushers: false`` to the main config"
|
|
||||||
"\n"
|
|
||||||
)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
# Force the pushers to start since they will be disabled in the main config
|
|
||||||
config.server.start_pushers = True
|
|
||||||
else:
|
|
||||||
# For other worker types we force this to off.
|
|
||||||
config.server.start_pushers = False
|
|
||||||
|
|
||||||
if config.worker_app == "synapse.app.user_dir":
|
if config.worker_app == "synapse.app.user_dir":
|
||||||
if config.server.update_user_directory:
|
if config.server.update_user_directory:
|
||||||
sys.stderr.write(
|
sys.stderr.write(
|
||||||
@@ -952,22 +946,6 @@ def start(config_options):
|
|||||||
# For other worker types we force this to off.
|
# For other worker types we force this to off.
|
||||||
config.server.update_user_directory = False
|
config.server.update_user_directory = False
|
||||||
|
|
||||||
if config.worker_app == "synapse.app.federation_sender":
|
|
||||||
if config.worker.send_federation:
|
|
||||||
sys.stderr.write(
|
|
||||||
"\nThe send_federation must be disabled in the main synapse process"
|
|
||||||
"\nbefore they can be run in a separate worker."
|
|
||||||
"\nPlease add ``send_federation: false`` to the main config"
|
|
||||||
"\n"
|
|
||||||
)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
# Force the pushers to start since they will be disabled in the main config
|
|
||||||
config.worker.send_federation = True
|
|
||||||
else:
|
|
||||||
# For other worker types we force this to off.
|
|
||||||
config.worker.send_federation = False
|
|
||||||
|
|
||||||
synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||||
|
|
||||||
hs = GenericWorkerServer(
|
hs = GenericWorkerServer(
|
||||||
|
|||||||
@@ -60,8 +60,7 @@ from synapse.rest import ClientRestResource
|
|||||||
from synapse.rest.admin import AdminRestResource
|
from synapse.rest.admin import AdminRestResource
|
||||||
from synapse.rest.health import HealthResource
|
from synapse.rest.health import HealthResource
|
||||||
from synapse.rest.key.v2 import KeyApiV2Resource
|
from synapse.rest.key.v2 import KeyApiV2Resource
|
||||||
from synapse.rest.synapse.client.pick_idp import PickIdpResource
|
from synapse.rest.synapse.client import build_synapse_client_resource_tree
|
||||||
from synapse.rest.synapse.client.pick_username import pick_username_resource
|
|
||||||
from synapse.rest.well_known import WellKnownResource
|
from synapse.rest.well_known import WellKnownResource
|
||||||
from synapse.server import HomeServer
|
from synapse.server import HomeServer
|
||||||
from synapse.storage import DataStore
|
from synapse.storage import DataStore
|
||||||
@@ -190,21 +189,10 @@ class SynapseHomeServer(HomeServer):
|
|||||||
"/_matrix/client/versions": client_resource,
|
"/_matrix/client/versions": client_resource,
|
||||||
"/.well-known/matrix/client": WellKnownResource(self),
|
"/.well-known/matrix/client": WellKnownResource(self),
|
||||||
"/_synapse/admin": AdminRestResource(self),
|
"/_synapse/admin": AdminRestResource(self),
|
||||||
"/_synapse/client/pick_username": pick_username_resource(self),
|
**build_synapse_client_resource_tree(self),
|
||||||
"/_synapse/client/pick_idp": PickIdpResource(self),
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
if self.get_config().oidc_enabled:
|
|
||||||
from synapse.rest.oidc import OIDCResource
|
|
||||||
|
|
||||||
resources["/_synapse/oidc"] = OIDCResource(self)
|
|
||||||
|
|
||||||
if self.get_config().saml2_enabled:
|
|
||||||
from synapse.rest.saml2 import SAML2Resource
|
|
||||||
|
|
||||||
resources["/_matrix/saml2"] = SAML2Resource(self)
|
|
||||||
|
|
||||||
if self.get_config().threepid_behaviour_email == ThreepidBehaviour.LOCAL:
|
if self.get_config().threepid_behaviour_email == ThreepidBehaviour.LOCAL:
|
||||||
from synapse.rest.synapse.client.password_reset import (
|
from synapse.rest.synapse.client.password_reset import (
|
||||||
PasswordResetSubmitTokenResource,
|
PasswordResetSubmitTokenResource,
|
||||||
|
|||||||
@@ -93,15 +93,20 @@ async def phone_stats_home(hs, stats, stats_process=_stats_process):
|
|||||||
|
|
||||||
stats["daily_active_users"] = await hs.get_datastore().count_daily_users()
|
stats["daily_active_users"] = await hs.get_datastore().count_daily_users()
|
||||||
stats["monthly_active_users"] = await hs.get_datastore().count_monthly_users()
|
stats["monthly_active_users"] = await hs.get_datastore().count_monthly_users()
|
||||||
|
daily_active_e2ee_rooms = await hs.get_datastore().count_daily_active_e2ee_rooms()
|
||||||
|
stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms
|
||||||
|
stats["daily_e2ee_messages"] = await hs.get_datastore().count_daily_e2ee_messages()
|
||||||
|
daily_sent_e2ee_messages = await hs.get_datastore().count_daily_sent_e2ee_messages()
|
||||||
|
stats["daily_sent_e2ee_messages"] = daily_sent_e2ee_messages
|
||||||
stats["daily_active_rooms"] = await hs.get_datastore().count_daily_active_rooms()
|
stats["daily_active_rooms"] = await hs.get_datastore().count_daily_active_rooms()
|
||||||
stats["daily_messages"] = await hs.get_datastore().count_daily_messages()
|
stats["daily_messages"] = await hs.get_datastore().count_daily_messages()
|
||||||
|
daily_sent_messages = await hs.get_datastore().count_daily_sent_messages()
|
||||||
|
stats["daily_sent_messages"] = daily_sent_messages
|
||||||
|
|
||||||
r30_results = await hs.get_datastore().count_r30_users()
|
r30_results = await hs.get_datastore().count_r30_users()
|
||||||
for name, count in r30_results.items():
|
for name, count in r30_results.items():
|
||||||
stats["r30_users_" + name] = count
|
stats["r30_users_" + name] = count
|
||||||
|
|
||||||
daily_sent_messages = await hs.get_datastore().count_daily_sent_messages()
|
|
||||||
stats["daily_sent_messages"] = daily_sent_messages
|
|
||||||
stats["cache_factor"] = hs.config.caches.global_factor
|
stats["cache_factor"] = hs.config.caches.global_factor
|
||||||
stats["event_cache_size"] = hs.config.caches.event_cache_size
|
stats["event_cache_size"] = hs.config.caches.event_cache_size
|
||||||
|
|
||||||
|
|||||||
@@ -166,7 +166,10 @@ class ApplicationService:
|
|||||||
|
|
||||||
@cached(num_args=1, cache_context=True)
|
@cached(num_args=1, cache_context=True)
|
||||||
async def matches_user_in_member_list(
|
async def matches_user_in_member_list(
|
||||||
self, room_id: str, store: "DataStore", cache_context: _CacheContext,
|
self,
|
||||||
|
room_id: str,
|
||||||
|
store: "DataStore",
|
||||||
|
cache_context: _CacheContext,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
"""Check if this service is interested a room based upon it's membership
|
"""Check if this service is interested a room based upon it's membership
|
||||||
|
|
||||||
|
|||||||
@@ -76,9 +76,6 @@ def _is_valid_3pe_result(r, field):
|
|||||||
fields = r["fields"]
|
fields = r["fields"]
|
||||||
if not isinstance(fields, dict):
|
if not isinstance(fields, dict):
|
||||||
return False
|
return False
|
||||||
for k in fields.keys():
|
|
||||||
if not isinstance(fields[k], str):
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -230,7 +227,9 @@ class ApplicationServiceApi(SimpleHttpClient):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
await self.put_json(
|
await self.put_json(
|
||||||
uri=uri, json_body=body, args={"access_token": service.hs_token},
|
uri=uri,
|
||||||
|
json_body=body,
|
||||||
|
args={"access_token": service.hs_token},
|
||||||
)
|
)
|
||||||
sent_transactions_counter.labels(service.id).inc()
|
sent_transactions_counter.labels(service.id).inc()
|
||||||
sent_events_counter.labels(service.id).inc(len(events))
|
sent_events_counter.labels(service.id).inc(len(events))
|
||||||
|
|||||||
@@ -68,7 +68,7 @@ MAX_EPHEMERAL_EVENTS_PER_TRANSACTION = 100
|
|||||||
|
|
||||||
|
|
||||||
class ApplicationServiceScheduler:
|
class ApplicationServiceScheduler:
|
||||||
""" Public facing API for this module. Does the required DI to tie the
|
"""Public facing API for this module. Does the required DI to tie the
|
||||||
components together. This also serves as the "event_pool", which in this
|
components together. This also serves as the "event_pool", which in this
|
||||||
case is a simple array.
|
case is a simple array.
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -18,18 +18,18 @@
|
|||||||
import argparse
|
import argparse
|
||||||
import errno
|
import errno
|
||||||
import os
|
import os
|
||||||
import time
|
|
||||||
import urllib.parse
|
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from hashlib import sha256
|
from hashlib import sha256
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
from typing import Any, Callable, Iterable, List, MutableMapping, Optional
|
from typing import Any, Iterable, List, MutableMapping, Optional, Union
|
||||||
|
|
||||||
import attr
|
import attr
|
||||||
import jinja2
|
import jinja2
|
||||||
import pkg_resources
|
import pkg_resources
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
|
from synapse.util.templates import _create_mxc_to_http_filter, _format_ts_filter
|
||||||
|
|
||||||
|
|
||||||
class ConfigError(Exception):
|
class ConfigError(Exception):
|
||||||
"""Represents a problem parsing the configuration
|
"""Represents a problem parsing the configuration
|
||||||
@@ -147,7 +147,20 @@ class Config:
|
|||||||
return int(value) * size
|
return int(value) * size
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def parse_duration(value):
|
def parse_duration(value: Union[str, int]) -> int:
|
||||||
|
"""Convert a duration as a string or integer to a number of milliseconds.
|
||||||
|
|
||||||
|
If an integer is provided it is treated as milliseconds and is unchanged.
|
||||||
|
|
||||||
|
String durations can have a suffix of 's', 'm', 'h', 'd', 'w', or 'y'.
|
||||||
|
No suffix is treated as milliseconds.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
value: The duration to parse.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The number of milliseconds in the duration.
|
||||||
|
"""
|
||||||
if isinstance(value, int):
|
if isinstance(value, int):
|
||||||
return value
|
return value
|
||||||
second = 1000
|
second = 1000
|
||||||
@@ -203,11 +216,30 @@ class Config:
|
|||||||
with open(file_path) as file_stream:
|
with open(file_path) as file_stream:
|
||||||
return file_stream.read()
|
return file_stream.read()
|
||||||
|
|
||||||
|
def read_template(self, filename: str) -> jinja2.Template:
|
||||||
|
"""Load a template file from disk.
|
||||||
|
|
||||||
|
This function will attempt to load the given template from the default Synapse
|
||||||
|
template directory.
|
||||||
|
|
||||||
|
Files read are treated as Jinja templates. The templates is not rendered yet
|
||||||
|
and has autoescape enabled.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filename: A template filename to read.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ConfigError: if the file's path is incorrect or otherwise cannot be read.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A jinja2 template.
|
||||||
|
"""
|
||||||
|
return self.read_templates([filename])[0]
|
||||||
|
|
||||||
def read_templates(
|
def read_templates(
|
||||||
self,
|
self,
|
||||||
filenames: List[str],
|
filenames: List[str],
|
||||||
custom_template_directory: Optional[str] = None,
|
custom_template_directory: Optional[str] = None,
|
||||||
autoescape: bool = False,
|
|
||||||
) -> List[jinja2.Template]:
|
) -> List[jinja2.Template]:
|
||||||
"""Load a list of template files from disk using the given variables.
|
"""Load a list of template files from disk using the given variables.
|
||||||
|
|
||||||
@@ -215,7 +247,8 @@ class Config:
|
|||||||
template directory. If `custom_template_directory` is supplied, that directory
|
template directory. If `custom_template_directory` is supplied, that directory
|
||||||
is tried first.
|
is tried first.
|
||||||
|
|
||||||
Files read are treated as Jinja templates. These templates are not rendered yet.
|
Files read are treated as Jinja templates. The templates are not rendered yet
|
||||||
|
and have autoescape enabled.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
filenames: A list of template filenames to read.
|
filenames: A list of template filenames to read.
|
||||||
@@ -223,16 +256,12 @@ class Config:
|
|||||||
custom_template_directory: A directory to try to look for the templates
|
custom_template_directory: A directory to try to look for the templates
|
||||||
before using the default Synapse template directory instead.
|
before using the default Synapse template directory instead.
|
||||||
|
|
||||||
autoescape: Whether to autoescape variables before inserting them into the
|
|
||||||
template.
|
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
ConfigError: if the file's path is incorrect or otherwise cannot be read.
|
ConfigError: if the file's path is incorrect or otherwise cannot be read.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
A list of jinja2 templates.
|
A list of jinja2 templates.
|
||||||
"""
|
"""
|
||||||
templates = []
|
|
||||||
search_directories = [self.default_template_dir]
|
search_directories = [self.default_template_dir]
|
||||||
|
|
||||||
# The loader will first look in the custom template directory (if specified) for the
|
# The loader will first look in the custom template directory (if specified) for the
|
||||||
@@ -248,8 +277,12 @@ class Config:
|
|||||||
# Search the custom template directory as well
|
# Search the custom template directory as well
|
||||||
search_directories.insert(0, custom_template_directory)
|
search_directories.insert(0, custom_template_directory)
|
||||||
|
|
||||||
|
# TODO: switch to synapse.util.templates.build_jinja_env
|
||||||
loader = jinja2.FileSystemLoader(search_directories)
|
loader = jinja2.FileSystemLoader(search_directories)
|
||||||
env = jinja2.Environment(loader=loader, autoescape=autoescape)
|
env = jinja2.Environment(
|
||||||
|
loader=loader,
|
||||||
|
autoescape=jinja2.select_autoescape(),
|
||||||
|
)
|
||||||
|
|
||||||
# Update the environment with our custom filters
|
# Update the environment with our custom filters
|
||||||
env.filters.update(
|
env.filters.update(
|
||||||
@@ -259,44 +292,8 @@ class Config:
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
for filename in filenames:
|
# Load the templates
|
||||||
# Load the template
|
return [env.get_template(filename) for filename in filenames]
|
||||||
template = env.get_template(filename)
|
|
||||||
templates.append(template)
|
|
||||||
|
|
||||||
return templates
|
|
||||||
|
|
||||||
|
|
||||||
def _format_ts_filter(value: int, format: str):
|
|
||||||
return time.strftime(format, time.localtime(value / 1000))
|
|
||||||
|
|
||||||
|
|
||||||
def _create_mxc_to_http_filter(public_baseurl: str) -> Callable:
|
|
||||||
"""Create and return a jinja2 filter that converts MXC urls to HTTP
|
|
||||||
|
|
||||||
Args:
|
|
||||||
public_baseurl: The public, accessible base URL of the homeserver
|
|
||||||
"""
|
|
||||||
|
|
||||||
def mxc_to_http_filter(value, width, height, resize_method="crop"):
|
|
||||||
if value[0:6] != "mxc://":
|
|
||||||
return ""
|
|
||||||
|
|
||||||
server_and_media_id = value[6:]
|
|
||||||
fragment = None
|
|
||||||
if "#" in server_and_media_id:
|
|
||||||
server_and_media_id, fragment = server_and_media_id.split("#", 1)
|
|
||||||
fragment = "#" + fragment
|
|
||||||
|
|
||||||
params = {"width": width, "height": height, "method": resize_method}
|
|
||||||
return "%s_matrix/media/v1/thumbnail/%s?%s%s" % (
|
|
||||||
public_baseurl,
|
|
||||||
server_and_media_id,
|
|
||||||
urllib.parse.urlencode(params),
|
|
||||||
fragment or "",
|
|
||||||
)
|
|
||||||
|
|
||||||
return mxc_to_http_filter
|
|
||||||
|
|
||||||
|
|
||||||
class RootConfig:
|
class RootConfig:
|
||||||
@@ -846,24 +843,24 @@ class ShardedWorkerHandlingConfig:
|
|||||||
instances = attr.ib(type=List[str])
|
instances = attr.ib(type=List[str])
|
||||||
|
|
||||||
def should_handle(self, instance_name: str, key: str) -> bool:
|
def should_handle(self, instance_name: str, key: str) -> bool:
|
||||||
"""Whether this instance is responsible for handling the given key.
|
"""Whether this instance is responsible for handling the given key."""
|
||||||
"""
|
# If no instances are defined we assume some other worker is handling
|
||||||
# If multiple instances are not defined we always return true
|
# this.
|
||||||
if not self.instances or len(self.instances) == 1:
|
if not self.instances:
|
||||||
return True
|
return False
|
||||||
|
|
||||||
return self.get_instance(key) == instance_name
|
return self._get_instance(key) == instance_name
|
||||||
|
|
||||||
def get_instance(self, key: str) -> str:
|
def _get_instance(self, key: str) -> str:
|
||||||
"""Get the instance responsible for handling the given key.
|
"""Get the instance responsible for handling the given key.
|
||||||
|
|
||||||
Note: For things like federation sending the config for which instance
|
Note: For federation sending and pushers the config for which instance
|
||||||
is sending is known only to the sender instance if there is only one.
|
is sending is known only to the sender instance, so we don't expose this
|
||||||
Therefore `should_handle` should be used where possible.
|
method by default.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if not self.instances:
|
if not self.instances:
|
||||||
return "master"
|
raise Exception("Unknown worker")
|
||||||
|
|
||||||
if len(self.instances) == 1:
|
if len(self.instances) == 1:
|
||||||
return self.instances[0]
|
return self.instances[0]
|
||||||
@@ -880,4 +877,21 @@ class ShardedWorkerHandlingConfig:
|
|||||||
return self.instances[remainder]
|
return self.instances[remainder]
|
||||||
|
|
||||||
|
|
||||||
|
@attr.s
|
||||||
|
class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig):
|
||||||
|
"""A version of `ShardedWorkerHandlingConfig` that is used for config
|
||||||
|
options where all instances know which instances are responsible for the
|
||||||
|
sharded work.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __attrs_post_init__(self):
|
||||||
|
# We require that `self.instances` is non-empty.
|
||||||
|
if not self.instances:
|
||||||
|
raise Exception("Got empty list of instances for shard config")
|
||||||
|
|
||||||
|
def get_instance(self, key: str) -> str:
|
||||||
|
"""Get the instance responsible for handling the given key."""
|
||||||
|
return self._get_instance(key)
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["Config", "RootConfig", "ShardedWorkerHandlingConfig"]
|
__all__ = ["Config", "RootConfig", "ShardedWorkerHandlingConfig"]
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ from synapse.config import (
|
|||||||
consent_config,
|
consent_config,
|
||||||
database,
|
database,
|
||||||
emailconfig,
|
emailconfig,
|
||||||
|
experimental,
|
||||||
groups,
|
groups,
|
||||||
jwt_config,
|
jwt_config,
|
||||||
key,
|
key,
|
||||||
@@ -18,6 +19,7 @@ from synapse.config import (
|
|||||||
password_auth_providers,
|
password_auth_providers,
|
||||||
push,
|
push,
|
||||||
ratelimiting,
|
ratelimiting,
|
||||||
|
redis,
|
||||||
registration,
|
registration,
|
||||||
repository,
|
repository,
|
||||||
room_directory,
|
room_directory,
|
||||||
@@ -48,10 +50,11 @@ def path_exists(file_path: str): ...
|
|||||||
|
|
||||||
class RootConfig:
|
class RootConfig:
|
||||||
server: server.ServerConfig
|
server: server.ServerConfig
|
||||||
|
experimental: experimental.ExperimentalConfig
|
||||||
tls: tls.TlsConfig
|
tls: tls.TlsConfig
|
||||||
database: database.DatabaseConfig
|
database: database.DatabaseConfig
|
||||||
logging: logger.LoggingConfig
|
logging: logger.LoggingConfig
|
||||||
ratelimit: ratelimiting.RatelimitConfig
|
ratelimiting: ratelimiting.RatelimitConfig
|
||||||
media: repository.ContentRepositoryConfig
|
media: repository.ContentRepositoryConfig
|
||||||
captcha: captcha.CaptchaConfig
|
captcha: captcha.CaptchaConfig
|
||||||
voip: voip.VoipConfig
|
voip: voip.VoipConfig
|
||||||
@@ -79,6 +82,7 @@ class RootConfig:
|
|||||||
roomdirectory: room_directory.RoomDirectoryConfig
|
roomdirectory: room_directory.RoomDirectoryConfig
|
||||||
thirdpartyrules: third_party_event_rules.ThirdPartyRulesConfig
|
thirdpartyrules: third_party_event_rules.ThirdPartyRulesConfig
|
||||||
tracer: tracer.TracerConfig
|
tracer: tracer.TracerConfig
|
||||||
|
redis: redis.RedisConfig
|
||||||
|
|
||||||
config_classes: List = ...
|
config_classes: List = ...
|
||||||
def __init__(self) -> None: ...
|
def __init__(self) -> None: ...
|
||||||
@@ -145,4 +149,6 @@ class ShardedWorkerHandlingConfig:
|
|||||||
instances: List[str]
|
instances: List[str]
|
||||||
def __init__(self, instances: List[str]) -> None: ...
|
def __init__(self, instances: List[str]) -> None: ...
|
||||||
def should_handle(self, instance_name: str, key: str) -> bool: ...
|
def should_handle(self, instance_name: str, key: str) -> bool: ...
|
||||||
|
|
||||||
|
class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig):
|
||||||
def get_instance(self, key: str) -> str: ...
|
def get_instance(self, key: str) -> str: ...
|
||||||
|
|||||||
@@ -18,8 +18,7 @@ from ._base import Config
|
|||||||
|
|
||||||
|
|
||||||
class AuthConfig(Config):
|
class AuthConfig(Config):
|
||||||
"""Password and login configuration
|
"""Password and login configuration"""
|
||||||
"""
|
|
||||||
|
|
||||||
section = "auth"
|
section = "auth"
|
||||||
|
|
||||||
@@ -38,7 +37,9 @@ class AuthConfig(Config):
|
|||||||
|
|
||||||
# User-interactive authentication
|
# User-interactive authentication
|
||||||
ui_auth = config.get("ui_auth") or {}
|
ui_auth = config.get("ui_auth") or {}
|
||||||
self.ui_auth_session_timeout = ui_auth.get("session_timeout", 0)
|
self.ui_auth_session_timeout = self.parse_duration(
|
||||||
|
ui_auth.get("session_timeout", 0)
|
||||||
|
)
|
||||||
|
|
||||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||||
return """\
|
return """\
|
||||||
@@ -94,11 +95,11 @@ class AuthConfig(Config):
|
|||||||
#require_uppercase: true
|
#require_uppercase: true
|
||||||
|
|
||||||
ui_auth:
|
ui_auth:
|
||||||
# The number of milliseconds to allow a user-interactive authentication
|
# The amount of time to allow a user-interactive authentication session
|
||||||
# session to be active.
|
# to be active.
|
||||||
#
|
#
|
||||||
# This defaults to 0, meaning the user is queried for their credentials
|
# This defaults to 0, meaning the user is queried for their credentials
|
||||||
# before every action, but this can be overridden to alow a single
|
# before every action, but this can be overridden to allow a single
|
||||||
# validation to be re-used. This weakens the protections afforded by
|
# validation to be re-used. This weakens the protections afforded by
|
||||||
# the user-interactive authentication process, by allowing for multiple
|
# the user-interactive authentication process, by allowing for multiple
|
||||||
# (and potentially different) operations to use the same validation session.
|
# (and potentially different) operations to use the same validation session.
|
||||||
@@ -106,5 +107,5 @@ class AuthConfig(Config):
|
|||||||
# Uncomment below to allow for credential validation to last for 15
|
# Uncomment below to allow for credential validation to last for 15
|
||||||
# seconds.
|
# seconds.
|
||||||
#
|
#
|
||||||
#session_timeout: 15000
|
#session_timeout: "15s"
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -28,9 +28,7 @@ class CaptchaConfig(Config):
|
|||||||
"recaptcha_siteverify_api",
|
"recaptcha_siteverify_api",
|
||||||
"https://www.recaptcha.net/recaptcha/api/siteverify",
|
"https://www.recaptcha.net/recaptcha/api/siteverify",
|
||||||
)
|
)
|
||||||
self.recaptcha_template = self.read_templates(
|
self.recaptcha_template = self.read_template("recaptcha.html")
|
||||||
["recaptcha.html"], autoescape=True
|
|
||||||
)[0]
|
|
||||||
|
|
||||||
def generate_config_section(self, **kwargs):
|
def generate_config_section(self, **kwargs):
|
||||||
return """\
|
return """\
|
||||||
|
|||||||
@@ -13,7 +13,12 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from ._base import Config
|
from typing import Any, List
|
||||||
|
|
||||||
|
from synapse.config.sso import SsoAttributeRequirement
|
||||||
|
|
||||||
|
from ._base import Config, ConfigError
|
||||||
|
from ._util import validate_config
|
||||||
|
|
||||||
|
|
||||||
class CasConfig(Config):
|
class CasConfig(Config):
|
||||||
@@ -30,14 +35,26 @@ class CasConfig(Config):
|
|||||||
|
|
||||||
if self.cas_enabled:
|
if self.cas_enabled:
|
||||||
self.cas_server_url = cas_config["server_url"]
|
self.cas_server_url = cas_config["server_url"]
|
||||||
self.cas_service_url = cas_config["service_url"]
|
|
||||||
|
# The public baseurl is required because it is used by the redirect
|
||||||
|
# template.
|
||||||
|
public_baseurl = self.public_baseurl
|
||||||
|
if not public_baseurl:
|
||||||
|
raise ConfigError("cas_config requires a public_baseurl to be set")
|
||||||
|
|
||||||
|
# TODO Update this to a _synapse URL.
|
||||||
|
self.cas_service_url = public_baseurl + "_matrix/client/r0/login/cas/ticket"
|
||||||
self.cas_displayname_attribute = cas_config.get("displayname_attribute")
|
self.cas_displayname_attribute = cas_config.get("displayname_attribute")
|
||||||
self.cas_required_attributes = cas_config.get("required_attributes") or {}
|
required_attributes = cas_config.get("required_attributes") or {}
|
||||||
|
self.cas_required_attributes = _parsed_required_attributes_def(
|
||||||
|
required_attributes
|
||||||
|
)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
self.cas_server_url = None
|
self.cas_server_url = None
|
||||||
self.cas_service_url = None
|
self.cas_service_url = None
|
||||||
self.cas_displayname_attribute = None
|
self.cas_displayname_attribute = None
|
||||||
self.cas_required_attributes = {}
|
self.cas_required_attributes = []
|
||||||
|
|
||||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||||
return """\
|
return """\
|
||||||
@@ -53,10 +70,6 @@ class CasConfig(Config):
|
|||||||
#
|
#
|
||||||
#server_url: "https://cas-server.com"
|
#server_url: "https://cas-server.com"
|
||||||
|
|
||||||
# The public URL of the homeserver.
|
|
||||||
#
|
|
||||||
#service_url: "https://homeserver.domain.com:8448"
|
|
||||||
|
|
||||||
# The attribute of the CAS response to use as the display name.
|
# The attribute of the CAS response to use as the display name.
|
||||||
#
|
#
|
||||||
# If unset, no displayname will be set.
|
# If unset, no displayname will be set.
|
||||||
@@ -73,3 +86,22 @@ class CasConfig(Config):
|
|||||||
# userGroup: "staff"
|
# userGroup: "staff"
|
||||||
# department: None
|
# department: None
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
# CAS uses a legacy required attributes mapping, not the one provided by
|
||||||
|
# SsoAttributeRequirement.
|
||||||
|
REQUIRED_ATTRIBUTES_SCHEMA = {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {"anyOf": [{"type": "string"}, {"type": "null"}]},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _parsed_required_attributes_def(
|
||||||
|
required_attributes: Any,
|
||||||
|
) -> List[SsoAttributeRequirement]:
|
||||||
|
validate_config(
|
||||||
|
REQUIRED_ATTRIBUTES_SCHEMA,
|
||||||
|
required_attributes,
|
||||||
|
config_path=("cas_config", "required_attributes"),
|
||||||
|
)
|
||||||
|
return [SsoAttributeRequirement(k, v) for k, v in required_attributes.items()]
|
||||||
|
|||||||
@@ -89,7 +89,7 @@ class ConsentConfig(Config):
|
|||||||
|
|
||||||
def read_config(self, config, **kwargs):
|
def read_config(self, config, **kwargs):
|
||||||
consent_config = config.get("user_consent")
|
consent_config = config.get("user_consent")
|
||||||
self.terms_template = self.read_templates(["terms.html"], autoescape=True)[0]
|
self.terms_template = self.read_template("terms.html")
|
||||||
|
|
||||||
if consent_config is None:
|
if consent_config is None:
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -207,8 +207,7 @@ class DatabaseConfig(Config):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def get_single_database(self) -> DatabaseConnectionConfig:
|
def get_single_database(self) -> DatabaseConnectionConfig:
|
||||||
"""Returns the database if there is only one, useful for e.g. tests
|
"""Returns the database if there is only one, useful for e.g. tests"""
|
||||||
"""
|
|
||||||
if not self.databases:
|
if not self.databases:
|
||||||
raise Exception("More than one database exists")
|
raise Exception("More than one database exists")
|
||||||
|
|
||||||
|
|||||||
@@ -166,6 +166,11 @@ class EmailConfig(Config):
|
|||||||
if not self.email_notif_from:
|
if not self.email_notif_from:
|
||||||
missing.append("email.notif_from")
|
missing.append("email.notif_from")
|
||||||
|
|
||||||
|
# public_baseurl is required to build password reset and validation links that
|
||||||
|
# will be emailed to users
|
||||||
|
if config.get("public_baseurl") is None:
|
||||||
|
missing.append("public_baseurl")
|
||||||
|
|
||||||
if missing:
|
if missing:
|
||||||
raise ConfigError(
|
raise ConfigError(
|
||||||
MISSING_PASSWORD_RESET_CONFIG_ERROR % (", ".join(missing),)
|
MISSING_PASSWORD_RESET_CONFIG_ERROR % (", ".join(missing),)
|
||||||
@@ -264,6 +269,9 @@ class EmailConfig(Config):
|
|||||||
if not self.email_notif_from:
|
if not self.email_notif_from:
|
||||||
missing.append("email.notif_from")
|
missing.append("email.notif_from")
|
||||||
|
|
||||||
|
if config.get("public_baseurl") is None:
|
||||||
|
missing.append("public_baseurl")
|
||||||
|
|
||||||
if missing:
|
if missing:
|
||||||
raise ConfigError(
|
raise ConfigError(
|
||||||
"email.enable_notifs is True but required keys are missing: %s"
|
"email.enable_notifs is True but required keys are missing: %s"
|
||||||
@@ -281,7 +289,8 @@ class EmailConfig(Config):
|
|||||||
self.email_notif_template_html,
|
self.email_notif_template_html,
|
||||||
self.email_notif_template_text,
|
self.email_notif_template_text,
|
||||||
) = self.read_templates(
|
) = self.read_templates(
|
||||||
[notif_template_html, notif_template_text], template_dir,
|
[notif_template_html, notif_template_text],
|
||||||
|
template_dir,
|
||||||
)
|
)
|
||||||
|
|
||||||
self.email_notif_for_new_users = email_config.get(
|
self.email_notif_for_new_users = email_config.get(
|
||||||
@@ -303,7 +312,8 @@ class EmailConfig(Config):
|
|||||||
self.account_validity_template_html,
|
self.account_validity_template_html,
|
||||||
self.account_validity_template_text,
|
self.account_validity_template_text,
|
||||||
) = self.read_templates(
|
) = self.read_templates(
|
||||||
[expiry_template_html, expiry_template_text], template_dir,
|
[expiry_template_html, expiry_template_text],
|
||||||
|
template_dir,
|
||||||
)
|
)
|
||||||
|
|
||||||
subjects_config = email_config.get("subjects", {})
|
subjects_config = email_config.get("subjects", {})
|
||||||
|
|||||||
29
synapse/config/experimental.py
Normal file
29
synapse/config/experimental.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2021 The Matrix.org Foundation C.I.C.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from synapse.config._base import Config
|
||||||
|
from synapse.types import JsonDict
|
||||||
|
|
||||||
|
|
||||||
|
class ExperimentalConfig(Config):
|
||||||
|
"""Config section for enabling experimental features"""
|
||||||
|
|
||||||
|
section = "experimental"
|
||||||
|
|
||||||
|
def read_config(self, config: JsonDict, **kwargs):
|
||||||
|
experimental = config.get("experimental_features") or {}
|
||||||
|
|
||||||
|
# MSC2858 (multiple SSO identity providers)
|
||||||
|
self.msc2858_enabled = experimental.get("msc2858_enabled", False) # type: bool
|
||||||
@@ -41,6 +41,10 @@ class FederationConfig(Config):
|
|||||||
)
|
)
|
||||||
self.federation_metrics_domains = set(federation_metrics_domains)
|
self.federation_metrics_domains = set(federation_metrics_domains)
|
||||||
|
|
||||||
|
self.allow_profile_lookup_over_federation = config.get(
|
||||||
|
"allow_profile_lookup_over_federation", True
|
||||||
|
)
|
||||||
|
|
||||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||||
return """\
|
return """\
|
||||||
## Federation ##
|
## Federation ##
|
||||||
@@ -66,6 +70,12 @@ class FederationConfig(Config):
|
|||||||
#federation_metrics_domains:
|
#federation_metrics_domains:
|
||||||
# - matrix.org
|
# - matrix.org
|
||||||
# - example.com
|
# - example.com
|
||||||
|
|
||||||
|
# Uncomment to disable profile lookup over federation. By default, the
|
||||||
|
# Federation API allows other homeservers to obtain profile data of any user
|
||||||
|
# on this homeserver. Defaults to 'true'.
|
||||||
|
#
|
||||||
|
#allow_profile_lookup_over_federation: false
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ from .cas import CasConfig
|
|||||||
from .consent_config import ConsentConfig
|
from .consent_config import ConsentConfig
|
||||||
from .database import DatabaseConfig
|
from .database import DatabaseConfig
|
||||||
from .emailconfig import EmailConfig
|
from .emailconfig import EmailConfig
|
||||||
|
from .experimental import ExperimentalConfig
|
||||||
from .federation import FederationConfig
|
from .federation import FederationConfig
|
||||||
from .groups import GroupsConfig
|
from .groups import GroupsConfig
|
||||||
from .jwt_config import JWTConfig
|
from .jwt_config import JWTConfig
|
||||||
@@ -57,6 +58,7 @@ class HomeServerConfig(RootConfig):
|
|||||||
|
|
||||||
config_classes = [
|
config_classes = [
|
||||||
ServerConfig,
|
ServerConfig,
|
||||||
|
ExperimentalConfig,
|
||||||
TlsConfig,
|
TlsConfig,
|
||||||
FederationConfig,
|
FederationConfig,
|
||||||
CacheConfig,
|
CacheConfig,
|
||||||
|
|||||||
@@ -162,7 +162,10 @@ class LoggingConfig(Config):
|
|||||||
)
|
)
|
||||||
|
|
||||||
logging_group.add_argument(
|
logging_group.add_argument(
|
||||||
"-f", "--log-file", dest="log_file", help=argparse.SUPPRESS,
|
"-f",
|
||||||
|
"--log-file",
|
||||||
|
dest="log_file",
|
||||||
|
help=argparse.SUPPRESS,
|
||||||
)
|
)
|
||||||
|
|
||||||
def generate_files(self, config, config_dir_path):
|
def generate_files(self, config, config_dir_path):
|
||||||
|
|||||||
@@ -14,7 +14,6 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import string
|
|
||||||
from collections import Counter
|
from collections import Counter
|
||||||
from typing import Iterable, Optional, Tuple, Type
|
from typing import Iterable, Optional, Tuple, Type
|
||||||
|
|
||||||
@@ -55,7 +54,9 @@ class OIDCConfig(Config):
|
|||||||
)
|
)
|
||||||
|
|
||||||
public_baseurl = self.public_baseurl
|
public_baseurl = self.public_baseurl
|
||||||
self.oidc_callback_url = public_baseurl + "_synapse/oidc/callback"
|
if public_baseurl is None:
|
||||||
|
raise ConfigError("oidc_config requires a public_baseurl to be set")
|
||||||
|
self.oidc_callback_url = public_baseurl + "_synapse/client/oidc/callback"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def oidc_enabled(self) -> bool:
|
def oidc_enabled(self) -> bool:
|
||||||
@@ -79,10 +80,14 @@ class OIDCConfig(Config):
|
|||||||
# offer the user a choice of login mechanisms.
|
# offer the user a choice of login mechanisms.
|
||||||
#
|
#
|
||||||
# idp_icon: An optional icon for this identity provider, which is presented
|
# idp_icon: An optional icon for this identity provider, which is presented
|
||||||
# by identity picker pages. If given, must be an MXC URI of the format
|
# by clients and Synapse's own IdP picker page. If given, must be an
|
||||||
# mxc://<server-name>/<media-id>. (An easy way to obtain such an MXC URI
|
# MXC URI of the format mxc://<server-name>/<media-id>. (An easy way to
|
||||||
# is to upload an image to an (unencrypted) room and then copy the "url"
|
# obtain such an MXC URI is to upload an image to an (unencrypted) room
|
||||||
# from the source of the event.)
|
# and then copy the "url" from the source of the event.)
|
||||||
|
#
|
||||||
|
# idp_brand: An optional brand for this identity provider, allowing clients
|
||||||
|
# to style the login flow according to the identity provider in question.
|
||||||
|
# See the spec for possible options here.
|
||||||
#
|
#
|
||||||
# discover: set to 'false' to disable the use of the OIDC discovery mechanism
|
# discover: set to 'false' to disable the use of the OIDC discovery mechanism
|
||||||
# to discover endpoints. Defaults to true.
|
# to discover endpoints. Defaults to true.
|
||||||
@@ -143,17 +148,21 @@ class OIDCConfig(Config):
|
|||||||
#
|
#
|
||||||
# For the default provider, the following settings are available:
|
# For the default provider, the following settings are available:
|
||||||
#
|
#
|
||||||
# sub: name of the claim containing a unique identifier for the
|
# subject_claim: name of the claim containing a unique identifier
|
||||||
# user. Defaults to 'sub', which OpenID Connect compliant
|
# for the user. Defaults to 'sub', which OpenID Connect
|
||||||
# providers should provide.
|
# compliant providers should provide.
|
||||||
#
|
#
|
||||||
# localpart_template: Jinja2 template for the localpart of the MXID.
|
# localpart_template: Jinja2 template for the localpart of the MXID.
|
||||||
# If this is not set, the user will be prompted to choose their
|
# If this is not set, the user will be prompted to choose their
|
||||||
# own username.
|
# own username (see 'sso_auth_account_details.html' in the 'sso'
|
||||||
|
# section of this file).
|
||||||
#
|
#
|
||||||
# display_name_template: Jinja2 template for the display name to set
|
# display_name_template: Jinja2 template for the display name to set
|
||||||
# on first login. If unset, no displayname will be set.
|
# on first login. If unset, no displayname will be set.
|
||||||
#
|
#
|
||||||
|
# email_template: Jinja2 template for the email address of the user.
|
||||||
|
# If unset, no email address will be added to the account.
|
||||||
|
#
|
||||||
# extra_attributes: a map of Jinja2 templates for extra attributes
|
# extra_attributes: a map of Jinja2 templates for extra attributes
|
||||||
# to send back to the client during login.
|
# to send back to the client during login.
|
||||||
# Note that these are non-standard and clients will ignore them
|
# Note that these are non-standard and clients will ignore them
|
||||||
@@ -189,6 +198,12 @@ class OIDCConfig(Config):
|
|||||||
# userinfo_endpoint: "https://accounts.example.com/userinfo"
|
# userinfo_endpoint: "https://accounts.example.com/userinfo"
|
||||||
# jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
|
# jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
|
||||||
# skip_verification: true
|
# skip_verification: true
|
||||||
|
# user_mapping_provider:
|
||||||
|
# config:
|
||||||
|
# subject_claim: "id"
|
||||||
|
# localpart_template: "{{{{ user.login }}}}"
|
||||||
|
# display_name_template: "{{{{ user.name }}}}"
|
||||||
|
# email_template: "{{{{ user.email }}}}"
|
||||||
|
|
||||||
# For use with Keycloak
|
# For use with Keycloak
|
||||||
#
|
#
|
||||||
@@ -203,6 +218,7 @@ class OIDCConfig(Config):
|
|||||||
#
|
#
|
||||||
#- idp_id: github
|
#- idp_id: github
|
||||||
# idp_name: Github
|
# idp_name: Github
|
||||||
|
# idp_brand: org.matrix.github
|
||||||
# discover: false
|
# discover: false
|
||||||
# issuer: "https://github.com/"
|
# issuer: "https://github.com/"
|
||||||
# client_id: "your-client-id" # TO BE FILLED
|
# client_id: "your-client-id" # TO BE FILLED
|
||||||
@@ -214,8 +230,8 @@ class OIDCConfig(Config):
|
|||||||
# user_mapping_provider:
|
# user_mapping_provider:
|
||||||
# config:
|
# config:
|
||||||
# subject_claim: "id"
|
# subject_claim: "id"
|
||||||
# localpart_template: "{{ user.login }}"
|
# localpart_template: "{{{{ user.login }}}}"
|
||||||
# display_name_template: "{{ user.name }}"
|
# display_name_template: "{{{{ user.name }}}}"
|
||||||
""".format(
|
""".format(
|
||||||
mapping_provider=DEFAULT_USER_MAPPING_PROVIDER
|
mapping_provider=DEFAULT_USER_MAPPING_PROVIDER
|
||||||
)
|
)
|
||||||
@@ -226,11 +242,22 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
|
|||||||
"type": "object",
|
"type": "object",
|
||||||
"required": ["issuer", "client_id", "client_secret"],
|
"required": ["issuer", "client_id", "client_secret"],
|
||||||
"properties": {
|
"properties": {
|
||||||
# TODO: fix the maxLength here depending on what MSC2528 decides
|
"idp_id": {
|
||||||
# remember that we prefix the ID given here with `oidc-`
|
"type": "string",
|
||||||
"idp_id": {"type": "string", "minLength": 1, "maxLength": 128},
|
"minLength": 1,
|
||||||
|
# MSC2858 allows a maxlen of 255, but we prefix with "oidc-"
|
||||||
|
"maxLength": 250,
|
||||||
|
"pattern": "^[A-Za-z0-9._~-]+$",
|
||||||
|
},
|
||||||
"idp_name": {"type": "string"},
|
"idp_name": {"type": "string"},
|
||||||
"idp_icon": {"type": "string"},
|
"idp_icon": {"type": "string"},
|
||||||
|
"idp_brand": {
|
||||||
|
"type": "string",
|
||||||
|
# MSC2758-style namespaced identifier
|
||||||
|
"minLength": 1,
|
||||||
|
"maxLength": 255,
|
||||||
|
"pattern": "^[a-z][a-z0-9_.-]*$",
|
||||||
|
},
|
||||||
"discover": {"type": "boolean"},
|
"discover": {"type": "boolean"},
|
||||||
"issuer": {"type": "string"},
|
"issuer": {"type": "string"},
|
||||||
"client_id": {"type": "string"},
|
"client_id": {"type": "string"},
|
||||||
@@ -328,9 +355,10 @@ def _parse_oidc_config_dict(
|
|||||||
ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
|
ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
|
||||||
ump_config.setdefault("config", {})
|
ump_config.setdefault("config", {})
|
||||||
|
|
||||||
(user_mapping_provider_class, user_mapping_provider_config,) = load_module(
|
(
|
||||||
ump_config, config_path + ("user_mapping_provider",)
|
user_mapping_provider_class,
|
||||||
)
|
user_mapping_provider_config,
|
||||||
|
) = load_module(ump_config, config_path + ("user_mapping_provider",))
|
||||||
|
|
||||||
# Ensure loaded user mapping module has defined all necessary methods
|
# Ensure loaded user mapping module has defined all necessary methods
|
||||||
required_methods = [
|
required_methods = [
|
||||||
@@ -345,29 +373,16 @@ def _parse_oidc_config_dict(
|
|||||||
if missing_methods:
|
if missing_methods:
|
||||||
raise ConfigError(
|
raise ConfigError(
|
||||||
"Class %s is missing required "
|
"Class %s is missing required "
|
||||||
"methods: %s" % (user_mapping_provider_class, ", ".join(missing_methods),),
|
"methods: %s"
|
||||||
|
% (
|
||||||
|
user_mapping_provider_class,
|
||||||
|
", ".join(missing_methods),
|
||||||
|
),
|
||||||
config_path + ("user_mapping_provider", "module"),
|
config_path + ("user_mapping_provider", "module"),
|
||||||
)
|
)
|
||||||
|
|
||||||
# MSC2858 will apply certain limits in what can be used as an IdP id, so let's
|
|
||||||
# enforce those limits now.
|
|
||||||
# TODO: factor out this stuff to a generic function
|
|
||||||
idp_id = oidc_config.get("idp_id", "oidc")
|
idp_id = oidc_config.get("idp_id", "oidc")
|
||||||
|
|
||||||
# TODO: update this validity check based on what MSC2858 decides.
|
|
||||||
valid_idp_chars = set(string.ascii_lowercase + string.digits + "-._")
|
|
||||||
|
|
||||||
if any(c not in valid_idp_chars for c in idp_id):
|
|
||||||
raise ConfigError(
|
|
||||||
'idp_id may only contain a-z, 0-9, "-", ".", "_"',
|
|
||||||
config_path + ("idp_id",),
|
|
||||||
)
|
|
||||||
|
|
||||||
if idp_id[0] not in string.ascii_lowercase:
|
|
||||||
raise ConfigError(
|
|
||||||
"idp_id must start with a-z", config_path + ("idp_id",),
|
|
||||||
)
|
|
||||||
|
|
||||||
# prefix the given IDP with a prefix specific to the SSO mechanism, to avoid
|
# prefix the given IDP with a prefix specific to the SSO mechanism, to avoid
|
||||||
# clashes with other mechs (such as SAML, CAS).
|
# clashes with other mechs (such as SAML, CAS).
|
||||||
#
|
#
|
||||||
@@ -393,6 +408,7 @@ def _parse_oidc_config_dict(
|
|||||||
idp_id=idp_id,
|
idp_id=idp_id,
|
||||||
idp_name=oidc_config.get("idp_name", "OIDC"),
|
idp_name=oidc_config.get("idp_name", "OIDC"),
|
||||||
idp_icon=idp_icon,
|
idp_icon=idp_icon,
|
||||||
|
idp_brand=oidc_config.get("idp_brand"),
|
||||||
discover=oidc_config.get("discover", True),
|
discover=oidc_config.get("discover", True),
|
||||||
issuer=oidc_config["issuer"],
|
issuer=oidc_config["issuer"],
|
||||||
client_id=oidc_config["client_id"],
|
client_id=oidc_config["client_id"],
|
||||||
@@ -423,6 +439,9 @@ class OidcProviderConfig:
|
|||||||
# Optional MXC URI for icon for this IdP.
|
# Optional MXC URI for icon for this IdP.
|
||||||
idp_icon = attr.ib(type=Optional[str])
|
idp_icon = attr.ib(type=Optional[str])
|
||||||
|
|
||||||
|
# Optional brand identifier for this IdP.
|
||||||
|
idp_brand = attr.ib(type=Optional[str])
|
||||||
|
|
||||||
# whether the OIDC discovery mechanism is used to discover endpoints
|
# whether the OIDC discovery mechanism is used to discover endpoints
|
||||||
discover = attr.ib(type=bool)
|
discover = attr.ib(type=bool)
|
||||||
|
|
||||||
|
|||||||
@@ -14,7 +14,7 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from ._base import Config, ShardedWorkerHandlingConfig
|
from ._base import Config
|
||||||
|
|
||||||
|
|
||||||
class PushConfig(Config):
|
class PushConfig(Config):
|
||||||
@@ -27,9 +27,6 @@ class PushConfig(Config):
|
|||||||
"group_unread_count_by_room", True
|
"group_unread_count_by_room", True
|
||||||
)
|
)
|
||||||
|
|
||||||
pusher_instances = config.get("pusher_instances") or []
|
|
||||||
self.pusher_shard_config = ShardedWorkerHandlingConfig(pusher_instances)
|
|
||||||
|
|
||||||
# There was a a 'redact_content' setting but mistakenly read from the
|
# There was a a 'redact_content' setting but mistakenly read from the
|
||||||
# 'email'section'. Check for the flag in the 'push' section, and log,
|
# 'email'section'. Check for the flag in the 'push' section, and log,
|
||||||
# but do not honour it to avoid nasty surprises when people upgrade.
|
# but do not honour it to avoid nasty surprises when people upgrade.
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ class RateLimitConfig:
|
|||||||
defaults={"per_second": 0.17, "burst_count": 3.0},
|
defaults={"per_second": 0.17, "burst_count": 3.0},
|
||||||
):
|
):
|
||||||
self.per_second = config.get("per_second", defaults["per_second"])
|
self.per_second = config.get("per_second", defaults["per_second"])
|
||||||
self.burst_count = config.get("burst_count", defaults["burst_count"])
|
self.burst_count = int(config.get("burst_count", defaults["burst_count"]))
|
||||||
|
|
||||||
|
|
||||||
class FederationRateLimitConfig:
|
class FederationRateLimitConfig:
|
||||||
@@ -102,6 +102,30 @@ class RatelimitConfig(Config):
|
|||||||
defaults={"per_second": 0.01, "burst_count": 3},
|
defaults={"per_second": 0.01, "burst_count": 3},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Ratelimit cross-user key requests:
|
||||||
|
# * For local requests this is keyed by the sending device.
|
||||||
|
# * For requests received over federation this is keyed by the origin.
|
||||||
|
#
|
||||||
|
# Note that this isn't exposed in the configuration as it is obscure.
|
||||||
|
self.rc_key_requests = RateLimitConfig(
|
||||||
|
config.get("rc_key_requests", {}),
|
||||||
|
defaults={"per_second": 20, "burst_count": 100},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.rc_3pid_validation = RateLimitConfig(
|
||||||
|
config.get("rc_3pid_validation") or {},
|
||||||
|
defaults={"per_second": 0.003, "burst_count": 5},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.rc_invites_per_room = RateLimitConfig(
|
||||||
|
config.get("rc_invites", {}).get("per_room", {}),
|
||||||
|
defaults={"per_second": 0.3, "burst_count": 10},
|
||||||
|
)
|
||||||
|
self.rc_invites_per_user = RateLimitConfig(
|
||||||
|
config.get("rc_invites", {}).get("per_user", {}),
|
||||||
|
defaults={"per_second": 0.003, "burst_count": 5},
|
||||||
|
)
|
||||||
|
|
||||||
def generate_config_section(self, **kwargs):
|
def generate_config_section(self, **kwargs):
|
||||||
return """\
|
return """\
|
||||||
## Ratelimiting ##
|
## Ratelimiting ##
|
||||||
@@ -131,6 +155,9 @@ class RatelimitConfig(Config):
|
|||||||
# users are joining rooms the server is already in (this is cheap) vs
|
# users are joining rooms the server is already in (this is cheap) vs
|
||||||
# "remote" for when users are trying to join rooms not on the server (which
|
# "remote" for when users are trying to join rooms not on the server (which
|
||||||
# can be more expensive)
|
# can be more expensive)
|
||||||
|
# - one for ratelimiting how often a user or IP can attempt to validate a 3PID.
|
||||||
|
# - two for ratelimiting how often invites can be sent in a room or to a
|
||||||
|
# specific user.
|
||||||
#
|
#
|
||||||
# The defaults are as shown below.
|
# The defaults are as shown below.
|
||||||
#
|
#
|
||||||
@@ -164,7 +191,18 @@ class RatelimitConfig(Config):
|
|||||||
# remote:
|
# remote:
|
||||||
# per_second: 0.01
|
# per_second: 0.01
|
||||||
# burst_count: 3
|
# burst_count: 3
|
||||||
|
#
|
||||||
|
#rc_3pid_validation:
|
||||||
|
# per_second: 0.003
|
||||||
|
# burst_count: 5
|
||||||
|
#
|
||||||
|
#rc_invites:
|
||||||
|
# per_room:
|
||||||
|
# per_second: 0.3
|
||||||
|
# burst_count: 10
|
||||||
|
# per_user:
|
||||||
|
# per_second: 0.003
|
||||||
|
# burst_count: 5
|
||||||
|
|
||||||
# Ratelimiting settings for incoming federation
|
# Ratelimiting settings for incoming federation
|
||||||
#
|
#
|
||||||
|
|||||||
@@ -49,6 +49,10 @@ class AccountValidityConfig(Config):
|
|||||||
|
|
||||||
self.startup_job_max_delta = self.period * 10.0 / 100.0
|
self.startup_job_max_delta = self.period * 10.0 / 100.0
|
||||||
|
|
||||||
|
if self.renew_by_email_enabled:
|
||||||
|
if "public_baseurl" not in synapse_config:
|
||||||
|
raise ConfigError("Can't send renewal emails without 'public_baseurl'")
|
||||||
|
|
||||||
template_dir = config.get("template_dir")
|
template_dir = config.get("template_dir")
|
||||||
|
|
||||||
if not template_dir:
|
if not template_dir:
|
||||||
@@ -105,6 +109,13 @@ class RegistrationConfig(Config):
|
|||||||
account_threepid_delegates = config.get("account_threepid_delegates") or {}
|
account_threepid_delegates = config.get("account_threepid_delegates") or {}
|
||||||
self.account_threepid_delegate_email = account_threepid_delegates.get("email")
|
self.account_threepid_delegate_email = account_threepid_delegates.get("email")
|
||||||
self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")
|
self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")
|
||||||
|
if self.account_threepid_delegate_msisdn and not self.public_baseurl:
|
||||||
|
raise ConfigError(
|
||||||
|
"The configuration option `public_baseurl` is required if "
|
||||||
|
"`account_threepid_delegate.msisdn` is set, such that "
|
||||||
|
"clients know where to submit validation tokens to. Please "
|
||||||
|
"configure `public_baseurl`."
|
||||||
|
)
|
||||||
|
|
||||||
self.default_identity_server = config.get("default_identity_server")
|
self.default_identity_server = config.get("default_identity_server")
|
||||||
self.allow_guest_access = config.get("allow_guest_access", False)
|
self.allow_guest_access = config.get("allow_guest_access", False)
|
||||||
@@ -176,9 +187,7 @@ class RegistrationConfig(Config):
|
|||||||
self.session_lifetime = session_lifetime
|
self.session_lifetime = session_lifetime
|
||||||
|
|
||||||
# The success template used during fallback auth.
|
# The success template used during fallback auth.
|
||||||
self.fallback_success_template = self.read_templates(
|
self.fallback_success_template = self.read_template("auth_success.html")
|
||||||
["auth_success.html"], autoescape=True
|
|
||||||
)[0]
|
|
||||||
|
|
||||||
def generate_config_section(self, generate_secrets=False, **kwargs):
|
def generate_config_section(self, generate_secrets=False, **kwargs):
|
||||||
if generate_secrets:
|
if generate_secrets:
|
||||||
@@ -229,9 +238,8 @@ class RegistrationConfig(Config):
|
|||||||
# send an email to the account's email address with a renewal link. By
|
# send an email to the account's email address with a renewal link. By
|
||||||
# default, no such emails are sent.
|
# default, no such emails are sent.
|
||||||
#
|
#
|
||||||
# If you enable this setting, you will also need to fill out the 'email'
|
# If you enable this setting, you will also need to fill out the 'email' and
|
||||||
# configuration section. You should also check that 'public_baseurl' is set
|
# 'public_baseurl' configuration sections.
|
||||||
# correctly.
|
|
||||||
#
|
#
|
||||||
#renew_at: 1w
|
#renew_at: 1w
|
||||||
|
|
||||||
@@ -322,7 +330,8 @@ class RegistrationConfig(Config):
|
|||||||
# The identity server which we suggest that clients should use when users log
|
# The identity server which we suggest that clients should use when users log
|
||||||
# in on this server.
|
# in on this server.
|
||||||
#
|
#
|
||||||
# (By default, no suggestion is made, so it is left up to the client.)
|
# (By default, no suggestion is made, so it is left up to the client.
|
||||||
|
# This setting is ignored unless public_baseurl is also set.)
|
||||||
#
|
#
|
||||||
#default_identity_server: https://matrix.org
|
#default_identity_server: https://matrix.org
|
||||||
|
|
||||||
@@ -347,6 +356,8 @@ class RegistrationConfig(Config):
|
|||||||
# by the Matrix Identity Service API specification:
|
# by the Matrix Identity Service API specification:
|
||||||
# https://matrix.org/docs/spec/identity_service/latest
|
# https://matrix.org/docs/spec/identity_service/latest
|
||||||
#
|
#
|
||||||
|
# If a delegate is specified, the config option public_baseurl must also be filled out.
|
||||||
|
#
|
||||||
account_threepid_delegates:
|
account_threepid_delegates:
|
||||||
#email: https://example.com # Delegate email sending to example.com
|
#email: https://example.com # Delegate email sending to example.com
|
||||||
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process
|
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process
|
||||||
@@ -380,6 +391,8 @@ class RegistrationConfig(Config):
|
|||||||
# By default, any room aliases included in this list will be created
|
# By default, any room aliases included in this list will be created
|
||||||
# as a publicly joinable room when the first user registers for the
|
# as a publicly joinable room when the first user registers for the
|
||||||
# homeserver. This behaviour can be customised with the settings below.
|
# homeserver. This behaviour can be customised with the settings below.
|
||||||
|
# If the room already exists, make certain it is a publicly joinable
|
||||||
|
# room. The join rule of the room must be set to 'public'.
|
||||||
#
|
#
|
||||||
#auto_join_rooms:
|
#auto_join_rooms:
|
||||||
# - "#example:example.com"
|
# - "#example:example.com"
|
||||||
|
|||||||
@@ -17,9 +17,7 @@ import os
|
|||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
from typing import Dict, List
|
from typing import Dict, List
|
||||||
|
|
||||||
from netaddr import IPSet
|
from synapse.config.server import DEFAULT_IP_RANGE_BLACKLIST, generate_ip_set
|
||||||
|
|
||||||
from synapse.config.server import DEFAULT_IP_RANGE_BLACKLIST
|
|
||||||
from synapse.python_dependencies import DependencyException, check_requirements
|
from synapse.python_dependencies import DependencyException, check_requirements
|
||||||
from synapse.util.module_loader import load_module
|
from synapse.util.module_loader import load_module
|
||||||
|
|
||||||
@@ -54,7 +52,7 @@ MediaStorageProviderConfig = namedtuple(
|
|||||||
|
|
||||||
|
|
||||||
def parse_thumbnail_requirements(thumbnail_sizes):
|
def parse_thumbnail_requirements(thumbnail_sizes):
|
||||||
""" Takes a list of dictionaries with "width", "height", and "method" keys
|
"""Takes a list of dictionaries with "width", "height", and "method" keys
|
||||||
and creates a map from image media types to the thumbnail size, thumbnailing
|
and creates a map from image media types to the thumbnail size, thumbnailing
|
||||||
method, and thumbnail media type to precalculate
|
method, and thumbnail media type to precalculate
|
||||||
|
|
||||||
@@ -187,16 +185,17 @@ class ContentRepositoryConfig(Config):
|
|||||||
"to work"
|
"to work"
|
||||||
)
|
)
|
||||||
|
|
||||||
self.url_preview_ip_range_blacklist = IPSet(
|
|
||||||
config["url_preview_ip_range_blacklist"]
|
|
||||||
)
|
|
||||||
|
|
||||||
# we always blacklist '0.0.0.0' and '::', which are supposed to be
|
# we always blacklist '0.0.0.0' and '::', which are supposed to be
|
||||||
# unroutable addresses.
|
# unroutable addresses.
|
||||||
self.url_preview_ip_range_blacklist.update(["0.0.0.0", "::"])
|
self.url_preview_ip_range_blacklist = generate_ip_set(
|
||||||
|
config["url_preview_ip_range_blacklist"],
|
||||||
|
["0.0.0.0", "::"],
|
||||||
|
config_path=("url_preview_ip_range_blacklist",),
|
||||||
|
)
|
||||||
|
|
||||||
self.url_preview_ip_range_whitelist = IPSet(
|
self.url_preview_ip_range_whitelist = generate_ip_set(
|
||||||
config.get("url_preview_ip_range_whitelist", ())
|
config.get("url_preview_ip_range_whitelist", ()),
|
||||||
|
config_path=("url_preview_ip_range_whitelist",),
|
||||||
)
|
)
|
||||||
|
|
||||||
self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())
|
self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())
|
||||||
@@ -207,7 +206,6 @@ class ContentRepositoryConfig(Config):
|
|||||||
|
|
||||||
def generate_config_section(self, data_dir_path, **kwargs):
|
def generate_config_section(self, data_dir_path, **kwargs):
|
||||||
media_store = os.path.join(data_dir_path, "media_store")
|
media_store = os.path.join(data_dir_path, "media_store")
|
||||||
uploads_path = os.path.join(data_dir_path, "uploads")
|
|
||||||
|
|
||||||
formatted_thumbnail_sizes = "".join(
|
formatted_thumbnail_sizes = "".join(
|
||||||
THUMBNAIL_SIZE_YAML % s for s in DEFAULT_THUMBNAIL_SIZES
|
THUMBNAIL_SIZE_YAML % s for s in DEFAULT_THUMBNAIL_SIZES
|
||||||
|
|||||||
@@ -123,7 +123,7 @@ class RoomDirectoryConfig(Config):
|
|||||||
alias (str)
|
alias (str)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
boolean: True if user is allowed to crate the alias
|
boolean: True if user is allowed to create the alias
|
||||||
"""
|
"""
|
||||||
for rule in self._alias_creation_rules:
|
for rule in self._alias_creation_rules:
|
||||||
if rule.matches(user_id, room_id, [alias]):
|
if rule.matches(user_id, room_id, [alias]):
|
||||||
|
|||||||
@@ -17,8 +17,7 @@
|
|||||||
import logging
|
import logging
|
||||||
from typing import Any, List
|
from typing import Any, List
|
||||||
|
|
||||||
import attr
|
from synapse.config.sso import SsoAttributeRequirement
|
||||||
|
|
||||||
from synapse.python_dependencies import DependencyException, check_requirements
|
from synapse.python_dependencies import DependencyException, check_requirements
|
||||||
from synapse.util.module_loader import load_module, load_python_module
|
from synapse.util.module_loader import load_module, load_python_module
|
||||||
|
|
||||||
@@ -189,13 +188,15 @@ class SAML2Config(Config):
|
|||||||
import saml2
|
import saml2
|
||||||
|
|
||||||
public_baseurl = self.public_baseurl
|
public_baseurl = self.public_baseurl
|
||||||
|
if public_baseurl is None:
|
||||||
|
raise ConfigError("saml2_config requires a public_baseurl to be set")
|
||||||
|
|
||||||
if self.saml2_grandfathered_mxid_source_attribute:
|
if self.saml2_grandfathered_mxid_source_attribute:
|
||||||
optional_attributes.add(self.saml2_grandfathered_mxid_source_attribute)
|
optional_attributes.add(self.saml2_grandfathered_mxid_source_attribute)
|
||||||
optional_attributes -= required_attributes
|
optional_attributes -= required_attributes
|
||||||
|
|
||||||
metadata_url = public_baseurl + "_matrix/saml2/metadata.xml"
|
metadata_url = public_baseurl + "_synapse/client/saml2/metadata.xml"
|
||||||
response_url = public_baseurl + "_matrix/saml2/authn_response"
|
response_url = public_baseurl + "_synapse/client/saml2/authn_response"
|
||||||
return {
|
return {
|
||||||
"entityid": metadata_url,
|
"entityid": metadata_url,
|
||||||
"service": {
|
"service": {
|
||||||
@@ -233,10 +234,10 @@ class SAML2Config(Config):
|
|||||||
# enable SAML login.
|
# enable SAML login.
|
||||||
#
|
#
|
||||||
# Once SAML support is enabled, a metadata file will be exposed at
|
# Once SAML support is enabled, a metadata file will be exposed at
|
||||||
# https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
|
# https://<server>:<port>/_synapse/client/saml2/metadata.xml, which you may be able to
|
||||||
# use to configure your SAML IdP with. Alternatively, you can manually configure
|
# use to configure your SAML IdP with. Alternatively, you can manually configure
|
||||||
# the IdP to use an ACS location of
|
# the IdP to use an ACS location of
|
||||||
# https://<server>:<port>/_matrix/saml2/authn_response.
|
# https://<server>:<port>/_synapse/client/saml2/authn_response.
|
||||||
#
|
#
|
||||||
saml2_config:
|
saml2_config:
|
||||||
# `sp_config` is the configuration for the pysaml2 Service Provider.
|
# `sp_config` is the configuration for the pysaml2 Service Provider.
|
||||||
@@ -396,32 +397,18 @@ class SAML2Config(Config):
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@attr.s(frozen=True)
|
|
||||||
class SamlAttributeRequirement:
|
|
||||||
"""Object describing a single requirement for SAML attributes."""
|
|
||||||
|
|
||||||
attribute = attr.ib(type=str)
|
|
||||||
value = attr.ib(type=str)
|
|
||||||
|
|
||||||
JSON_SCHEMA = {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {"attribute": {"type": "string"}, "value": {"type": "string"}},
|
|
||||||
"required": ["attribute", "value"],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
ATTRIBUTE_REQUIREMENTS_SCHEMA = {
|
ATTRIBUTE_REQUIREMENTS_SCHEMA = {
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"items": SamlAttributeRequirement.JSON_SCHEMA,
|
"items": SsoAttributeRequirement.JSON_SCHEMA,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def _parse_attribute_requirements_def(
|
def _parse_attribute_requirements_def(
|
||||||
attribute_requirements: Any,
|
attribute_requirements: Any,
|
||||||
) -> List[SamlAttributeRequirement]:
|
) -> List[SsoAttributeRequirement]:
|
||||||
validate_config(
|
validate_config(
|
||||||
ATTRIBUTE_REQUIREMENTS_SCHEMA,
|
ATTRIBUTE_REQUIREMENTS_SCHEMA,
|
||||||
attribute_requirements,
|
attribute_requirements,
|
||||||
config_path=["saml2_config", "attribute_requirements"],
|
config_path=("saml2_config", "attribute_requirements"),
|
||||||
)
|
)
|
||||||
return [SamlAttributeRequirement(**x) for x in attribute_requirements]
|
return [SsoAttributeRequirement(**x) for x in attribute_requirements]
|
||||||
|
|||||||
@@ -15,6 +15,7 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
import itertools
|
||||||
import logging
|
import logging
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
@@ -23,7 +24,7 @@ from typing import Any, Dict, Iterable, List, Optional, Set
|
|||||||
|
|
||||||
import attr
|
import attr
|
||||||
import yaml
|
import yaml
|
||||||
from netaddr import IPSet
|
from netaddr import AddrFormatError, IPNetwork, IPSet
|
||||||
|
|
||||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
||||||
from synapse.util.stringutils import parse_and_validate_server_name
|
from synapse.util.stringutils import parse_and_validate_server_name
|
||||||
@@ -40,6 +41,71 @@ logger = logging.Logger(__name__)
|
|||||||
# in the list.
|
# in the list.
|
||||||
DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"]
|
DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"]
|
||||||
|
|
||||||
|
|
||||||
|
def _6to4(network: IPNetwork) -> IPNetwork:
|
||||||
|
"""Convert an IPv4 network into a 6to4 IPv6 network per RFC 3056."""
|
||||||
|
|
||||||
|
# 6to4 networks consist of:
|
||||||
|
# * 2002 as the first 16 bits
|
||||||
|
# * The first IPv4 address in the network hex-encoded as the next 32 bits
|
||||||
|
# * The new prefix length needs to include the bits from the 2002 prefix.
|
||||||
|
hex_network = hex(network.first)[2:]
|
||||||
|
hex_network = ("0" * (8 - len(hex_network))) + hex_network
|
||||||
|
return IPNetwork(
|
||||||
|
"2002:%s:%s::/%d"
|
||||||
|
% (
|
||||||
|
hex_network[:4],
|
||||||
|
hex_network[4:],
|
||||||
|
16 + network.prefixlen,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def generate_ip_set(
|
||||||
|
ip_addresses: Optional[Iterable[str]],
|
||||||
|
extra_addresses: Optional[Iterable[str]] = None,
|
||||||
|
config_path: Optional[Iterable[str]] = None,
|
||||||
|
) -> IPSet:
|
||||||
|
"""
|
||||||
|
Generate an IPSet from a list of IP addresses or CIDRs.
|
||||||
|
|
||||||
|
Additionally, for each IPv4 network in the list of IP addresses, also
|
||||||
|
includes the corresponding IPv6 networks.
|
||||||
|
|
||||||
|
This includes:
|
||||||
|
|
||||||
|
* IPv4-Compatible IPv6 Address (see RFC 4291, section 2.5.5.1)
|
||||||
|
* IPv4-Mapped IPv6 Address (see RFC 4291, section 2.5.5.2)
|
||||||
|
* 6to4 Address (see RFC 3056, section 2)
|
||||||
|
|
||||||
|
Args:
|
||||||
|
ip_addresses: An iterable of IP addresses or CIDRs.
|
||||||
|
extra_addresses: An iterable of IP addresses or CIDRs.
|
||||||
|
config_path: The path in the configuration for error messages.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A new IP set.
|
||||||
|
"""
|
||||||
|
result = IPSet()
|
||||||
|
for ip in itertools.chain(ip_addresses or (), extra_addresses or ()):
|
||||||
|
try:
|
||||||
|
network = IPNetwork(ip)
|
||||||
|
except AddrFormatError as e:
|
||||||
|
raise ConfigError(
|
||||||
|
"Invalid IP range provided: %s." % (ip,), config_path
|
||||||
|
) from e
|
||||||
|
result.add(network)
|
||||||
|
|
||||||
|
# It is possible that these already exist in the set, but that's OK.
|
||||||
|
if ":" not in str(network):
|
||||||
|
result.add(IPNetwork(network).ipv6(ipv4_compatible=True))
|
||||||
|
result.add(IPNetwork(network).ipv6(ipv4_compatible=False))
|
||||||
|
result.add(_6to4(network))
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
# IP ranges that are considered private / unroutable / don't make sense.
|
||||||
DEFAULT_IP_RANGE_BLACKLIST = [
|
DEFAULT_IP_RANGE_BLACKLIST = [
|
||||||
# Localhost
|
# Localhost
|
||||||
"127.0.0.0/8",
|
"127.0.0.0/8",
|
||||||
@@ -53,6 +119,8 @@ DEFAULT_IP_RANGE_BLACKLIST = [
|
|||||||
"192.0.0.0/24",
|
"192.0.0.0/24",
|
||||||
# Link-local networks.
|
# Link-local networks.
|
||||||
"169.254.0.0/16",
|
"169.254.0.0/16",
|
||||||
|
# Formerly used for 6to4 relay.
|
||||||
|
"192.88.99.0/24",
|
||||||
# Testing networks.
|
# Testing networks.
|
||||||
"198.18.0.0/15",
|
"198.18.0.0/15",
|
||||||
"192.0.2.0/24",
|
"192.0.2.0/24",
|
||||||
@@ -66,6 +134,12 @@ DEFAULT_IP_RANGE_BLACKLIST = [
|
|||||||
"fe80::/10",
|
"fe80::/10",
|
||||||
# Unique local addresses.
|
# Unique local addresses.
|
||||||
"fc00::/7",
|
"fc00::/7",
|
||||||
|
# Testing networks.
|
||||||
|
"2001:db8::/32",
|
||||||
|
# Multicast.
|
||||||
|
"ff00::/8",
|
||||||
|
# Site-local addresses
|
||||||
|
"fec0::/10",
|
||||||
]
|
]
|
||||||
|
|
||||||
DEFAULT_ROOM_VERSION = "6"
|
DEFAULT_ROOM_VERSION = "6"
|
||||||
@@ -161,11 +235,7 @@ class ServerConfig(Config):
|
|||||||
self.print_pidfile = config.get("print_pidfile")
|
self.print_pidfile = config.get("print_pidfile")
|
||||||
self.user_agent_suffix = config.get("user_agent_suffix")
|
self.user_agent_suffix = config.get("user_agent_suffix")
|
||||||
self.use_frozen_dicts = config.get("use_frozen_dicts", False)
|
self.use_frozen_dicts = config.get("use_frozen_dicts", False)
|
||||||
self.public_baseurl = config.get("public_baseurl") or "https://%s/" % (
|
self.public_baseurl = config.get("public_baseurl")
|
||||||
self.server_name,
|
|
||||||
)
|
|
||||||
if self.public_baseurl[-1] != "/":
|
|
||||||
self.public_baseurl += "/"
|
|
||||||
|
|
||||||
# Whether to enable user presence.
|
# Whether to enable user presence.
|
||||||
self.use_presence = config.get("use_presence", True)
|
self.use_presence = config.get("use_presence", True)
|
||||||
@@ -189,7 +259,14 @@ class ServerConfig(Config):
|
|||||||
# Whether to require sharing a room with a user to retrieve their
|
# Whether to require sharing a room with a user to retrieve their
|
||||||
# profile data
|
# profile data
|
||||||
self.limit_profile_requests_to_users_who_share_rooms = config.get(
|
self.limit_profile_requests_to_users_who_share_rooms = config.get(
|
||||||
"limit_profile_requests_to_users_who_share_rooms", False,
|
"limit_profile_requests_to_users_who_share_rooms",
|
||||||
|
False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Whether to retrieve and display profile data for a user when they
|
||||||
|
# are invited to a room
|
||||||
|
self.include_profile_data_on_invite = config.get(
|
||||||
|
"include_profile_data_on_invite", True
|
||||||
)
|
)
|
||||||
|
|
||||||
if "restrict_public_rooms_to_local_users" in config and (
|
if "restrict_public_rooms_to_local_users" in config and (
|
||||||
@@ -294,17 +371,15 @@ class ServerConfig(Config):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Attempt to create an IPSet from the given ranges
|
# Attempt to create an IPSet from the given ranges
|
||||||
try:
|
|
||||||
self.ip_range_blacklist = IPSet(ip_range_blacklist)
|
|
||||||
except Exception as e:
|
|
||||||
raise ConfigError("Invalid range(s) provided in ip_range_blacklist.") from e
|
|
||||||
# Always blacklist 0.0.0.0, ::
|
|
||||||
self.ip_range_blacklist.update(["0.0.0.0", "::"])
|
|
||||||
|
|
||||||
try:
|
# Always blacklist 0.0.0.0, ::
|
||||||
self.ip_range_whitelist = IPSet(config.get("ip_range_whitelist", ()))
|
self.ip_range_blacklist = generate_ip_set(
|
||||||
except Exception as e:
|
ip_range_blacklist, ["0.0.0.0", "::"], config_path=("ip_range_blacklist",)
|
||||||
raise ConfigError("Invalid range(s) provided in ip_range_whitelist.") from e
|
)
|
||||||
|
|
||||||
|
self.ip_range_whitelist = generate_ip_set(
|
||||||
|
config.get("ip_range_whitelist", ()), config_path=("ip_range_whitelist",)
|
||||||
|
)
|
||||||
|
|
||||||
# The federation_ip_range_blacklist is used for backwards-compatibility
|
# The federation_ip_range_blacklist is used for backwards-compatibility
|
||||||
# and only applies to federation and identity servers. If it is not given,
|
# and only applies to federation and identity servers. If it is not given,
|
||||||
@@ -312,16 +387,16 @@ class ServerConfig(Config):
|
|||||||
federation_ip_range_blacklist = config.get(
|
federation_ip_range_blacklist = config.get(
|
||||||
"federation_ip_range_blacklist", ip_range_blacklist
|
"federation_ip_range_blacklist", ip_range_blacklist
|
||||||
)
|
)
|
||||||
try:
|
|
||||||
self.federation_ip_range_blacklist = IPSet(federation_ip_range_blacklist)
|
|
||||||
except Exception as e:
|
|
||||||
raise ConfigError(
|
|
||||||
"Invalid range(s) provided in federation_ip_range_blacklist."
|
|
||||||
) from e
|
|
||||||
# Always blacklist 0.0.0.0, ::
|
# Always blacklist 0.0.0.0, ::
|
||||||
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
|
self.federation_ip_range_blacklist = generate_ip_set(
|
||||||
|
federation_ip_range_blacklist,
|
||||||
|
["0.0.0.0", "::"],
|
||||||
|
config_path=("federation_ip_range_blacklist",),
|
||||||
|
)
|
||||||
|
|
||||||
self.start_pushers = config.get("start_pushers", True)
|
if self.public_baseurl is not None:
|
||||||
|
if self.public_baseurl[-1] != "/":
|
||||||
|
self.public_baseurl += "/"
|
||||||
|
|
||||||
# (undocumented) option for torturing the worker-mode replication a bit,
|
# (undocumented) option for torturing the worker-mode replication a bit,
|
||||||
# for testing. The value defines the number of milliseconds to pause before
|
# for testing. The value defines the number of milliseconds to pause before
|
||||||
@@ -550,7 +625,9 @@ class ServerConfig(Config):
|
|||||||
if manhole:
|
if manhole:
|
||||||
self.listeners.append(
|
self.listeners.append(
|
||||||
ListenerConfig(
|
ListenerConfig(
|
||||||
port=manhole, bind_addresses=["127.0.0.1"], type="manhole",
|
port=manhole,
|
||||||
|
bind_addresses=["127.0.0.1"],
|
||||||
|
type="manhole",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -586,7 +663,8 @@ class ServerConfig(Config):
|
|||||||
# and letting the client know which email address is bound to an account and
|
# and letting the client know which email address is bound to an account and
|
||||||
# which one isn't.
|
# which one isn't.
|
||||||
self.request_token_inhibit_3pid_errors = config.get(
|
self.request_token_inhibit_3pid_errors = config.get(
|
||||||
"request_token_inhibit_3pid_errors", False,
|
"request_token_inhibit_3pid_errors",
|
||||||
|
False,
|
||||||
)
|
)
|
||||||
|
|
||||||
# List of users trialing the new experimental default push rules. This setting is
|
# List of users trialing the new experimental default push rules. This setting is
|
||||||
@@ -748,10 +826,6 @@ class ServerConfig(Config):
|
|||||||
# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
|
# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
|
||||||
# 'listeners' below).
|
# 'listeners' below).
|
||||||
#
|
#
|
||||||
# If this is left unset, it defaults to 'https://<server_name>/'. (Note that
|
|
||||||
# that will not work unless you configure Synapse or a reverse-proxy to listen
|
|
||||||
# on port 443.)
|
|
||||||
#
|
|
||||||
#public_baseurl: https://example.com/
|
#public_baseurl: https://example.com/
|
||||||
|
|
||||||
# Set the soft limit on the number of file descriptors synapse can use
|
# Set the soft limit on the number of file descriptors synapse can use
|
||||||
@@ -779,6 +853,14 @@ class ServerConfig(Config):
|
|||||||
#
|
#
|
||||||
#limit_profile_requests_to_users_who_share_rooms: true
|
#limit_profile_requests_to_users_who_share_rooms: true
|
||||||
|
|
||||||
|
# Uncomment to prevent a user's profile data from being retrieved and
|
||||||
|
# displayed in a room until they have joined it. By default, a user's
|
||||||
|
# profile data is included in an invite event, regardless of the values
|
||||||
|
# of the above two settings, and whether or not the users share a server.
|
||||||
|
# Defaults to 'true'.
|
||||||
|
#
|
||||||
|
#include_profile_data_on_invite: false
|
||||||
|
|
||||||
# If set to 'true', removes the need for authentication to access the server's
|
# If set to 'true', removes the need for authentication to access the server's
|
||||||
# public rooms directory through the client API, meaning that anyone can
|
# public rooms directory through the client API, meaning that anyone can
|
||||||
# query the room directory. Defaults to 'false'.
|
# query the room directory. Defaults to 'false'.
|
||||||
|
|||||||
@@ -12,14 +12,30 @@
|
|||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
from typing import Any, Dict
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
import attr
|
||||||
|
|
||||||
from ._base import Config
|
from ._base import Config
|
||||||
|
|
||||||
|
|
||||||
|
@attr.s(frozen=True)
|
||||||
|
class SsoAttributeRequirement:
|
||||||
|
"""Object describing a single requirement for SSO attributes."""
|
||||||
|
|
||||||
|
attribute = attr.ib(type=str)
|
||||||
|
# If a value is not given, than the attribute must simply exist.
|
||||||
|
value = attr.ib(type=Optional[str])
|
||||||
|
|
||||||
|
JSON_SCHEMA = {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {"attribute": {"type": "string"}, "value": {"type": "string"}},
|
||||||
|
"required": ["attribute", "value"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
class SSOConfig(Config):
|
class SSOConfig(Config):
|
||||||
"""SSO Configuration
|
"""SSO Configuration"""
|
||||||
"""
|
|
||||||
|
|
||||||
section = "sso"
|
section = "sso"
|
||||||
|
|
||||||
@@ -27,7 +43,7 @@ class SSOConfig(Config):
|
|||||||
sso_config = config.get("sso") or {} # type: Dict[str, Any]
|
sso_config = config.get("sso") or {} # type: Dict[str, Any]
|
||||||
|
|
||||||
# The sso-specific template_dir
|
# The sso-specific template_dir
|
||||||
template_dir = sso_config.get("template_dir")
|
self.sso_template_dir = sso_config.get("template_dir")
|
||||||
|
|
||||||
# Read templates from disk
|
# Read templates from disk
|
||||||
(
|
(
|
||||||
@@ -48,7 +64,7 @@ class SSOConfig(Config):
|
|||||||
"sso_auth_success.html",
|
"sso_auth_success.html",
|
||||||
"sso_auth_bad_user.html",
|
"sso_auth_bad_user.html",
|
||||||
],
|
],
|
||||||
template_dir,
|
self.sso_template_dir,
|
||||||
)
|
)
|
||||||
|
|
||||||
# These templates have no placeholders, so render them here
|
# These templates have no placeholders, so render them here
|
||||||
@@ -64,8 +80,11 @@ class SSOConfig(Config):
|
|||||||
# gracefully to the client). This would make it pointless to ask the user for
|
# gracefully to the client). This would make it pointless to ask the user for
|
||||||
# confirmation, since the URL the confirmation page would be showing wouldn't be
|
# confirmation, since the URL the confirmation page would be showing wouldn't be
|
||||||
# the client's.
|
# the client's.
|
||||||
login_fallback_url = self.public_baseurl + "_matrix/static/client/login"
|
# public_baseurl is an optional setting, so we only add the fallback's URL to the
|
||||||
self.sso_client_whitelist.append(login_fallback_url)
|
# list if it's provided (because we can't figure out what that URL is otherwise).
|
||||||
|
if self.public_baseurl:
|
||||||
|
login_fallback_url = self.public_baseurl + "_matrix/static/client/login"
|
||||||
|
self.sso_client_whitelist.append(login_fallback_url)
|
||||||
|
|
||||||
def generate_config_section(self, **kwargs):
|
def generate_config_section(self, **kwargs):
|
||||||
return """\
|
return """\
|
||||||
@@ -83,9 +102,9 @@ class SSOConfig(Config):
|
|||||||
# phishing attacks from evil.site. To avoid this, include a slash after the
|
# phishing attacks from evil.site. To avoid this, include a slash after the
|
||||||
# hostname: "https://my.client/".
|
# hostname: "https://my.client/".
|
||||||
#
|
#
|
||||||
# The login fallback page (used by clients that don't natively support the
|
# If public_baseurl is set, then the login fallback page (used by clients
|
||||||
# required login flows) is automatically whitelisted in addition to any URLs
|
# that don't natively support the required login flows) is whitelisted in
|
||||||
# in this list.
|
# addition to any URLs in this list.
|
||||||
#
|
#
|
||||||
# By default, this list is empty.
|
# By default, this list is empty.
|
||||||
#
|
#
|
||||||
@@ -106,15 +125,19 @@ class SSOConfig(Config):
|
|||||||
#
|
#
|
||||||
# When rendering, this template is given the following variables:
|
# When rendering, this template is given the following variables:
|
||||||
# * redirect_url: the URL that the user will be redirected to after
|
# * redirect_url: the URL that the user will be redirected to after
|
||||||
# login. Needs manual escaping (see
|
# login.
|
||||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
|
||||||
#
|
#
|
||||||
# * server_name: the homeserver's name.
|
# * server_name: the homeserver's name.
|
||||||
#
|
#
|
||||||
# * providers: a list of available Identity Providers. Each element is
|
# * providers: a list of available Identity Providers. Each element is
|
||||||
# an object with the following attributes:
|
# an object with the following attributes:
|
||||||
|
#
|
||||||
# * idp_id: unique identifier for the IdP
|
# * idp_id: unique identifier for the IdP
|
||||||
# * idp_name: user-facing name for the IdP
|
# * idp_name: user-facing name for the IdP
|
||||||
|
# * idp_icon: if specified in the IdP config, an MXC URI for an icon
|
||||||
|
# for the IdP
|
||||||
|
# * idp_brand: if specified in the IdP config, a textual identifier
|
||||||
|
# for the brand of the IdP
|
||||||
#
|
#
|
||||||
# The rendered HTML page should contain a form which submits its results
|
# The rendered HTML page should contain a form which submits its results
|
||||||
# back as a GET request, with the following query parameters:
|
# back as a GET request, with the following query parameters:
|
||||||
@@ -124,33 +147,101 @@ class SSOConfig(Config):
|
|||||||
#
|
#
|
||||||
# * idp: the 'idp_id' of the chosen IDP.
|
# * idp: the 'idp_id' of the chosen IDP.
|
||||||
#
|
#
|
||||||
|
# * HTML page to prompt new users to enter a userid and confirm other
|
||||||
|
# details: 'sso_auth_account_details.html'. This is only shown if the
|
||||||
|
# SSO implementation (with any user_mapping_provider) does not return
|
||||||
|
# a localpart.
|
||||||
|
#
|
||||||
|
# When rendering, this template is given the following variables:
|
||||||
|
#
|
||||||
|
# * server_name: the homeserver's name.
|
||||||
|
#
|
||||||
|
# * idp: details of the SSO Identity Provider that the user logged in
|
||||||
|
# with: an object with the following attributes:
|
||||||
|
#
|
||||||
|
# * idp_id: unique identifier for the IdP
|
||||||
|
# * idp_name: user-facing name for the IdP
|
||||||
|
# * idp_icon: if specified in the IdP config, an MXC URI for an icon
|
||||||
|
# for the IdP
|
||||||
|
# * idp_brand: if specified in the IdP config, a textual identifier
|
||||||
|
# for the brand of the IdP
|
||||||
|
#
|
||||||
|
# * user_attributes: an object containing details about the user that
|
||||||
|
# we received from the IdP. May have the following attributes:
|
||||||
|
#
|
||||||
|
# * display_name: the user's display_name
|
||||||
|
# * emails: a list of email addresses
|
||||||
|
#
|
||||||
|
# The template should render a form which submits the following fields:
|
||||||
|
#
|
||||||
|
# * username: the localpart of the user's chosen user id
|
||||||
|
#
|
||||||
|
# * HTML page allowing the user to consent to the server's terms and
|
||||||
|
# conditions. This is only shown for new users, and only if
|
||||||
|
# `user_consent.require_at_registration` is set.
|
||||||
|
#
|
||||||
|
# When rendering, this template is given the following variables:
|
||||||
|
#
|
||||||
|
# * server_name: the homeserver's name.
|
||||||
|
#
|
||||||
|
# * user_id: the user's matrix proposed ID.
|
||||||
|
#
|
||||||
|
# * user_profile.display_name: the user's proposed display name, if any.
|
||||||
|
#
|
||||||
|
# * consent_version: the version of the terms that the user will be
|
||||||
|
# shown
|
||||||
|
#
|
||||||
|
# * terms_url: a link to the page showing the terms.
|
||||||
|
#
|
||||||
|
# The template should render a form which submits the following fields:
|
||||||
|
#
|
||||||
|
# * accepted_version: the version of the terms accepted by the user
|
||||||
|
# (ie, 'consent_version' from the input variables).
|
||||||
|
#
|
||||||
# * HTML page for a confirmation step before redirecting back to the client
|
# * HTML page for a confirmation step before redirecting back to the client
|
||||||
# with the login token: 'sso_redirect_confirm.html'.
|
# with the login token: 'sso_redirect_confirm.html'.
|
||||||
#
|
#
|
||||||
# When rendering, this template is given three variables:
|
# When rendering, this template is given the following variables:
|
||||||
# * redirect_url: the URL the user is about to be redirected to. Needs
|
#
|
||||||
# manual escaping (see
|
# * redirect_url: the URL the user is about to be redirected to.
|
||||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
|
||||||
#
|
#
|
||||||
# * display_url: the same as `redirect_url`, but with the query
|
# * display_url: the same as `redirect_url`, but with the query
|
||||||
# parameters stripped. The intention is to have a
|
# parameters stripped. The intention is to have a
|
||||||
# human-readable URL to show to users, not to use it as
|
# human-readable URL to show to users, not to use it as
|
||||||
# the final address to redirect to. Needs manual escaping
|
# the final address to redirect to.
|
||||||
# (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
|
||||||
#
|
#
|
||||||
# * server_name: the homeserver's name.
|
# * server_name: the homeserver's name.
|
||||||
#
|
#
|
||||||
|
# * new_user: a boolean indicating whether this is the user's first time
|
||||||
|
# logging in.
|
||||||
|
#
|
||||||
|
# * user_id: the user's matrix ID.
|
||||||
|
#
|
||||||
|
# * user_profile.avatar_url: an MXC URI for the user's avatar, if any.
|
||||||
|
# None if the user has not set an avatar.
|
||||||
|
#
|
||||||
|
# * user_profile.display_name: the user's display name. None if the user
|
||||||
|
# has not set a display name.
|
||||||
|
#
|
||||||
# * HTML page which notifies the user that they are authenticating to confirm
|
# * HTML page which notifies the user that they are authenticating to confirm
|
||||||
# an operation on their account during the user interactive authentication
|
# an operation on their account during the user interactive authentication
|
||||||
# process: 'sso_auth_confirm.html'.
|
# process: 'sso_auth_confirm.html'.
|
||||||
#
|
#
|
||||||
# When rendering, this template is given the following variables:
|
# When rendering, this template is given the following variables:
|
||||||
# * redirect_url: the URL the user is about to be redirected to. Needs
|
# * redirect_url: the URL the user is about to be redirected to.
|
||||||
# manual escaping (see
|
|
||||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
|
||||||
#
|
#
|
||||||
# * description: the operation which the user is being asked to confirm
|
# * description: the operation which the user is being asked to confirm
|
||||||
#
|
#
|
||||||
|
# * idp: details of the Identity Provider that we will use to confirm
|
||||||
|
# the user's identity: an object with the following attributes:
|
||||||
|
#
|
||||||
|
# * idp_id: unique identifier for the IdP
|
||||||
|
# * idp_name: user-facing name for the IdP
|
||||||
|
# * idp_icon: if specified in the IdP config, an MXC URI for an icon
|
||||||
|
# for the IdP
|
||||||
|
# * idp_brand: if specified in the IdP config, a textual identifier
|
||||||
|
# for the brand of the IdP
|
||||||
|
#
|
||||||
# * HTML page shown after a successful user interactive authentication session:
|
# * HTML page shown after a successful user interactive authentication session:
|
||||||
# 'sso_auth_success.html'.
|
# 'sso_auth_success.html'.
|
||||||
#
|
#
|
||||||
|
|||||||
@@ -24,32 +24,46 @@ class UserDirectoryConfig(Config):
|
|||||||
section = "userdirectory"
|
section = "userdirectory"
|
||||||
|
|
||||||
def read_config(self, config, **kwargs):
|
def read_config(self, config, **kwargs):
|
||||||
self.user_directory_search_enabled = True
|
user_directory_config = config.get("user_directory") or {}
|
||||||
self.user_directory_search_all_users = False
|
self.user_directory_search_enabled = user_directory_config.get("enabled", True)
|
||||||
user_directory_config = config.get("user_directory", None)
|
self.user_directory_search_all_users = user_directory_config.get(
|
||||||
if user_directory_config:
|
"search_all_users", False
|
||||||
self.user_directory_search_enabled = user_directory_config.get(
|
)
|
||||||
"enabled", True
|
self.user_directory_search_prefer_local_users = user_directory_config.get(
|
||||||
)
|
"prefer_local_users", False
|
||||||
self.user_directory_search_all_users = user_directory_config.get(
|
)
|
||||||
"search_all_users", False
|
|
||||||
)
|
|
||||||
|
|
||||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||||
return """
|
return """
|
||||||
# User Directory configuration
|
# User Directory configuration
|
||||||
#
|
#
|
||||||
# 'enabled' defines whether users can search the user directory. If
|
user_directory:
|
||||||
# false then empty responses are returned to all queries. Defaults to
|
# Defines whether users can search the user directory. If false then
|
||||||
# true.
|
# empty responses are returned to all queries. Defaults to true.
|
||||||
#
|
#
|
||||||
# 'search_all_users' defines whether to search all users visible to your HS
|
# Uncomment to disable the user directory.
|
||||||
# when searching the user directory, rather than limiting to users visible
|
#
|
||||||
# in public rooms. Defaults to false. If you set it True, you'll have to
|
#enabled: false
|
||||||
# rebuild the user_directory search indexes, see
|
|
||||||
# https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
|
# Defines whether to search all users visible to your HS when searching
|
||||||
#
|
# the user directory, rather than limiting to users visible in public
|
||||||
#user_directory:
|
# rooms. Defaults to false.
|
||||||
# enabled: true
|
#
|
||||||
# search_all_users: false
|
# If you set it true, you'll have to rebuild the user_directory search
|
||||||
|
# indexes, see:
|
||||||
|
# https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
|
||||||
|
#
|
||||||
|
# Uncomment to return search results containing all known users, even if that
|
||||||
|
# user does not share a room with the requester.
|
||||||
|
#
|
||||||
|
#search_all_users: true
|
||||||
|
|
||||||
|
# Defines whether to prefer local users in search query results.
|
||||||
|
# If True, local users are more likely to appear above remote users
|
||||||
|
# when searching the user directory. Defaults to false.
|
||||||
|
#
|
||||||
|
# Uncomment to prefer local over remote users in user directory search
|
||||||
|
# results.
|
||||||
|
#
|
||||||
|
#prefer_local_users: true
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -17,9 +17,28 @@ from typing import List, Union
|
|||||||
|
|
||||||
import attr
|
import attr
|
||||||
|
|
||||||
from ._base import Config, ConfigError, ShardedWorkerHandlingConfig
|
from ._base import (
|
||||||
|
Config,
|
||||||
|
ConfigError,
|
||||||
|
RoutableShardedWorkerHandlingConfig,
|
||||||
|
ShardedWorkerHandlingConfig,
|
||||||
|
)
|
||||||
from .server import ListenerConfig, parse_listener_def
|
from .server import ListenerConfig, parse_listener_def
|
||||||
|
|
||||||
|
_FEDERATION_SENDER_WITH_SEND_FEDERATION_ENABLED_ERROR = """
|
||||||
|
The send_federation config option must be disabled in the main
|
||||||
|
synapse process before they can be run in a separate worker.
|
||||||
|
|
||||||
|
Please add ``send_federation: false`` to the main config
|
||||||
|
"""
|
||||||
|
|
||||||
|
_PUSHER_WITH_START_PUSHERS_ENABLED_ERROR = """
|
||||||
|
The start_pushers config option must be disabled in the main
|
||||||
|
synapse process before they can be run in a separate worker.
|
||||||
|
|
||||||
|
Please add ``start_pushers: false`` to the main config
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]:
|
def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]:
|
||||||
"""Helper for allowing parsing a string or list of strings to a config
|
"""Helper for allowing parsing a string or list of strings to a config
|
||||||
@@ -33,8 +52,7 @@ def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]:
|
|||||||
|
|
||||||
@attr.s
|
@attr.s
|
||||||
class InstanceLocationConfig:
|
class InstanceLocationConfig:
|
||||||
"""The host and port to talk to an instance via HTTP replication.
|
"""The host and port to talk to an instance via HTTP replication."""
|
||||||
"""
|
|
||||||
|
|
||||||
host = attr.ib(type=str)
|
host = attr.ib(type=str)
|
||||||
port = attr.ib(type=int)
|
port = attr.ib(type=int)
|
||||||
@@ -54,13 +72,19 @@ class WriterLocations:
|
|||||||
)
|
)
|
||||||
typing = attr.ib(default="master", type=str)
|
typing = attr.ib(default="master", type=str)
|
||||||
to_device = attr.ib(
|
to_device = attr.ib(
|
||||||
default=["master"], type=List[str], converter=_instance_to_list_converter,
|
default=["master"],
|
||||||
|
type=List[str],
|
||||||
|
converter=_instance_to_list_converter,
|
||||||
)
|
)
|
||||||
account_data = attr.ib(
|
account_data = attr.ib(
|
||||||
default=["master"], type=List[str], converter=_instance_to_list_converter,
|
default=["master"],
|
||||||
|
type=List[str],
|
||||||
|
converter=_instance_to_list_converter,
|
||||||
)
|
)
|
||||||
receipts = attr.ib(
|
receipts = attr.ib(
|
||||||
default=["master"], type=List[str], converter=_instance_to_list_converter,
|
default=["master"],
|
||||||
|
type=List[str],
|
||||||
|
converter=_instance_to_list_converter,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -98,6 +122,7 @@ class WorkerConfig(Config):
|
|||||||
self.worker_replication_secret = config.get("worker_replication_secret", None)
|
self.worker_replication_secret = config.get("worker_replication_secret", None)
|
||||||
|
|
||||||
self.worker_name = config.get("worker_name", self.worker_app)
|
self.worker_name = config.get("worker_name", self.worker_app)
|
||||||
|
self.instance_name = self.worker_name or "master"
|
||||||
|
|
||||||
self.worker_main_http_uri = config.get("worker_main_http_uri", None)
|
self.worker_main_http_uri = config.get("worker_main_http_uri", None)
|
||||||
|
|
||||||
@@ -107,16 +132,47 @@ class WorkerConfig(Config):
|
|||||||
if manhole:
|
if manhole:
|
||||||
self.worker_listeners.append(
|
self.worker_listeners.append(
|
||||||
ListenerConfig(
|
ListenerConfig(
|
||||||
port=manhole, bind_addresses=["127.0.0.1"], type="manhole",
|
port=manhole,
|
||||||
|
bind_addresses=["127.0.0.1"],
|
||||||
|
type="manhole",
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Whether to send federation traffic out in this process. This only
|
# Handle federation sender configuration.
|
||||||
# applies to some federation traffic, and so shouldn't be used to
|
#
|
||||||
# "disable" federation
|
# There are two ways of configuring which instances handle federation
|
||||||
self.send_federation = config.get("send_federation", True)
|
# sending:
|
||||||
|
# 1. The old way where "send_federation" is set to false and running a
|
||||||
|
# `synapse.app.federation_sender` worker app.
|
||||||
|
# 2. Specifying the workers sending federation in
|
||||||
|
# `federation_sender_instances`.
|
||||||
|
#
|
||||||
|
|
||||||
federation_sender_instances = config.get("federation_sender_instances") or []
|
send_federation = config.get("send_federation", True)
|
||||||
|
|
||||||
|
federation_sender_instances = config.get("federation_sender_instances")
|
||||||
|
if federation_sender_instances is None:
|
||||||
|
# Default to an empty list, which means "another, unknown, worker is
|
||||||
|
# responsible for it".
|
||||||
|
federation_sender_instances = []
|
||||||
|
|
||||||
|
# If no federation sender instances are set we check if
|
||||||
|
# `send_federation` is set, which means use master
|
||||||
|
if send_federation:
|
||||||
|
federation_sender_instances = ["master"]
|
||||||
|
|
||||||
|
if self.worker_app == "synapse.app.federation_sender":
|
||||||
|
if send_federation:
|
||||||
|
# If we're running federation senders, and not using
|
||||||
|
# `federation_sender_instances`, then we should have
|
||||||
|
# explicitly set `send_federation` to false.
|
||||||
|
raise ConfigError(
|
||||||
|
_FEDERATION_SENDER_WITH_SEND_FEDERATION_ENABLED_ERROR
|
||||||
|
)
|
||||||
|
|
||||||
|
federation_sender_instances = [self.worker_name]
|
||||||
|
|
||||||
|
self.send_federation = self.instance_name in federation_sender_instances
|
||||||
self.federation_shard_config = ShardedWorkerHandlingConfig(
|
self.federation_shard_config = ShardedWorkerHandlingConfig(
|
||||||
federation_sender_instances
|
federation_sender_instances
|
||||||
)
|
)
|
||||||
@@ -157,7 +213,37 @@ class WorkerConfig(Config):
|
|||||||
"Must only specify one instance to handle `receipts` messages."
|
"Must only specify one instance to handle `receipts` messages."
|
||||||
)
|
)
|
||||||
|
|
||||||
self.events_shard_config = ShardedWorkerHandlingConfig(self.writers.events)
|
if len(self.writers.events) == 0:
|
||||||
|
raise ConfigError("Must specify at least one instance to handle `events`.")
|
||||||
|
|
||||||
|
self.events_shard_config = RoutableShardedWorkerHandlingConfig(
|
||||||
|
self.writers.events
|
||||||
|
)
|
||||||
|
|
||||||
|
# Handle sharded push
|
||||||
|
start_pushers = config.get("start_pushers", True)
|
||||||
|
pusher_instances = config.get("pusher_instances")
|
||||||
|
if pusher_instances is None:
|
||||||
|
# Default to an empty list, which means "another, unknown, worker is
|
||||||
|
# responsible for it".
|
||||||
|
pusher_instances = []
|
||||||
|
|
||||||
|
# If no pushers instances are set we check if `start_pushers` is
|
||||||
|
# set, which means use master
|
||||||
|
if start_pushers:
|
||||||
|
pusher_instances = ["master"]
|
||||||
|
|
||||||
|
if self.worker_app == "synapse.app.pusher":
|
||||||
|
if start_pushers:
|
||||||
|
# If we're running pushers, and not using
|
||||||
|
# `pusher_instances`, then we should have explicitly set
|
||||||
|
# `start_pushers` to false.
|
||||||
|
raise ConfigError(_PUSHER_WITH_START_PUSHERS_ENABLED_ERROR)
|
||||||
|
|
||||||
|
pusher_instances = [self.instance_name]
|
||||||
|
|
||||||
|
self.start_pushers = self.instance_name in pusher_instances
|
||||||
|
self.pusher_shard_config = ShardedWorkerHandlingConfig(pusher_instances)
|
||||||
|
|
||||||
# Whether this worker should run background tasks or not.
|
# Whether this worker should run background tasks or not.
|
||||||
#
|
#
|
||||||
|
|||||||
@@ -125,19 +125,24 @@ class FederationPolicyForHTTPS:
|
|||||||
self._no_verify_ssl_context = _no_verify_ssl.getContext()
|
self._no_verify_ssl_context = _no_verify_ssl.getContext()
|
||||||
self._no_verify_ssl_context.set_info_callback(_context_info_cb)
|
self._no_verify_ssl_context.set_info_callback(_context_info_cb)
|
||||||
|
|
||||||
def get_options(self, host: bytes):
|
self._should_verify = self._config.federation_verify_certificates
|
||||||
|
|
||||||
|
self._federation_certificate_verification_whitelist = (
|
||||||
|
self._config.federation_certificate_verification_whitelist
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_options(self, host: bytes):
|
||||||
# IPolicyForHTTPS.get_options takes bytes, but we want to compare
|
# IPolicyForHTTPS.get_options takes bytes, but we want to compare
|
||||||
# against the str whitelist. The hostnames in the whitelist are already
|
# against the str whitelist. The hostnames in the whitelist are already
|
||||||
# IDNA-encoded like the hosts will be here.
|
# IDNA-encoded like the hosts will be here.
|
||||||
ascii_host = host.decode("ascii")
|
ascii_host = host.decode("ascii")
|
||||||
|
|
||||||
# Check if certificate verification has been enabled
|
# Check if certificate verification has been enabled
|
||||||
should_verify = self._config.federation_verify_certificates
|
should_verify = self._should_verify
|
||||||
|
|
||||||
# Check if we've disabled certificate verification for this host
|
# Check if we've disabled certificate verification for this host
|
||||||
if should_verify:
|
if self._should_verify:
|
||||||
for regex in self._config.federation_certificate_verification_whitelist:
|
for regex in self._federation_certificate_verification_whitelist:
|
||||||
if regex.match(ascii_host):
|
if regex.match(ascii_host):
|
||||||
should_verify = False
|
should_verify = False
|
||||||
break
|
break
|
||||||
|
|||||||
@@ -42,7 +42,7 @@ def check(
|
|||||||
do_sig_check: bool = True,
|
do_sig_check: bool = True,
|
||||||
do_size_check: bool = True,
|
do_size_check: bool = True,
|
||||||
) -> None:
|
) -> None:
|
||||||
""" Checks if this event is correctly authed.
|
"""Checks if this event is correctly authed.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
room_version_obj: the version of the room
|
room_version_obj: the version of the room
|
||||||
@@ -423,7 +423,9 @@ def _can_send_event(event: EventBase, auth_events: StateMap[EventBase]) -> bool:
|
|||||||
|
|
||||||
|
|
||||||
def check_redaction(
|
def check_redaction(
|
||||||
room_version_obj: RoomVersion, event: EventBase, auth_events: StateMap[EventBase],
|
room_version_obj: RoomVersion,
|
||||||
|
event: EventBase,
|
||||||
|
auth_events: StateMap[EventBase],
|
||||||
) -> bool:
|
) -> bool:
|
||||||
"""Check whether the event sender is allowed to redact the target event.
|
"""Check whether the event sender is allowed to redact the target event.
|
||||||
|
|
||||||
@@ -459,7 +461,9 @@ def check_redaction(
|
|||||||
|
|
||||||
|
|
||||||
def _check_power_levels(
|
def _check_power_levels(
|
||||||
room_version_obj: RoomVersion, event: EventBase, auth_events: StateMap[EventBase],
|
room_version_obj: RoomVersion,
|
||||||
|
event: EventBase,
|
||||||
|
auth_events: StateMap[EventBase],
|
||||||
) -> None:
|
) -> None:
|
||||||
user_list = event.content.get("users", {})
|
user_list = event.content.get("users", {})
|
||||||
# Validate users
|
# Validate users
|
||||||
|
|||||||
@@ -98,7 +98,9 @@ class EventBuilder:
|
|||||||
return self._state_key is not None
|
return self._state_key is not None
|
||||||
|
|
||||||
async def build(
|
async def build(
|
||||||
self, prev_event_ids: List[str], auth_event_ids: Optional[List[str]],
|
self,
|
||||||
|
prev_event_ids: List[str],
|
||||||
|
auth_event_ids: Optional[List[str]],
|
||||||
) -> EventBase:
|
) -> EventBase:
|
||||||
"""Transform into a fully signed and hashed event
|
"""Transform into a fully signed and hashed event
|
||||||
|
|
||||||
|
|||||||
@@ -341,8 +341,7 @@ def _encode_state_dict(state_dict):
|
|||||||
|
|
||||||
|
|
||||||
def _decode_state_dict(input):
|
def _decode_state_dict(input):
|
||||||
"""Decodes a state dict encoded using `_encode_state_dict` above
|
"""Decodes a state dict encoded using `_encode_state_dict` above"""
|
||||||
"""
|
|
||||||
if input is None:
|
if input is None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|||||||
@@ -17,6 +17,8 @@
|
|||||||
import inspect
|
import inspect
|
||||||
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
|
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
|
||||||
|
|
||||||
|
from synapse.rest.media.v1._base import FileInfo
|
||||||
|
from synapse.rest.media.v1.media_storage import ReadableFileWrapper
|
||||||
from synapse.spam_checker_api import RegistrationBehaviour
|
from synapse.spam_checker_api import RegistrationBehaviour
|
||||||
from synapse.types import Collection
|
from synapse.types import Collection
|
||||||
from synapse.util.async_helpers import maybe_awaitable
|
from synapse.util.async_helpers import maybe_awaitable
|
||||||
@@ -214,3 +216,48 @@ class SpamChecker:
|
|||||||
return behaviour
|
return behaviour
|
||||||
|
|
||||||
return RegistrationBehaviour.ALLOW
|
return RegistrationBehaviour.ALLOW
|
||||||
|
|
||||||
|
async def check_media_file_for_spam(
|
||||||
|
self, file_wrapper: ReadableFileWrapper, file_info: FileInfo
|
||||||
|
) -> bool:
|
||||||
|
"""Checks if a piece of newly uploaded media should be blocked.
|
||||||
|
|
||||||
|
This will be called for local uploads, downloads of remote media, each
|
||||||
|
thumbnail generated for those, and web pages/images used for URL
|
||||||
|
previews.
|
||||||
|
|
||||||
|
Note that care should be taken to not do blocking IO operations in the
|
||||||
|
main thread. For example, to get the contents of a file a module
|
||||||
|
should do::
|
||||||
|
|
||||||
|
async def check_media_file_for_spam(
|
||||||
|
self, file: ReadableFileWrapper, file_info: FileInfo
|
||||||
|
) -> bool:
|
||||||
|
buffer = BytesIO()
|
||||||
|
await file.write_chunks_to(buffer.write)
|
||||||
|
|
||||||
|
if buffer.getvalue() == b"Hello World":
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file: An object that allows reading the contents of the media.
|
||||||
|
file_info: Metadata about the file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if the media should be blocked or False if it should be
|
||||||
|
allowed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
for spam_checker in self.spam_checkers:
|
||||||
|
# For backwards compatibility, only run if the method exists on the
|
||||||
|
# spam checker
|
||||||
|
checker = getattr(spam_checker, "check_media_file_for_spam", None)
|
||||||
|
if checker:
|
||||||
|
spam = await maybe_awaitable(checker(file_wrapper, file_info))
|
||||||
|
if spam:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|||||||
@@ -40,7 +40,8 @@ class ThirdPartyEventRules:
|
|||||||
|
|
||||||
if module is not None:
|
if module is not None:
|
||||||
self.third_party_rules = module(
|
self.third_party_rules = module(
|
||||||
config=config, module_api=hs.get_module_api(),
|
config=config,
|
||||||
|
module_api=hs.get_module_api(),
|
||||||
)
|
)
|
||||||
|
|
||||||
async def check_event_allowed(
|
async def check_event_allowed(
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ SPLIT_FIELD_REGEX = re.compile(r"(?<!\\)\.")
|
|||||||
|
|
||||||
|
|
||||||
def prune_event(event: EventBase) -> EventBase:
|
def prune_event(event: EventBase) -> EventBase:
|
||||||
""" Returns a pruned version of the given event, which removes all keys we
|
"""Returns a pruned version of the given event, which removes all keys we
|
||||||
don't know about or think could potentially be dodgy.
|
don't know about or think could potentially be dodgy.
|
||||||
|
|
||||||
This is used when we "redact" an event. We want to remove all fields that
|
This is used when we "redact" an event. We want to remove all fields that
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user