Compare commits
465 Commits
v1.9.0.dev...anoa/mypy_
| Author | SHA1 | Date |
|---|---|---|
| | 3dace4b1aa | |
| | 4ac60a17a5 | |
| | f5fd9b98c7 | |
| | 8895c38202 | |
| | e0ee1b2224 | |
| | 14c4f08f5c | |
| | cb76e53b7f | |
| | 4cff617df1 | |
| | 7bab642707 | |
| | b1cfaf08af | |
| | 39230d2171 | |
| | 1fcf9c6f95 | |
| | d6828c129f | |
| | c816072d47 | |
| | 190ab593b7 | |
| | e341518f92 | |
| | a564b92d37 | |
| | 5126cb1253 | |
| | 229eb81498 | |
| | 88bb6c27e1 | |
| | 066804f591 | |
| | 56b5f1d0ee | |
| | a438950a00 | |
| | 2fa55c0cc6 | |
| | b3cee0ce67 | |
| | 96071eea8f | |
| | 477c4f5b1c | |
| | c165c1233b | |
| | fdb1344716 | |
| | caec7d4fa0 | |
| | c2db6599c8 | |
| | a319cb1dd1 | |
| | c8c926f9c9 | |
| | 163f23785a | |
| | 5aa6dff99e | |
| | e43e78b985 | |
| | 782b811789 | |
| | e913823a22 | |
| | 8c75667ad7 | |
| | 443162e577 | |
| | 4a17a647a9 | |
| | 88b41986db | |
| | 6e6476ef07 | |
| | 6d110ddea4 | |
| | c37db0211e | |
| | 4ce50519cd | |
| | 5e477c1deb | |
| | 7581d30e9f | |
| | 60724c46b7 | |
| | 6a35046363 | |
| | 7df04ca0e6 | |
| | beb19cf61a | |
| | d8d91983bc | |
| | ebfcbbff9c | |
| | 77d0a4507b | |
| | 0de9f9486a | |
| | f9e98176bf | |
| | bd5e555b0d | |
| | 900bca9707 | |
| | e55a240681 | |
| | b8cfe79ffc | |
| | 8120a238a4 | |
| | 37a9873f63 | |
| | e38c44b418 | |
| | 1cde4cf3f1 | |
| | 2dce68c651 | |
| | 9c0775e86a | |
| | 69ce55c510 | |
| | 54dd28621b | |
| | 751d51dd12 | |
| | 42ac4ca477 | |
| | 6640460d05 | |
| | 8f826f98ac | |
| | dc6fb56c5f | |
| | fe593ef990 | |
| | 5ec2077bf9 | |
| | 156f271867 | |
| | 51c094c4ac | |
| | 6b0efe73e2 | |
| | 39f6595b4a | |
| | 885134529f | |
| | 7e5f40e771 | |
| | 50ea178c20 | |
| | 04f4b5f6f8 | |
| | 14b2ebe767 | |
| | f9e3a3f4d0 | |
| | aee2bae952 | |
| | 87c65576e0 | |
| | 06eb5cae08 | |
| | 66315d862f | |
| | bbf725e7da | |
| | 99bbe177b6 | |
| | 20545a2199 | |
| | ce460dc31c | |
| | fb078f921b | |
| | 1f5f3ae8b1 | |
| | 2bff4457d9 | |
| | 1d66dce83e | |
| | 54b78a0e3b | |
| | 297aaf4816 | |
| | 45df9d35a9 | |
| | a27056d539 | |
| | 80e580ae92 | |
| | 87972f07e5 | |
| | 78a15b1f9d | |
| | fe678a0900 | |
| | 83b6c69d3d | |
| | 31a2116331 | |
| | 13892776ef | |
| | 8ef8fb2c1c | |
| | 43f874055d | |
| | 6b0ef34706 | |
| | fe6ab0439d | |
| | fd983fad96 | |
| | 7dcbc33a1b | |
| | 6a8880b9c3 | |
| | a0178df104 | |
| | 6f67a8b570 | |
| | 65c73cdfec | |
| | 809e8567f6 | |
| | b68041df3d | |
| | 65a941d1f8 | |
| | b29474e0aa | |
| | 27d099edd6 | |
| | 2e7fad87d4 | |
| | b2bd54a2e3 | |
| | 3ab8e9c293 | |
| | 174aaa1d62 | |
| | 036c6cea07 | |
| | bbeee33d63 | |
| | e53744c737 | |
| | cc7ab0d84a | |
| | e4ffb14d57 | |
| | d96ac97d29 | |
| | 12d4259000 | |
| | f70f44abc7 | |
| | 59ad93d2a4 | |
| | 9ce4e344a8 | |
| | f5caa1864e | |
| | c3c6c0e622 | |
| | 9b06d8f8a6 | |
| | ab0073a6c0 | |
| | 2201bc9795 | |
| | cab4a52535 | |
| | b32ac60c22 | |
| | 132b673dbe | |
| | 3e99528f2b | |
| | 380122866f | |
| | 1f773eec91 | |
| | 7728d87fd7 | |
| | 8c75b621bf | |
| | c1156d3e2b | |
| | e66f099ca9 | |
| | bbf8886a05 | |
| | 4aea0bd292 | |
| | 691659568f | |
| | a301934f46 | |
| | 4c2ed3f20e | |
| | af6c389501 | |
| | 7b0e2d961c | |
| | fcf4599488 | |
| | 7936d2a96e | |
| | 509e381afa | |
| | 272eee1ae1 | |
| | 4f7e4fc2fb | |
| | 1fcb9a1a7a | |
| | 0bd8cf435e | |
| | 9c1b83b007 | |
| | 8f6d9c4cf0 | |
| | 99eed85a77 | |
| | a90d0dc5c2 | |
| | 4fb5f4d0ce | |
| | 7b7c3cedf2 | |
| | fc87d2ffb3 | |
| | 2b37eabca1 | |
| | 0001e8397e | |
| | 197b08de35 | |
| | 099c96b89b | |
| | 2fb7794e60 | |
| | bbe39f808c | |
| | 880aaac1d8 | |
| | abf1e5c526 | |
| | 0d0bc35792 | |
| | 5e4a438556 | |
| | 71d65407e7 | |
| | fa64f836ec | |
| | 5a5abd55e8 | |
| | 603618c002 | |
| | 709e81f518 | |
| | a0a1fd0bec | |
| | b58d17e44f | |
| | 771d70e89c | |
| | f31a94a6dd | |
| | 61b457e3ec | |
| | adfaea8c69 | |
| | 3f1cd14791 | |
| | a0d2f9d089 | |
| | d484126bf7 | |
| | 8a380d0fe2 | |
| | 818def8248 | |
| | 9801a042f3 | |
| | bfbe2f5b08 | |
| | 7a782c32a2 | |
| | b1255077f5 | |
| | d009535639 | |
| | ba7a523854 | |
| | e837be5b5c | |
| | 3c67eee6dc | |
| | fe3941f6e3 | |
| | 8ee0d74516 | |
| | 3be2abd0a9 | |
| | bc831d1d9a | |
| | 0a714c3abf | |
| | 7718fabb7a | |
| | fd6d83ed96 | |
| | d2455ec3aa | |
| | 3404ad289b | |
| | 46fa66bbfd | |
| | 10027c80b0 | |
| | 5a78f47f6e | |
| | 9551911f88 | |
| | 43b2be9764 | |
| | 32873efa87 | |
| | 97a42bbc3a | |
| | 02e89021f5 | |
| | 49f877d32e | |
| | ffe1fc111d | |
| | 79460ce9c9 | |
| | 71cc6bab5f | |
| | 36af094017 | |
| | 65bdc35a1f | |
| | df1c98c22a | |
| | f3f142259e | |
| | 0cb83cde70 | |
| | ef9c275d96 | |
| | 12bbcc255a | |
| | 5820ed905f | |
| | 361de49c90 | |
| | f48bf4febd | |
| | dc3f998706 | |
| | 862669d6cc | |
| | 459d089af7 | |
| | e88a5dd108 | |
| | e45a7c0939 | |
| | f092029d2d | |
| | 6cd34da8b1 | |
| | d8994942f2 | |
| | 08e050c3fd | |
| | 47acbc519f | |
| | d9239b5257 | |
| | 7b8d654a61 | |
| | fdb816713a | |
| | 3dd2b5f5e3 | |
| | ba547ec3a9 | |
| | a0c4769f1a | |
| | 6b21986e4e | |
| | 705c978366 | |
| | a443d2a25d | |
| | 88d41e94f5 | |
| | 856b2a9555 | |
| | 78d170262c | |
| | aa7e4291ee | |
| | 9e45d573d4 | |
| | 605cd089f7 | |
| | 3edc65dd24 | |
| | a92e703ab9 | |
| | 01209382fb | |
| | 3a3118f4ec | |
| | db0fee738d | |
| | 3de57e7062 | |
| | 8e64c5a24c | |
| | cc0800ebfc | |
| | fe73f0d533 | |
| | 21db35f77e | |
| | e1d858984d | |
| | 799001f2c0 | |
| | b08b0a22d5 | |
| | de2d267375 | |
| | 56ca93ef59 | |
| | f4884444c3 | |
| | e1b240329e | |
| | 7765bf3989 | |
| | 928edef979 | |
| | b0c8bdd49d | |
| | bce557175b | |
| | 99fcc96289 | |
| | ed630ea17c | |
| | 9bcd37146e | |
| | 2201ef8556 | |
| | f663118155 | |
| | b5176166b7 | |
| | 4a50b674f2 | |
| | 6a7e90ad78 | |
| | f0561fcffd | |
| | 5e019069ab | |
| | 39c2d26e0b | |
| | ff70ec0a00 | |
| | ee0525b2b2 | |
| | f84700fba8 | |
| | 577f460369 | |
| | 6bbd890f05 | |
| | 146fec0820 | |
| | a58860e480 | |
| | 60d0672426 | |
| | a831d2e4e3 | |
| | d88e0ec080 | |
| | 6475382d80 | |
| | 74bf3fdbb9 | |
| | c87572d6e4 | |
| | 5ef91b96f1 | |
| | c7d6d5c69e | |
| | 245ee14220 | |
| | 23d8a55c7a | |
| | ea23210b2d | |
| | 4b4536dd02 | |
| | 6deeefb68c | |
| | abadf44eb2 | |
| | e88b90aaeb | |
| | 638001116d | |
| | 3960527c2e | |
| | ad09ee9262 | |
| | 1330c311b7 | |
| | a46fabf17b | |
| | 8af9f11bea | |
| | 3f11cbb404 | |
| | 24d814ca23 | |
| | d73683c363 | |
| | 0cb0c7bcd5 | |
| | 0536d0c9be | |
| | 5d17c31596 | |
| | e81c093974 | |
| | b9391c9575 | |
| | ae5b3104f0 | |
| | e49eb1a886 | |
| | f64c96662e | |
| | 52642860da | |
| | 814cc00cb9 | |
| | 05299599b6 | |
| | 3b7e0e002b | |
| | 4286e429a7 | |
| | c3f296af32 | |
| | dbdf843012 | |
| | ebd6a15af3 | |
| | 94f7b4cd54 | |
| | 863087d186 | |
| | 957129f4a7 | |
| | 0d5f2f4bb0 | |
| | a25ddf26a3 | |
| | bc9b75c6f0 | |
| | 8033b257a7 | |
| | 1cdc253e0a | |
| | c556ed9e15 | |
| | 6e89ec5e32 | |
| | d184cbc031 | |
| | 98681f90cb | |
| | af8ba6b525 | |
| | 7571bf86f0 | |
| | b3e44f0bdf | |
| | 370080531e | |
| | b0d112e78b | |
| | 68ef7ebbef | |
| | 0f8ffa38b5 | |
| | ac0d45b78b | |
| | 83b0ea047b | |
| | 7f93eb1903 | |
| | a5afdd15e5 | |
| | 160522e32c | |
| | f6fa2c0b31 | |
| | 08f41a6f05 | |
| | d7bf793cc1 | |
| | 7d846e8704 | |
| | 540c5e168b | |
| | 2a81393a4b | |
| | 54f3f369bd | |
| | ef6bdafb29 | |
| | 46a446828d | |
| | e0992fcc5b | |
| | 184303b865 | |
| | 57ad702af0 | |
| | b660327056 | |
| | c3d4ad8afd | |
| | a5bab2d058 | |
| | c80a9fe13d | |
| | 5a246611e3 | |
| | a855b7c3a8 | |
| | 281551f720 | |
| | 750d4d7599 | |
| | dcd85b976d | |
| | b36095ae5c | |
| | ee42a5513e | |
| | 6b9e1014cf | |
| | 611215a49c | |
| | 2cad8baa70 | |
| | fcfb591b31 | |
| | cc109b79dd | |
| | a1f307f7d1 | |
| | e17a110661 | |
| | fbe0a82c0d | |
| | 99e205fc21 | |
| | 49d3bca37b | |
| | a8ce7aeb43 | |
| | 02b44db922 | |
| | 33f904835a | |
| | 77d9357226 | |
| | bdbeeb94ec | |
| | 8df862e45d | |
| | d5275fc55f | |
| | f74d178b17 | |
| | cf9d56e5cf | |
| | 1fe5001369 | |
| | 9f7aaf90b5 | |
| | aa6ad288f1 | |
| | fa4d609e20 | |
| | 51fc3f693e | |
| | 9bae740527 | |
| | 1755326d8a | |
| | 1dc5a791cf | |
| | ba64c3b615 | |
| | f3eac2b3e9 | |
| | 6b7462a13f | |
| | 5bd3cb7260 | |
| | 04345338e1 | |
| | d31f5f4d89 | |
| | ce84dd9e20 | |
| | 33f7e5ce2a | |
| | 91085ef49e | |
| | ffa637050d | |
| | 0d0f32bc53 | |
| | 90a28fb475 | |
| | ae6cf586b0 | |
| | 6ae0c8db33 | |
| | d9a8728b11 | |
| | 67aa18e8dc | |
| | ed83c3a018 | |
| | aa9b00fb2f | |
| | 5e52d8563b | |
| | 5d7a6ad223 | |
| | 2093f83ea0 | |
| | 837f62266b | |
| | 07124d028d | |
| | 0e68760078 | |
| | b0a66ab83c | |
| | 74b74462f1 | |
| | 0f6e525be3 | |
| | ceecedc68b | |
| | e9e066055f | |
| | 351fdfede6 | |
| | 2f23eb27b3 | |
| | 11c23af465 | |
| | 026f4bdf3c | |
| | 198d52da3a | |
| | a17f64361c | |
| | 5909751936 | |
| | 0b885d62ef | |
| | 722b4f302d | |
| | 3b72bb780a | |
| | 4fb3cb208a | |
| | dac148341b | |
| | 842c2cfbf1 | |
| | e601f35d3b | |
| | 48e57a6452 | |
| | 914e73cdd9 | |
| | 066b9f52b8 | |
| | 8363588237 | |
| | 855af069a4 | |
@@ -1,22 +0,0 @@
```yaml
version: '3.1'

services:

  postgres:
    image: postgres:9.5
    environment:
      POSTGRES_PASSWORD: postgres
    command: -c fsync=off

  testenv:
    image: python:3.5
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /src
    volumes:
      - ..:/src
```
@@ -1,22 +0,0 @@
```yaml
version: '3.1'

services:

  postgres:
    image: postgres:11
    environment:
      POSTGRES_PASSWORD: postgres
    command: -c fsync=off

  testenv:
    image: python:3.7
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /src
    volumes:
      - ..:/src
```
@@ -1,22 +0,0 @@
```yaml
version: '3.1'

services:

  postgres:
    image: postgres:9.5
    environment:
      POSTGRES_PASSWORD: postgres
    command: -c fsync=off

  testenv:
    image: python:3.7
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /src
    volumes:
      - ..:/src
```
.buildkite/scripts/test_old_deps.sh (new executable file, 13 lines)
@@ -0,0 +1,13 @@
```sh
#!/bin/bash

# this script is run by buildkite in a plain `xenial` container; it installs the
# minimal requirements for tox and hands over to the py35-old tox environment.

set -ex

apt-get update
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev tox

export LANG="C.UTF-8"

exec tox -e py35-old,combine
```
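As a rough local illustration (not part of this diff), the same CI step could be reproduced by running the new script inside a `xenial` container; the image name and mount path below are assumptions:

```sh
# hypothetical local run of the buildkite step; assumes Docker and a synapse checkout in $PWD
docker run --rm -v "$PWD:/src" -w /src ubuntu:xenial \
    /src/.buildkite/scripts/test_old_deps.sh
```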
@@ -39,3 +39,5 @@ Server correctly handles incoming m.device_list_update
```
# this fails reliably with a torture level of 100 due to https://github.com/matrix-org/synapse/issues/6536
Outbound federation requests missing prev_events and then asks for /state_ids and resolves the state

Can get rooms/{roomId}/members at a given point
```
CHANGES.md (434 changed lines)
@@ -1,3 +1,437 @@
Synapse 1.12.0 (2020-03-23)
===========================

No significant changes since 1.12.0rc1.

Debian packages and Docker images are rebuilt using the latest versions of dependency libraries, including Twisted 20.3.0. **Please see security advisory below**.

Security advisory
-----------------

Synapse may be vulnerable to request-smuggling attacks when it is used with a reverse-proxy. The vulnerabilities are fixed in Twisted 20.3.0, and are described in [CVE-2020-10108](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10108) and [CVE-2020-10109](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10109). For a good introduction to this class of request-smuggling attacks, see https://portswigger.net/research/http-desync-attacks-request-smuggling-reborn.

We are not aware of these vulnerabilities being exploited in the wild, and do not believe that they are exploitable with current versions of any reverse proxies. Nevertheless, we recommend that all Synapse administrators ensure that they have the latest version of the Twisted library to ensure that their installation remains secure.

* Administrators using the [`matrix.org` Docker image](https://hub.docker.com/r/matrixdotorg/synapse/) or the [Debian/Ubuntu packages from `matrix.org`](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#matrixorg-packages) should ensure that they have version 1.12.0 installed: these images include Twisted 20.3.0.
* Administrators who have [installed Synapse from source](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#installing-from-source) should upgrade Twisted within their virtualenv by running:
  ```sh
  <path_to_virtualenv>/bin/pip install 'Twisted>=20.3.0'
  ```
* Administrators who have installed Synapse from distribution packages should consult the information from their distributions.
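A quick way to confirm which Twisted version an installation ends up with (an illustrative check, not part of the advisory itself):

```sh
# should report Version: 20.3.0 or later after upgrading
<path_to_virtualenv>/bin/pip show twisted
```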
The `matrix.org` Synapse instance was not vulnerable to these vulnerabilities.

Advance notice of change to the default `git` branch for Synapse
----------------------------------------------------------------

Currently, the default `git` branch for Synapse is `master`, which tracks the latest release.

After the release of Synapse 1.13.0, we intend to change this default to `develop`, which is the development tip. This is more consistent with common practice and modern `git` usage.

Although we try to keep `develop` in a stable state, there may be occasions where regressions creep in. Developers and distributors who have scripts which run builds using the default branch of `Synapse` should therefore consider pinning their scripts to `master`.
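For example, a build script that needs to keep tracking releases after the default changes could pin the branch explicitly (a minimal sketch, not taken from the changelog):

```sh
git clone --branch master https://github.com/matrix-org/synapse.git
```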

Synapse 1.12.0rc1 (2020-03-19)
==============================

Features
--------

- Changes related to room alias management ([MSC2432](https://github.com/matrix-org/matrix-doc/pull/2432)):
- Publishing/removing a room from the room directory now requires the user to have a power level capable of modifying the canonical alias, instead of the room aliases. ([\#6965](https://github.com/matrix-org/synapse/issues/6965))
- Validate the `alt_aliases` property of canonical alias events. ([\#6971](https://github.com/matrix-org/synapse/issues/6971))
- Users with a power level sufficient to modify the canonical alias of a room can now delete room aliases. ([\#6986](https://github.com/matrix-org/synapse/issues/6986))
- Implement updated authorization rules and redaction rules for aliases events, from [MSC2261](https://github.com/matrix-org/matrix-doc/pull/2261) and [MSC2432](https://github.com/matrix-org/matrix-doc/pull/2432). ([\#7037](https://github.com/matrix-org/synapse/issues/7037))
- Stop sending m.room.aliases events during room creation and upgrade. ([\#6941](https://github.com/matrix-org/synapse/issues/6941))
- Synapse no longer uses room alias events to calculate room names for push notifications. ([\#6966](https://github.com/matrix-org/synapse/issues/6966))
- The room list endpoint no longer returns a list of aliases. ([\#6970](https://github.com/matrix-org/synapse/issues/6970))
- Remove special handling of aliases events from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260) added in v1.10.0rc1. ([\#7034](https://github.com/matrix-org/synapse/issues/7034))
- Expose the `synctl`, `hash_password` and `generate_config` commands in the snapcraft package. Contributed by @devec0. ([\#6315](https://github.com/matrix-org/synapse/issues/6315))
- Check that server_name is correctly set before running database updates. ([\#6982](https://github.com/matrix-org/synapse/issues/6982))
- Break down monthly active users by `appservice_id` and emit via Prometheus. ([\#7030](https://github.com/matrix-org/synapse/issues/7030))
- Render a configurable and comprehensible error page if something goes wrong during the SAML2 authentication process. ([\#7058](https://github.com/matrix-org/synapse/issues/7058), [\#7067](https://github.com/matrix-org/synapse/issues/7067))
- Add an optional parameter to control whether other sessions are logged out when a user's password is modified. ([\#7085](https://github.com/matrix-org/synapse/issues/7085))
- Add prometheus metrics for the number of active pushers. ([\#7103](https://github.com/matrix-org/synapse/issues/7103), [\#7106](https://github.com/matrix-org/synapse/issues/7106))
- Improve performance when making HTTPS requests to sygnal, sydent, etc, by sharing the SSL context object between connections. ([\#7094](https://github.com/matrix-org/synapse/issues/7094))


Bugfixes
--------

- When a user's profile is updated via the admin API, also generate a displayname/avatar update for that user in each room. ([\#6572](https://github.com/matrix-org/synapse/issues/6572))
- Fix a couple of bugs in email configuration handling. ([\#6962](https://github.com/matrix-org/synapse/issues/6962))
- Fix an issue affecting worker-based deployments where replication would stop working, necessitating a full restart, after joining a large room. ([\#6967](https://github.com/matrix-org/synapse/issues/6967))
- Fix `duplicate key` error which was logged when rejoining a room over federation. ([\#6968](https://github.com/matrix-org/synapse/issues/6968))
- Prevent user from setting 'deactivated' to anything other than a bool on the v2 PUT /users Admin API. ([\#6990](https://github.com/matrix-org/synapse/issues/6990))
- Fix py35-old CI by using native tox package. ([\#7018](https://github.com/matrix-org/synapse/issues/7018))
- Fix a bug causing `org.matrix.dummy_event` to be included in responses from `/sync`. ([\#7035](https://github.com/matrix-org/synapse/issues/7035))
- Fix a bug that renders UTF-8 text files incorrectly when loaded from media. Contributed by @TheStranjer. ([\#7044](https://github.com/matrix-org/synapse/issues/7044))
- Fix a bug that would cause Synapse to respond with an error about event visibility if a client tried to request the state of a room at a given token. ([\#7066](https://github.com/matrix-org/synapse/issues/7066))
- Repair a data-corruption issue which was introduced in Synapse 1.10, and fixed in Synapse 1.11, and which could cause `/sync` to return with 404 errors about missing events and unknown rooms. ([\#7070](https://github.com/matrix-org/synapse/issues/7070))
- Fix a bug causing account validity renewal emails to be sent even if the feature is turned off in some cases. ([\#7074](https://github.com/matrix-org/synapse/issues/7074))


Improved Documentation
----------------------

- Updated CentOS8 install instructions. Contributed by Richard Kellner. ([\#6925](https://github.com/matrix-org/synapse/issues/6925))
- Fix `POSTGRES_INITDB_ARGS` in the `contrib/docker/docker-compose.yml` example docker-compose configuration. ([\#6984](https://github.com/matrix-org/synapse/issues/6984))
- Change date in [INSTALL.md](./INSTALL.md#tls-certificates) for last date of getting TLS certificates to November 2019. ([\#7015](https://github.com/matrix-org/synapse/issues/7015))
- Document that the fallback auth endpoints must be routed to the same worker node as the register endpoints. ([\#7048](https://github.com/matrix-org/synapse/issues/7048))


Deprecations and Removals
-------------------------

- Remove the unused query_auth federation endpoint per [MSC2451](https://github.com/matrix-org/matrix-doc/pull/2451). ([\#7026](https://github.com/matrix-org/synapse/issues/7026))


Internal Changes
----------------

- Add type hints to `logging/context.py`. ([\#6309](https://github.com/matrix-org/synapse/issues/6309))
- Add some clarifications to `README.md` in the database schema directory. ([\#6615](https://github.com/matrix-org/synapse/issues/6615))
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6874](https://github.com/matrix-org/synapse/issues/6874), [\#6875](https://github.com/matrix-org/synapse/issues/6875), [\#6983](https://github.com/matrix-org/synapse/issues/6983), [\#7003](https://github.com/matrix-org/synapse/issues/7003))
- Improve performance of v2 state resolution for large rooms. ([\#6952](https://github.com/matrix-org/synapse/issues/6952), [\#7095](https://github.com/matrix-org/synapse/issues/7095))
- Reduce time spent doing GC, by freezing objects on startup. ([\#6953](https://github.com/matrix-org/synapse/issues/6953))
- Minor performance fixes to `get_auth_chain_ids`. ([\#6954](https://github.com/matrix-org/synapse/issues/6954))
- Don't record remote cross-signing keys in the `devices` table. ([\#6956](https://github.com/matrix-org/synapse/issues/6956))
- Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions. ([\#6957](https://github.com/matrix-org/synapse/issues/6957))
- Merge worker apps together. ([\#6964](https://github.com/matrix-org/synapse/issues/6964), [\#7002](https://github.com/matrix-org/synapse/issues/7002), [\#7055](https://github.com/matrix-org/synapse/issues/7055), [\#7104](https://github.com/matrix-org/synapse/issues/7104))
- Remove redundant `store_room` call from `FederationHandler._process_received_pdu`. ([\#6979](https://github.com/matrix-org/synapse/issues/6979))
- Update warning for incorrect database collation/ctype to include link to documentation. ([\#6985](https://github.com/matrix-org/synapse/issues/6985))
- Add some type annotations to the database storage classes. ([\#6987](https://github.com/matrix-org/synapse/issues/6987))
- Port `synapse.handlers.presence` to async/await. ([\#6991](https://github.com/matrix-org/synapse/issues/6991), [\#7019](https://github.com/matrix-org/synapse/issues/7019))
- Add some type annotations to the federation base & client classes. ([\#6995](https://github.com/matrix-org/synapse/issues/6995))
- Port `synapse.rest.keys` to async/await. ([\#7020](https://github.com/matrix-org/synapse/issues/7020))
- Add a type check to `is_verified` when processing room keys. ([\#7045](https://github.com/matrix-org/synapse/issues/7045))
- Add type annotations and comments to the auth handler. ([\#7063](https://github.com/matrix-org/synapse/issues/7063))


Synapse 1.11.1 (2020-03-03)
===========================

This release includes a security fix impacting installations using Single Sign-On (i.e. SAML2 or CAS) for authentication. Administrators of such installations are encouraged to upgrade as soon as possible.

The release also includes fixes for a couple of other bugs.

Bugfixes
--------

- Add a confirmation step to the SSO login flow before redirecting users to the redirect URL. ([b2bd54a2](https://github.com/matrix-org/synapse/commit/b2bd54a2e31d9a248f73fadb184ae9b4cbdb49f9), [65c73cdf](https://github.com/matrix-org/synapse/commit/65c73cdfec1876a9fec2fd2c3a74923cd146fe0b), [a0178df1](https://github.com/matrix-org/synapse/commit/a0178df10422a76fd403b82d2b2a4ed28a9a9d1e))
- Fixed setting a user as an admin with the admin API `PUT /_synapse/admin/v2/users/<user_id>`. Contributed by @dklimpel. ([\#6910](https://github.com/matrix-org/synapse/issues/6910))
- Fix bug introduced in Synapse 1.11.0 which sometimes caused errors when joining rooms over federation, with `'coroutine' object has no attribute 'event_id'`. ([\#6996](https://github.com/matrix-org/synapse/issues/6996))


Synapse 1.11.0 (2020-02-21)
===========================

Improved Documentation
----------------------

- Small grammatical fixes to the ACME v1 deprecation notice. ([\#6944](https://github.com/matrix-org/synapse/issues/6944))


Synapse 1.11.0rc1 (2020-02-19)
==============================

Features
--------

- Admin API to add or modify threepids of user accounts. ([\#6769](https://github.com/matrix-org/synapse/issues/6769))
- Limit the number of events that can be requested by the backfill federation API to 100. ([\#6864](https://github.com/matrix-org/synapse/issues/6864))
- Add ability to run some group APIs on workers. ([\#6866](https://github.com/matrix-org/synapse/issues/6866))
- Reject device display names over 100 characters in length to prevent abuse. ([\#6882](https://github.com/matrix-org/synapse/issues/6882))
- Add ability to route federation user device queries to workers. ([\#6873](https://github.com/matrix-org/synapse/issues/6873))
- The result of a user directory search can now be filtered via the spam checker. ([\#6888](https://github.com/matrix-org/synapse/issues/6888))
- Implement new `GET /_matrix/client/unstable/org.matrix.msc2432/rooms/{roomId}/aliases` endpoint as per [MSC2432](https://github.com/matrix-org/matrix-doc/pull/2432). ([\#6939](https://github.com/matrix-org/synapse/issues/6939), [\#6948](https://github.com/matrix-org/synapse/issues/6948), [\#6949](https://github.com/matrix-org/synapse/issues/6949))
- Stop sending `m.room.alias` events when adding / removing aliases. Check `alt_aliases` in the latest `m.room.canonical_alias` event when deleting an alias. ([\#6904](https://github.com/matrix-org/synapse/issues/6904))
- Change the default power levels of invites, tombstones and server ACLs for new rooms. ([\#6834](https://github.com/matrix-org/synapse/issues/6834))

Bugfixes
--------

- Fixed third party event rules function `on_create_room`'s return value being ignored. ([\#6781](https://github.com/matrix-org/synapse/issues/6781))
- Allow URL-encoded User IDs on `/_synapse/admin/v2/users/<user_id>[/admin]` endpoints. Thanks to @NHAS for reporting. ([\#6825](https://github.com/matrix-org/synapse/issues/6825))
- Fix Synapse refusing to start if `federation_certificate_verification_whitelist` option is blank. ([\#6849](https://github.com/matrix-org/synapse/issues/6849))
- Fix errors from logging in the purge jobs related to the message retention policies support. ([\#6945](https://github.com/matrix-org/synapse/issues/6945))
- Return a 404 instead of 200 for querying information of a non-existent user through the admin API. ([\#6901](https://github.com/matrix-org/synapse/issues/6901))


Updates to the Docker image
---------------------------

- The deprecated "generate-config-on-the-fly" mode is no longer supported. ([\#6918](https://github.com/matrix-org/synapse/issues/6918))


Improved Documentation
----------------------

- Add details of PR merge strategy to contributing docs. ([\#6846](https://github.com/matrix-org/synapse/issues/6846))
- Spell out that the last event sent to a room won't be deleted by a purge. ([\#6891](https://github.com/matrix-org/synapse/issues/6891))
- Update Synapse's documentation to warn about the deprecation of ACME v1. ([\#6905](https://github.com/matrix-org/synapse/issues/6905), [\#6907](https://github.com/matrix-org/synapse/issues/6907), [\#6909](https://github.com/matrix-org/synapse/issues/6909))
- Add documentation for the spam checker. ([\#6906](https://github.com/matrix-org/synapse/issues/6906))
- Fix worker docs to point `/publicised_groups` API correctly. ([\#6938](https://github.com/matrix-org/synapse/issues/6938))
- Clean up and update docs on setting up federation. ([\#6940](https://github.com/matrix-org/synapse/issues/6940))
- Add a warning about indentation to generated configuration files. ([\#6920](https://github.com/matrix-org/synapse/issues/6920))
- Databases created using the compose file in contrib/docker will now always have correct encoding and locale settings. Contributed by Fridtjof Mund. ([\#6921](https://github.com/matrix-org/synapse/issues/6921))
- Update pip install directions in readme to avoid error when using zsh. ([\#6855](https://github.com/matrix-org/synapse/issues/6855))


Deprecations and Removals
-------------------------

- Remove `m.lazy_load_members` from `unstable_features` since lazy loading is in the stable Client-Server API version r0.5.0. ([\#6877](https://github.com/matrix-org/synapse/issues/6877))


Internal Changes
----------------

- Add type hints to `SyncHandler`. ([\#6821](https://github.com/matrix-org/synapse/issues/6821))
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6823](https://github.com/matrix-org/synapse/issues/6823), [\#6827](https://github.com/matrix-org/synapse/issues/6827), [\#6854](https://github.com/matrix-org/synapse/issues/6854), [\#6856](https://github.com/matrix-org/synapse/issues/6856), [\#6857](https://github.com/matrix-org/synapse/issues/6857), [\#6858](https://github.com/matrix-org/synapse/issues/6858))
- Fix stacktraces when using `ObservableDeferred` and async/await. ([\#6836](https://github.com/matrix-org/synapse/issues/6836))
- Port much of `synapse.handlers.federation` to async/await. ([\#6837](https://github.com/matrix-org/synapse/issues/6837), [\#6840](https://github.com/matrix-org/synapse/issues/6840))
- Populate `rooms.room_version` database column at startup, rather than in a background update. ([\#6847](https://github.com/matrix-org/synapse/issues/6847))
- Reduce amount we log at `INFO` level. ([\#6833](https://github.com/matrix-org/synapse/issues/6833), [\#6862](https://github.com/matrix-org/synapse/issues/6862))
- Remove unused `get_room_stats_state` method. ([\#6869](https://github.com/matrix-org/synapse/issues/6869))
- Add typing to `synapse.federation.sender` and port to async/await. ([\#6871](https://github.com/matrix-org/synapse/issues/6871))
- Refactor `_EventInternalMetadata` object to improve type safety. ([\#6872](https://github.com/matrix-org/synapse/issues/6872))
- Add an additional entry to the SyTest blacklist for worker mode. ([\#6883](https://github.com/matrix-org/synapse/issues/6883))
- Fix the use of sed in the linting scripts when using BSD sed. ([\#6887](https://github.com/matrix-org/synapse/issues/6887))
- Add type hints to the spam checker module. ([\#6915](https://github.com/matrix-org/synapse/issues/6915))
- Convert the directory handler tests to use HomeserverTestCase. ([\#6919](https://github.com/matrix-org/synapse/issues/6919))
- Increase DB/CPU perf of `_is_server_still_joined` check. ([\#6936](https://github.com/matrix-org/synapse/issues/6936))
- Tiny optimisation for incoming HTTP request dispatch. ([\#6950](https://github.com/matrix-org/synapse/issues/6950))


Synapse 1.10.1 (2020-02-17)
===========================

Bugfixes
--------

- Fix a bug introduced in Synapse 1.10.0 which would cause room state to be cleared in the database if Synapse was upgraded direct from 1.2.1 or earlier to 1.10.0. ([\#6924](https://github.com/matrix-org/synapse/issues/6924))


Synapse 1.10.0 (2020-02-12)
===========================

**WARNING to client developers**: As of this release Synapse validates `client_secret` parameters in the Client-Server API as per the spec. See [\#6766](https://github.com/matrix-org/synapse/issues/6766) for details.

Updates to the Docker image
---------------------------

- Update the docker images to Alpine Linux 3.11. ([\#6897](https://github.com/matrix-org/synapse/issues/6897))


Synapse 1.10.0rc5 (2020-02-11)
==============================

Bugfixes
--------

- Fix the filtering introduced in 1.10.0rc3 to also apply to the state blocks returned by `/sync`. ([\#6884](https://github.com/matrix-org/synapse/issues/6884))

Synapse 1.10.0rc4 (2020-02-11)
==============================

This release candidate was built incorrectly and is superseded by 1.10.0rc5.

Synapse 1.10.0rc3 (2020-02-10)
==============================

Features
--------

- Filter out `m.room.aliases` from the CS API to mitigate abuse while a better solution is specced. ([\#6878](https://github.com/matrix-org/synapse/issues/6878))


Internal Changes
----------------

- Fix continuous integration failures with old versions of `pip`, which were introduced by a release of the `zipp` library. ([\#6880](https://github.com/matrix-org/synapse/issues/6880))


Synapse 1.10.0rc2 (2020-02-06)
==============================

Bugfixes
--------

- Fix an issue with cross-signing where device signatures were not sent to remote servers. ([\#6844](https://github.com/matrix-org/synapse/issues/6844))
- Fix to the unknown remote device detection which was introduced in 1.10.rc1. ([\#6848](https://github.com/matrix-org/synapse/issues/6848))


Internal Changes
----------------

- Detect unexpected sender keys on remote encrypted events and resync device lists. ([\#6850](https://github.com/matrix-org/synapse/issues/6850))


Synapse 1.10.0rc1 (2020-01-31)
==============================

Features
--------

- Add experimental support for updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). ([\#6787](https://github.com/matrix-org/synapse/issues/6787), [\#6790](https://github.com/matrix-org/synapse/issues/6790), [\#6794](https://github.com/matrix-org/synapse/issues/6794))


Bugfixes
--------

- Warn if postgres database has a non-C locale, as that can cause issues when upgrading locales (e.g. due to upgrading OS). ([\#6734](https://github.com/matrix-org/synapse/issues/6734))
- Minor fixes to `PUT /_synapse/admin/v2/users` admin api. ([\#6761](https://github.com/matrix-org/synapse/issues/6761))
- Validate `client_secret` parameter using the regex provided by the Client-Server API, temporarily allowing `:` characters for older clients. The `:` character will be removed in a future release. ([\#6767](https://github.com/matrix-org/synapse/issues/6767))
- Fix persisting redaction events that have been redacted (or otherwise don't have a redacts key). ([\#6771](https://github.com/matrix-org/synapse/issues/6771))
- Fix outbound federation request metrics. ([\#6795](https://github.com/matrix-org/synapse/issues/6795))
- Fix bug where querying a remote user's device keys that weren't cached resulted in only returning a single device. ([\#6796](https://github.com/matrix-org/synapse/issues/6796))
- Fix race in federation sender worker that delayed sending of device updates. ([\#6799](https://github.com/matrix-org/synapse/issues/6799), [\#6800](https://github.com/matrix-org/synapse/issues/6800))
- Fix bug where Synapse didn't invalidate cache of remote users' devices when Synapse left a room. ([\#6801](https://github.com/matrix-org/synapse/issues/6801))
- Fix waking up other workers when remote server is detected to have come back online. ([\#6811](https://github.com/matrix-org/synapse/issues/6811))


Improved Documentation
----------------------

- Clarify documentation related to `user_dir` and `federation_reader` workers. ([\#6775](https://github.com/matrix-org/synapse/issues/6775))


Internal Changes
----------------

- Record room versions in the `rooms` table. ([\#6729](https://github.com/matrix-org/synapse/issues/6729), [\#6788](https://github.com/matrix-org/synapse/issues/6788), [\#6810](https://github.com/matrix-org/synapse/issues/6810))
- Propagate cache invalidates from workers to other workers. ([\#6748](https://github.com/matrix-org/synapse/issues/6748))
- Remove some unnecessary admin handler abstraction methods. ([\#6751](https://github.com/matrix-org/synapse/issues/6751))
- Add some debugging for media storage providers. ([\#6757](https://github.com/matrix-org/synapse/issues/6757))
- Detect unknown remote devices and mark cache as stale. ([\#6776](https://github.com/matrix-org/synapse/issues/6776), [\#6819](https://github.com/matrix-org/synapse/issues/6819))
- Attempt to resync remote users' devices when detected as stale. ([\#6786](https://github.com/matrix-org/synapse/issues/6786))
- Delete current state from the database when server leaves a room. ([\#6792](https://github.com/matrix-org/synapse/issues/6792))
- When a client asks for a remote user's device keys check if the local cache for that user has been marked as potentially stale. ([\#6797](https://github.com/matrix-org/synapse/issues/6797))
- Add background update to clean out left rooms from current state. ([\#6802](https://github.com/matrix-org/synapse/issues/6802), [\#6816](https://github.com/matrix-org/synapse/issues/6816))
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6803](https://github.com/matrix-org/synapse/issues/6803), [\#6805](https://github.com/matrix-org/synapse/issues/6805), [\#6806](https://github.com/matrix-org/synapse/issues/6806), [\#6807](https://github.com/matrix-org/synapse/issues/6807), [\#6820](https://github.com/matrix-org/synapse/issues/6820))


Synapse 1.9.1 (2020-01-28)
==========================

Bugfixes
--------

- Fix bug where setting `mau_limit_reserved_threepids` config would cause Synapse to refuse to start. ([\#6793](https://github.com/matrix-org/synapse/issues/6793))


Synapse 1.9.0 (2020-01-23)
==========================

**WARNING**: As of this release, Synapse no longer supports versions of SQLite before 3.11, and will refuse to start when configured to use an older version. Administrators are recommended to migrate their database to Postgres (see instructions [here](docs/postgres.md)).

If your Synapse deployment uses workers, note that the reverse-proxy configurations for the `synapse.app.media_repository`, `synapse.app.federation_reader` and `synapse.app.event_creator` workers have changed, with the addition of a few paths (see the updated configurations [here](docs/workers.md#available-worker-applications)). Existing configurations will continue to work.


Improved Documentation
----------------------

- Fix endpoint documentation for the List Rooms admin API. ([\#6770](https://github.com/matrix-org/synapse/issues/6770))


Synapse 1.9.0rc1 (2020-01-22)
=============================

Features
--------

- Allow admin to create or modify a user. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#5742](https://github.com/matrix-org/synapse/issues/5742))
- Add new quarantine media admin APIs to quarantine by media ID or by user who uploaded the media. ([\#6681](https://github.com/matrix-org/synapse/issues/6681), [\#6756](https://github.com/matrix-org/synapse/issues/6756))
- Add `org.matrix.e2e_cross_signing` to `unstable_features` in `/versions` as per [MSC1756](https://github.com/matrix-org/matrix-doc/pull/1756). ([\#6712](https://github.com/matrix-org/synapse/issues/6712))
- Add a new admin API to list and filter rooms on the server. ([\#6720](https://github.com/matrix-org/synapse/issues/6720))


Bugfixes
--------

- Correctly proxy HTTP errors due to API calls to remote group servers. ([\#6654](https://github.com/matrix-org/synapse/issues/6654))
- Fix media repo admin APIs when using a media worker. ([\#6664](https://github.com/matrix-org/synapse/issues/6664))
- Fix "CRITICAL" errors being logged when a request is received for a uri containing non-ascii characters. ([\#6682](https://github.com/matrix-org/synapse/issues/6682))
- Fix a bug where we would assign a numeric user ID if somebody tried registering with an empty username. ([\#6690](https://github.com/matrix-org/synapse/issues/6690))
- Fix `purge_room` admin API. ([\#6711](https://github.com/matrix-org/synapse/issues/6711))
- Fix a bug causing Synapse to not always purge quiet rooms with a low `max_lifetime` in their message retention policies when running the automated purge jobs. ([\#6714](https://github.com/matrix-org/synapse/issues/6714))
- Fix the `synapse_port_db` not correctly running background updates. Thanks @tadzik for reporting. ([\#6718](https://github.com/matrix-org/synapse/issues/6718))
- Fix changing password via user admin API. ([\#6730](https://github.com/matrix-org/synapse/issues/6730))
- Fix `/events/:event_id` deprecated API. ([\#6731](https://github.com/matrix-org/synapse/issues/6731))
- Fix monthly active user limiting support for worker mode, fixes [#4639](https://github.com/matrix-org/synapse/issues/4639). ([\#6742](https://github.com/matrix-org/synapse/issues/6742))
- Fix bug when setting `account_validity` to an empty block in the config. Thanks to @Sorunome for reporting. ([\#6747](https://github.com/matrix-org/synapse/issues/6747))
- Fix `AttributeError: 'NoneType' object has no attribute 'get'` in `hash_password` when configuration has an empty `password_config`. Contributed by @ivilata. ([\#6753](https://github.com/matrix-org/synapse/issues/6753))
- Fix the `docker-compose.yaml` overriding the entire `/etc` folder of the container. Contributed by Fabian Meyer. ([\#6656](https://github.com/matrix-org/synapse/issues/6656))


Improved Documentation
----------------------

- Fix a typo in the configuration example for purge jobs in the sample configuration file. ([\#6621](https://github.com/matrix-org/synapse/issues/6621))
- Add complete documentation of the message retention policies support. ([\#6624](https://github.com/matrix-org/synapse/issues/6624), [\#6665](https://github.com/matrix-org/synapse/issues/6665))
- Add some helpful tips about changelog entries to the GitHub pull request template. ([\#6663](https://github.com/matrix-org/synapse/issues/6663))
- Clarify the `account_validity` and `email` sections of the sample configuration. ([\#6685](https://github.com/matrix-org/synapse/issues/6685))
- Add more endpoints to the documentation for Synapse workers. ([\#6698](https://github.com/matrix-org/synapse/issues/6698))


Deprecations and Removals
-------------------------

- Synapse no longer supports versions of SQLite before 3.11, and will refuse to start when configured to use an older version. Administrators are recommended to migrate their database to Postgres (see instructions [here](docs/postgres.md)). ([\#6675](https://github.com/matrix-org/synapse/issues/6675))


Internal Changes
----------------

- Add `local_current_membership` table for tracking local user membership state in rooms. ([\#6655](https://github.com/matrix-org/synapse/issues/6655), [\#6728](https://github.com/matrix-org/synapse/issues/6728))
- Port `synapse.replication.tcp` to async/await. ([\#6666](https://github.com/matrix-org/synapse/issues/6666))
- Fixup `synapse.replication` to pass mypy checks. ([\#6667](https://github.com/matrix-org/synapse/issues/6667))
- Allow `additional_resources` to implement `IResource` directly. ([\#6686](https://github.com/matrix-org/synapse/issues/6686))
- Allow REST endpoint implementations to raise a `RedirectException`, which will redirect the user's browser to a given location. ([\#6687](https://github.com/matrix-org/synapse/issues/6687))
- Updates and extensions to the module API. ([\#6688](https://github.com/matrix-org/synapse/issues/6688))
- Updates to the SAML mapping provider API. ([\#6689](https://github.com/matrix-org/synapse/issues/6689), [\#6723](https://github.com/matrix-org/synapse/issues/6723))
- Remove redundant `RegistrationError` class. ([\#6691](https://github.com/matrix-org/synapse/issues/6691))
- Don't block processing of incoming EDUs behind processing PDUs in the same transaction. ([\#6697](https://github.com/matrix-org/synapse/issues/6697))
- Remove duplicate check for the `session` query parameter on the `/auth/xxx/fallback/web` Client-Server endpoint. ([\#6702](https://github.com/matrix-org/synapse/issues/6702))
- Attempt to retry sending a transaction when we detect a remote server has come back online, rather than waiting for a transaction to be triggered by new data. ([\#6706](https://github.com/matrix-org/synapse/issues/6706))
- Add `StateMap` type alias to simplify types. ([\#6715](https://github.com/matrix-org/synapse/issues/6715))
- Add a `DeltaState` to track changes to be made to current state during event persistence. ([\#6716](https://github.com/matrix-org/synapse/issues/6716))
- Add more logging around message retention policies support. ([\#6717](https://github.com/matrix-org/synapse/issues/6717))
- When processing a SAML response, log the assertions for easier configuration. ([\#6724](https://github.com/matrix-org/synapse/issues/6724))
- Fixup `synapse.rest` to pass mypy. ([\#6732](https://github.com/matrix-org/synapse/issues/6732), [\#6764](https://github.com/matrix-org/synapse/issues/6764))
- Fixup `synapse.api` to pass mypy. ([\#6733](https://github.com/matrix-org/synapse/issues/6733))
- Allow streaming cache 'invalidate all' to workers. ([\#6749](https://github.com/matrix-org/synapse/issues/6749))
- Remove unused CI docker compose files. ([\#6754](https://github.com/matrix-org/synapse/issues/6754))


Synapse 1.8.0 (2020-01-09)
==========================
@@ -60,7 +60,7 @@ python 3.6 and to install each tool:
````diff
 ```
 # Install the dependencies
-pip install -U black flake8 isort
+pip install -U black flake8 flake8-comprehensions isort
 
 # Run the linter script
 ./scripts-dev/lint.sh
````

@@ -200,6 +200,20 @@ Git allows you to add this signoff automatically when using the `-s`
flag to `git commit`, which uses the name and email set in your
`user.name` and `user.email` git configs.
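For example, with placeholder values:

```sh
git config --global user.name "Your Name"
git config --global user.email "you@example.com"
git commit -s -m "Fix a bug"   # -s appends the Signed-off-by line automatically
```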

## Merge Strategy

We use the commit history of develop/master extensively to identify when regressions were introduced and what changes have been made.

We aim to have a clean merge history, which means we normally squash-merge changes into develop. For small changes this means there is no need to rebase to clean up your PR before merging. Larger changes with an organised set of commits may be merged as-is, if the history is judged to be useful.

This use of squash-merging will mean PRs built on each other will be hard to merge. We suggest avoiding these where possible, and if required, ensuring each PR has a tidy set of commits to ease merging.

## Conclusion

That's it! Matrix is a very open and collaborative project as you might expect
INSTALL.md (138 changed lines)
````diff
@@ -2,7 +2,6 @@
 - [Installing Synapse](#installing-synapse)
 - [Installing from source](#installing-from-source)
 - [Platform-Specific Instructions](#platform-specific-instructions)
-- [Troubleshooting Installation](#troubleshooting-installation)
 - [Prebuilt packages](#prebuilt-packages)
 - [Setting up Synapse](#setting-up-synapse)
 - [TLS certificates](#tls-certificates)
@@ -10,6 +9,7 @@
 - [Registering a user](#registering-a-user)
 - [Setting up a TURN server](#setting-up-a-turn-server)
 - [URL previews](#url-previews)
+- [Troubleshooting Installation](#troubleshooting-installation)
 
 # Choosing your server name
 
@@ -70,7 +70,7 @@ pip install -U matrix-synapse
 ```
 
 Before you can start Synapse, you will need to generate a configuration
-file. To do this, run (in your virtualenv, as before)::
+file. To do this, run (in your virtualenv, as before):
 
 ```
 cd ~/synapse
````
````diff
@@ -84,22 +84,24 @@ python -m synapse.app.homeserver \
 ... substituting an appropriate value for `--server-name`.
 
 This command will generate you a config file that you can then customise, but it will
-also generate a set of keys for you. These keys will allow your Home Server to
-identify itself to other Home Servers, so don't lose or delete them. It would be
+also generate a set of keys for you. These keys will allow your homeserver to
+identify itself to other homeserver, so don't lose or delete them. It would be
 wise to back them up somewhere safe. (If, for whatever reason, you do need to
-change your Home Server's keys, you may find that other Home Servers have the
+change your homeserver's keys, you may find that other homeserver have the
 old key cached. If you update the signing key, you should change the name of the
 key in the `<server name>.signing.key` file (the second word) to something
 different. See the
 [spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
-for more information on key management.)
+for more information on key management).
 
 To actually run your new homeserver, pick a working directory for Synapse to
-run (e.g. `~/synapse`), and::
+run (e.g. `~/synapse`), and:
 
-cd ~/synapse
-source env/bin/activate
-synctl start
+```
+cd ~/synapse
+source env/bin/activate
+synctl start
+```
 
 ### Platform-Specific Instructions
 
````
````diff
@@ -124,12 +126,21 @@ sudo pacman -S base-devel python python-pip \
 
 #### CentOS/Fedora
 
-Installing prerequisites on CentOS 7 or Fedora 25:
+Installing prerequisites on CentOS 8 or Fedora>26:
 
 ```
+sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
+libwebp-devel tk-devel redhat-rpm-config \
+python3-virtualenv libffi-devel openssl-devel
+sudo dnf groupinstall "Development Tools"
+```
+
+Installing prerequisites on CentOS 7 or Fedora<=25:
+
+```
 sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
 lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
-python-virtualenv libffi-devel openssl-devel
+python3-virtualenv libffi-devel openssl-devel
 sudo yum groupinstall "Development Tools"
 ```
 
````
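A typical next step after installing the prerequisites, sketched here only for illustration (it mirrors the generic source-install commands elsewhere in this guide rather than anything added by this diff):

```sh
mkdir -p ~/synapse && cd ~/synapse
virtualenv -p python3 env
source env/bin/activate
pip install -U matrix-synapse
```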
````diff
@@ -179,7 +190,7 @@ doas pkg_add python libffi py-pip py-setuptools sqlite3 py-virtualenv \
 There is currently no port for OpenBSD. Additionally, OpenBSD's security
 settings require a slightly more difficult installation process.
 
-XXX: I suspect this is out of date.
+(XXX: I suspect this is out of date)
 
 1. Create a new directory in `/usr/local` called `_synapse`. Also, create a
 new user called `_synapse` and set that directory as the new user's home.
@@ -187,7 +198,7 @@ XXX: I suspect this is out of date.
 write and execute permissions on the same memory space to be run from
 `/usr/local`.
 2. `su` to the new `_synapse` user and change to their home directory.
-3. Create a new virtualenv: `virtualenv -p python2.7 ~/.synapse`
+3. Create a new virtualenv: `virtualenv -p python3 ~/.synapse`
 4. Source the virtualenv configuration located at
 `/usr/local/_synapse/.synapse/bin/activate`. This is done in `ksh` by
 using the `.` command, rather than `bash`'s `source`.
````
````diff
@@ -208,45 +219,6 @@ be found at https://docs.microsoft.com/en-us/windows/wsl/install-win10 for
 Windows 10 and https://docs.microsoft.com/en-us/windows/wsl/install-on-server
 for Windows Server.
 
-### Troubleshooting Installation
-
-XXX a bunch of this is no longer relevant.
-
-Synapse requires pip 8 or later, so if your OS provides too old a version you
-may need to manually upgrade it::
-
-sudo pip install --upgrade pip
-
-Installing may fail with `Could not find any downloads that satisfy the requirement pymacaroons-pynacl (from matrix-synapse==0.12.0)`.
-You can fix this by manually upgrading pip and virtualenv::
-
-sudo pip install --upgrade virtualenv
-
-You can next rerun `virtualenv -p python3 synapse` to update the virtual env.
-
-Installing may fail during installing virtualenv with `InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.`
-You can fix this by manually installing ndg-httpsclient::
-
-pip install --upgrade ndg-httpsclient
-
-Installing may fail with `mock requires setuptools>=17.1. Aborting installation`.
-You can fix this by upgrading setuptools::
-
-pip install --upgrade setuptools
-
-If pip crashes mid-installation for reason (e.g. lost terminal), pip may
-refuse to run until you remove the temporary installation directory it
-created. To reset the installation::
-
-rm -rf /tmp/pip_install_matrix
-
-pip seems to leak *lots* of memory during installation. For instance, a Linux
-host with 512MB of RAM may run out of memory whilst installing Twisted. If this
-happens, you will have to individually install the dependencies which are
-failing, e.g.::
-
-pip install twisted
-
 ## Prebuilt packages
 
 As an alternative to installing from source, prebuilt packages are available
````
````diff
@@ -305,7 +277,7 @@ For `buster` and `sid`, Synapse is available in the Debian repositories and
 it should be possible to install it with simply:
 
 ```
 sudo apt install matrix-synapse
 ```
 
 There is also a version of `matrix-synapse` in `stretch-backports`. Please see
@@ -366,15 +338,17 @@ sudo pip install py-bcrypt
 
 Synapse can be found in the void repositories as 'synapse':
 
-xbps-install -Su
-xbps-install -S synapse
+```
+xbps-install -Su
+xbps-install -S synapse
+```
 
 ### FreeBSD
 
 Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
 
 - Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
-- Packages: `pkg install py27-matrix-synapse`
+- Packages: `pkg install py37-matrix-synapse`
 
 
 ### NixOS
@@ -388,15 +362,17 @@ Once you have installed synapse as above, you will need to configure it.
 
 ## TLS certificates
 
-The default configuration exposes a single HTTP port: http://localhost:8008. It
-is suitable for local testing, but for any practical use, you will either need
-to enable a reverse proxy, or configure Synapse to expose an HTTPS port.
+The default configuration exposes a single HTTP port on the local
+interface: `http://localhost:8008`. It is suitable for local testing,
+but for any practical use, you will need Synapse's APIs to be served
+over HTTPS.
 
-For information on using a reverse proxy, see
+The recommended way to do so is to set up a reverse proxy on port
+`8448`. You can find documentation on doing so in
 [docs/reverse_proxy.md](docs/reverse_proxy.md).
 
-To configure Synapse to expose an HTTPS port, you will need to edit
-`homeserver.yaml`, as follows:
+Alternatively, you can configure Synapse to expose an HTTPS port. To do
+so, you will need to edit `homeserver.yaml`, as follows:
 
 * First, under the `listeners` section, uncomment the configuration for the
 TLS-enabled listener. (Remove the hash sign (`#`) at the start of
@@ -409,19 +385,23 @@ To configure Synapse to expose an HTTPS port, you will need to edit
 resources:
 - names: [client, federation]
 ```
 
 * You will also need to uncomment the `tls_certificate_path` and
 `tls_private_key_path` lines under the `TLS` section. You can either
 point these settings at an existing certificate and key, or you can
 enable Synapse's built-in ACME (Let's Encrypt) support. Instructions
 for having Synapse automatically provision and renew federation
-certificates through ACME can be found at [ACME.md](docs/ACME.md). If you
-are using your own certificate, be sure to use a `.pem` file that includes
-the full certificate chain including any intermediate certificates (for
-instance, if using certbot, use `fullchain.pem` as your certificate, not
+certificates through ACME can be found at [ACME.md](docs/ACME.md).
+Note that, as pointed out in that document, this feature will not
+work with installs set up after November 2019.
+
+If you are using your own certificate, be sure to use a `.pem` file that
+includes the full certificate chain including any intermediate certificates
+(for instance, if using certbot, use `fullchain.pem` as your certificate, not
 `cert.pem`).
 
 For a more detailed guide to configuring your server for federation, see
-[federate.md](docs/federate.md)
+[federate.md](docs/federate.md).
 
 
 ## Email
@@ -468,7 +448,7 @@ on your server even if `enable_registration` is `false`.
 ## Setting up a TURN server
 
 For reliable VoIP calls to be routed via this homeserver, you MUST configure
 a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
 
 ## URL previews
 
@@ -477,10 +457,24 @@ turn it on you must enable the `url_preview_enabled: True` config parameter
 and explicitly specify the IP ranges that Synapse is not allowed to spider for
````
|
||||
previewing in the `url_preview_ip_range_blacklist` configuration parameter.
|
||||
This is critical from a security perspective to stop arbitrary Matrix users
|
||||
spidering 'internal' URLs on your network. At the very least we recommend that
|
||||
spidering 'internal' URLs on your network. At the very least we recommend that
|
||||
your loopback and RFC1918 IP addresses are blacklisted.
|
||||
|
||||
This also requires the optional lxml and netaddr python dependencies to be
|
||||
installed. This in turn requires the libxml2 library to be available - on
|
||||
This also requires the optional `lxml` and `netaddr` python dependencies to be
|
||||
installed. This in turn requires the `libxml2` library to be available - on
|
||||
Debian/Ubuntu this means `apt-get install libxml2-dev`, or equivalent for
|
||||
your OS.
|
||||
|
||||
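As a rough sketch (the Debian/Ubuntu package name is taken from above; adjust
for your OS, and run the `pip` step inside Synapse's virtualenv), installing
the URL preview dependencies might look like:

```
# system library required to build lxml
sudo apt-get install libxml2-dev

# optional python dependencies used for URL previews
pip install lxml netaddr
```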
# Troubleshooting Installation
|
||||
|
||||
`pip` seems to leak *lots* of memory during installation. For instance, a Linux
|
||||
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
||||
happens, you will have to individually install the dependencies which are
|
||||
failing, e.g.:
|
||||
|
||||
```
|
||||
pip install twisted
|
||||
```
|
||||
|
||||
If you have any other problems, feel free to ask in
|
||||
[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org).
|
||||
|
||||
@@ -272,7 +272,7 @@ to install using pip and a virtualenv::
|
||||
|
||||
virtualenv -p python3 env
|
||||
source env/bin/activate
|
||||
python -m pip install --no-use-pep517 -e .[all]
|
||||
python -m pip install --no-use-pep517 -e ".[all]"
|
||||
|
||||
This will run a process of downloading and installing all the needed
|
||||
dependencies into a virtual env.
|
||||
|
||||
@@ -76,6 +76,15 @@ for example:
|
||||
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
||||
|
||||
|
||||
Upgrading to v1.10.0
|
||||
====================
|
||||
|
||||
Synapse will now log a warning on start up if used with a PostgreSQL database
|
||||
that has a non-recommended locale set.
|
||||
|
||||
See `docs/postgres.md <docs/postgres.md>`_ for details.
|
||||
|
||||
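If you want to check the locale of an existing PostgreSQL database before
upgrading, one way to do so (a sketch, assuming a standard install with a
``postgres`` superuser) is::

    sudo -u postgres psql -c "SELECT datname, datcollate, datctype FROM pg_database;"

A ``C`` collation and ctype with UTF-8 encoding is what the docker-compose
example elsewhere in this changeset requests via ``POSTGRES_INITDB_ARGS``; see
``docs/postgres.md`` for the full recommendation.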
|
||||
Upgrading to v1.8.0
|
||||
===================
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
Allow admin to create or modify a user. Contributed by Awesome Technologies Innovationslabor GmbH.
|
||||
@@ -1 +0,0 @@
|
||||
Fix a typo in the configuration example for purge jobs in the sample configuration file.
|
||||
@@ -1 +0,0 @@
|
||||
Add complete documentation of the message retention policies support.
|
||||
1
changelog.d/6634.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix single-sign on with CAS systems: pass the same service URL when requesting the CAS ticket and when calling the `proxyValidate` URL. Contributed by @Naugrimm.
|
||||
@@ -1 +0,0 @@
|
||||
Correctly proxy HTTP errors due to API calls to remote group servers.
|
||||
@@ -1 +0,0 @@
|
||||
Add `local_current_membership` table for tracking local user membership state in rooms.
|
||||
@@ -1 +0,0 @@
|
||||
No more overriding the entire /etc folder of the container in docker-compose.yaml. Contributed by Fabian Meyer.
|
||||
@@ -1 +0,0 @@
|
||||
Add some helpful tips about changelog entries to the github pull request template.
|
||||
@@ -1 +0,0 @@
|
||||
Fix media repo admin APIs when using a media worker.
|
||||
@@ -1 +0,0 @@
|
||||
Add complete documentation of the message retention policies support.
|
||||
@@ -1 +0,0 @@
|
||||
Port `synapse.replication.tcp` to async/await.
|
||||
@@ -1 +0,0 @@
|
||||
Fixup `synapse.replication` to pass mypy checks.
|
||||
@@ -1 +0,0 @@
|
||||
Synapse no longer supports versions of SQLite before 3.11, and will refuse to start when configured to use an older version. Administrators are recommended to migrate their database to Postgres (see instructions [here](docs/postgres.md)).
|
||||
@@ -1 +0,0 @@
|
||||
Add new quarantine media admin APIs to quarantine by media ID or by user who uploaded the media.
|
||||
@@ -1,2 +0,0 @@
|
||||
Fix "CRITICAL" errors being logged when a request is received for a uri containing non-ascii characters.
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
Clarify the `account_validity` and `email` sections of the sample configuration.
|
||||
@@ -1 +0,0 @@
|
||||
Allow additional_resources to implement IResource directly.
|
||||
@@ -1 +0,0 @@
|
||||
Allow REST endpoint implementations to raise a RedirectException, which will redirect the user's browser to a given location.
|
||||
@@ -1 +0,0 @@
|
||||
Updates and extensions to the module API.
|
||||
@@ -1 +0,0 @@
|
||||
Updates to the SAML mapping provider API.
|
||||
@@ -1 +0,0 @@
|
||||
Fix a bug where we would assign a numeric userid if somebody tried registering with an empty username.
|
||||
@@ -1 +0,0 @@
|
||||
Remove redundant RegistrationError class.
|
||||
@@ -1 +0,0 @@
|
||||
Don't block processing of incoming EDUs behind processing PDUs in the same transaction.
|
||||
@@ -1 +0,0 @@
|
||||
Add more endpoints to the documentation for Synapse workers.
|
||||
@@ -1 +0,0 @@
|
||||
Remove duplicate check for the `session` query parameter on the `/auth/xxx/fallback/web` Client-Server endpoint.
|
||||
@@ -1 +0,0 @@
|
||||
Attempt to retry sending a transaction when we detect a remote server has come back online, rather than waiting for a transaction to be triggered by new data.
|
||||
@@ -1 +0,0 @@
|
||||
Fix `purge_room` admin API.
|
||||
@@ -1 +0,0 @@
|
||||
Add org.matrix.e2e_cross_signing to unstable_features in /versions as per [MSC1756](https://github.com/matrix-org/matrix-doc/pull/1756).
|
||||
@@ -1 +0,0 @@
|
||||
Add StateMap type alias to simplify types.
|
||||
@@ -1 +0,0 @@
|
||||
Updates to the SAML mapping provider API.
|
||||
@@ -1 +0,0 @@
|
||||
When processing a SAML response, log the assertions for easier configuration.
|
||||
1
changelog.d/6988.doc
Normal file
@@ -0,0 +1 @@
|
||||
Improve the documentation for database configuration.
|
||||
1
changelog.d/7009.feature
Normal file
@@ -0,0 +1 @@
|
||||
Set `Referrer-Policy` header to `no-referrer` on media downloads.
|
||||
1
changelog.d/7010.misc
Normal file
@@ -0,0 +1 @@
|
||||
Change device list streams to have one row per ID.
|
||||
1
changelog.d/7011.misc
Normal file
@@ -0,0 +1 @@
|
||||
Remove concept of a non-limited stream.
|
||||
1
changelog.d/7024.misc
Normal file
@@ -0,0 +1 @@
|
||||
Move catchup of replication streams logic to worker.
|
||||
1
changelog.d/7089.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix a bug in the federation API which could cause occasional "Failed to get PDU" errors.
|
||||
1
changelog.d/7107.doc
Normal file
@@ -0,0 +1 @@
|
||||
Update pre-built package name for FreeBSD.
|
||||
1
changelog.d/7109.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Return the proper error (M_BAD_ALIAS) when a non-existent canonical alias is provided.
|
||||
1
changelog.d/7110.misc
Normal file
@@ -0,0 +1 @@
|
||||
Convert some of synapse.rest.media to async/await.
|
||||
1
changelog.d/7115.misc
Normal file
@@ -0,0 +1 @@
|
||||
De-duplicate / remove unused REST code for login and auth.
|
||||
1
changelog.d/7116.misc
Normal file
@@ -0,0 +1 @@
|
||||
Convert `*StreamRow` classes to inner classes.
|
||||
1
changelog.d/7117.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix a bug which meant that groups updates were not correctly replicated between workers.
|
||||
1
changelog.d/7120.misc
Normal file
@@ -0,0 +1 @@
|
||||
Clean up some LoggingContext code.
|
||||
1
changelog.d/7133.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix starting workers when federation sending not split out.
|
||||
1
changelog.d/7141.doc
Normal file
@@ -0,0 +1 @@
|
||||
Clean up INSTALL.md a bit.
|
||||
1
changelog.d/7143.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Prevent `sqlite3` module from being imported even when using the postgres backend.
|
||||
@@ -15,10 +15,9 @@ services:
|
||||
restart: unless-stopped
|
||||
# See the readme for a full documentation of the environment settings
|
||||
environment:
|
||||
- SYNAPSE_CONFIG_PATH=/etc/homeserver.yaml
|
||||
- SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
|
||||
volumes:
|
||||
# You may either store all the files in a local folder
|
||||
- ./matrix-config/homeserver.yaml:/etc/homeserver.yaml
|
||||
- ./files:/data
|
||||
# .. or you may split this between different storage points
|
||||
# - ./files:/data
|
||||
@@ -56,6 +55,9 @@ services:
|
||||
environment:
|
||||
- POSTGRES_USER=synapse
|
||||
- POSTGRES_PASSWORD=changeme
|
||||
# ensure the database gets created correctly
|
||||
# https://github.com/matrix-org/synapse/blob/master/docs/postgres.md#set-up-database
|
||||
- POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
|
||||
volumes:
|
||||
# You may store the database tables in a local folder..
|
||||
- ./schemas:/var/lib/postgresql/data
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Using the Synapse Grafana dashboard
|
||||
|
||||
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
||||
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst
|
||||
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
|
||||
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
||||
3. Set up additional recording rules
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"id": 1,
|
||||
"iteration": 1561447718159,
|
||||
"iteration": 1584612489167,
|
||||
"links": [
|
||||
{
|
||||
"asDropdown": true,
|
||||
@@ -34,6 +34,7 @@
|
||||
"panels": [
|
||||
{
|
||||
"collapsed": false,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -52,12 +53,14 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 1
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 75,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -72,7 +75,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -151,6 +156,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
@@ -158,6 +164,7 @@
|
||||
"x": 12,
|
||||
"y": 1
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 33,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -172,7 +179,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -302,12 +311,14 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 0,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 10
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 107,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -322,7 +333,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -425,12 +438,14 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 0,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 19
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 118,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -445,7 +460,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -542,6 +559,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -1361,6 +1379,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -1732,6 +1751,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -2439,6 +2459,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -2635,6 +2656,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -2650,11 +2672,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 61
|
||||
"y": 33
|
||||
},
|
||||
"id": 79,
|
||||
"legend": {
|
||||
@@ -2670,6 +2693,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -2684,8 +2710,13 @@
|
||||
"expr": "sum(rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "txn rate",
|
||||
"legendFormat": "successful txn rate",
|
||||
"refId": "A"
|
||||
},
|
||||
{
|
||||
"expr": "sum(rate(synapse_util_metrics_block_count{block_name=\"_send_new_transaction\",instance=\"$instance\"}[$bucket_size]) - ignoring (block_name) rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
|
||||
"legendFormat": "failed txn rate",
|
||||
"refId": "B"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
@@ -2736,11 +2767,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 61
|
||||
"y": 33
|
||||
},
|
||||
"id": 83,
|
||||
"legend": {
|
||||
@@ -2756,6 +2788,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -2829,11 +2864,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 70
|
||||
"y": 42
|
||||
},
|
||||
"id": 109,
|
||||
"legend": {
|
||||
@@ -2849,6 +2885,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -2923,11 +2962,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 70
|
||||
"y": 42
|
||||
},
|
||||
"id": 111,
|
||||
"legend": {
|
||||
@@ -2943,6 +2983,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3009,6 +3052,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -3024,12 +3068,14 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 62
|
||||
"y": 34
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 51,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -3044,6 +3090,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3112,6 +3161,95 @@
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"description": "",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 34
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 134,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"hideZero": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 1,
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 2,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"expr": "topk(10,synapse_pushers{job=~\"$job\",index=~\"$index\", instance=\"$instance\"})",
|
||||
"legendFormat": "{{kind}} {{app_id}}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Active pusher instances by app",
|
||||
"tooltip": {
|
||||
"shared": false,
|
||||
"sort": 2,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"repeat": null,
|
||||
@@ -3120,6 +3258,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -3523,6 +3662,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -3540,6 +3680,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
@@ -3562,6 +3703,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3630,6 +3774,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
@@ -3652,6 +3797,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3720,6 +3868,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
@@ -3742,6 +3891,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3810,6 +3962,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
@@ -3832,6 +3985,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3921,6 +4077,7 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -4010,6 +4167,7 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -4076,6 +4234,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -4540,6 +4699,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -5060,6 +5220,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -5079,7 +5240,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 67
|
||||
"y": 39
|
||||
},
|
||||
"id": 2,
|
||||
"legend": {
|
||||
@@ -5095,6 +5256,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5198,7 +5360,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 67
|
||||
"y": 39
|
||||
},
|
||||
"id": 41,
|
||||
"legend": {
|
||||
@@ -5214,6 +5376,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5286,7 +5449,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 74
|
||||
"y": 46
|
||||
},
|
||||
"id": 42,
|
||||
"legend": {
|
||||
@@ -5302,6 +5465,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5373,7 +5537,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 74
|
||||
"y": 46
|
||||
},
|
||||
"id": 43,
|
||||
"legend": {
|
||||
@@ -5389,6 +5553,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5460,7 +5625,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 81
|
||||
"y": 53
|
||||
},
|
||||
"id": 113,
|
||||
"legend": {
|
||||
@@ -5476,6 +5641,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5546,7 +5712,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 81
|
||||
"y": 53
|
||||
},
|
||||
"id": 115,
|
||||
"legend": {
|
||||
@@ -5562,6 +5728,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5573,7 +5740,7 @@
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(synapse_replication_tcp_protocol_close_reason{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"expr": "rate(synapse_replication_tcp_protocol_close_reason{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{job}}-{{index}} {{reason_type}}",
|
||||
@@ -5628,6 +5795,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -5643,11 +5811,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 13
|
||||
"y": 40
|
||||
},
|
||||
"id": 67,
|
||||
"legend": {
|
||||
@@ -5663,7 +5832,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5679,7 +5850,7 @@
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{job}}-{{index}} ",
|
||||
"legendFormat": "{{job}}-{{index}} {{name}}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
@@ -5731,11 +5902,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 13
|
||||
"y": 40
|
||||
},
|
||||
"id": 71,
|
||||
"legend": {
|
||||
@@ -5751,7 +5923,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5819,11 +5993,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 22
|
||||
"y": 49
|
||||
},
|
||||
"id": 121,
|
||||
"interval": "",
|
||||
@@ -5840,7 +6015,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5909,6 +6086,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -6607,7 +6785,7 @@
|
||||
}
|
||||
],
|
||||
"refresh": "5m",
|
||||
"schemaVersion": 18,
|
||||
"schemaVersion": 22,
|
||||
"style": "dark",
|
||||
"tags": [
|
||||
"matrix"
|
||||
@@ -6616,7 +6794,7 @@
|
||||
"list": [
|
||||
{
|
||||
"current": {
|
||||
"tags": [],
|
||||
"selected": true,
|
||||
"text": "Prometheus",
|
||||
"value": "Prometheus"
|
||||
},
|
||||
@@ -6638,6 +6816,7 @@
|
||||
"auto_count": 100,
|
||||
"auto_min": "30s",
|
||||
"current": {
|
||||
"selected": false,
|
||||
"text": "auto",
|
||||
"value": "$__auto_interval_bucket_size"
|
||||
},
|
||||
@@ -6719,9 +6898,9 @@
|
||||
"allFormat": "regex wildcard",
|
||||
"allValue": "",
|
||||
"current": {
|
||||
"text": "All",
|
||||
"text": "synapse",
|
||||
"value": [
|
||||
"$__all"
|
||||
"synapse"
|
||||
]
|
||||
},
|
||||
"datasource": "$datasource",
|
||||
@@ -6751,7 +6930,9 @@
|
||||
"allValue": ".*",
|
||||
"current": {
|
||||
"text": "All",
|
||||
"value": "$__all"
|
||||
"value": [
|
||||
"$__all"
|
||||
]
|
||||
},
|
||||
"datasource": "$datasource",
|
||||
"definition": "",
|
||||
@@ -6810,5 +6991,5 @@
|
||||
"timezone": "",
|
||||
"title": "Synapse",
|
||||
"uid": "000000012",
|
||||
"version": 10
|
||||
"version": 19
|
||||
}
|
||||
42
debian/changelog
vendored
@@ -1,3 +1,45 @@
|
||||
matrix-synapse-py3 (1.12.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.12.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Mon, 23 Mar 2020 12:13:03 +0000
|
||||
|
||||
matrix-synapse-py3 (1.11.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.11.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Mar 2020 15:01:22 +0000
|
||||
|
||||
matrix-synapse-py3 (1.11.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.11.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 21 Feb 2020 08:54:34 +0000
|
||||
|
||||
matrix-synapse-py3 (1.10.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.10.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Mon, 17 Feb 2020 16:27:28 +0000
|
||||
|
||||
matrix-synapse-py3 (1.10.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.10.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 12 Feb 2020 12:18:54 +0000
|
||||
|
||||
matrix-synapse-py3 (1.9.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.9.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2020 13:09:23 +0000
|
||||
|
||||
matrix-synapse-py3 (1.9.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.9.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 23 Jan 2020 12:56:31 +0000
|
||||
|
||||
matrix-synapse-py3 (1.8.0) stable; urgency=medium
|
||||
|
||||
[ Richard van der Hoff ]
|
||||
|
||||
@@ -16,7 +16,7 @@ ARG PYTHON_VERSION=3.7
|
||||
###
|
||||
### Stage 0: builder
|
||||
###
|
||||
FROM docker.io/python:${PYTHON_VERSION}-alpine3.10 as builder
|
||||
FROM docker.io/python:${PYTHON_VERSION}-alpine3.11 as builder
|
||||
|
||||
# install the OS build deps
|
||||
|
||||
|
||||
@@ -110,12 +110,12 @@ argument to `docker run`.
|
||||
|
||||
## Legacy dynamic configuration file support
|
||||
|
||||
For backwards-compatibility only, the docker image supports creating a dynamic
|
||||
configuration file based on environment variables. This is now deprecated, but
|
||||
is enabled when the `SYNAPSE_SERVER_NAME` variable is set (and `generate` is
|
||||
not given).
|
||||
The docker image used to support creating a dynamic configuration file based
|
||||
on environment variables. This is no longer supported, and an error will be
|
||||
raised if you try to run synapse without a config file.
|
||||
|
||||
To migrate from a dynamic configuration file to a static one, run the docker
|
||||
It is, however, possible to generate a static configuration file based on
|
||||
the environment variables that were previously used. To do this, run the docker
|
||||
container once with the environment variables set and the `migrate_config`
|
||||
command line option. For example:
|
||||
|
||||
@@ -127,15 +127,20 @@ docker run -it --rm \
|
||||
matrixdotorg/synapse:latest migrate_config
|
||||
```
|
||||
|
||||
This will generate the same configuration file as the legacy mode used, but
|
||||
will store it in `/data/homeserver.yaml` instead of a temporary location. You
|
||||
can then use it as shown above at [Running synapse](#running-synapse).
|
||||
This will generate the same configuration file as the legacy mode used, and
|
||||
will store it in `/data/homeserver.yaml`. You can then use it as shown above at
|
||||
[Running synapse](#running-synapse).
|
||||
|
||||
Note that the defaults used in this configuration file may be different to
|
||||
those when generating a new config file with `generate`: for example, TLS is
|
||||
enabled by default in this mode. You are encouraged to inspect the generated
|
||||
configuration file and edit it to ensure it meets your needs.
|
||||
|
||||
## Building the image
|
||||
|
||||
If you need to build the image from a Synapse checkout, use the following `docker
|
||||
build` command from the repo's root:
|
||||
|
||||
|
||||
```
|
||||
docker build -t matrixdotorg/synapse -f docker/Dockerfile .
|
||||
```
|
||||
|
||||
@@ -188,11 +188,6 @@ def main(args, environ):
|
||||
else:
|
||||
ownership = "{}:{}".format(desired_uid, desired_gid)
|
||||
|
||||
log(
|
||||
"Container running as UserID %s:%s, ENV (or defaults) requests %s:%s"
|
||||
% (os.getuid(), os.getgid(), desired_uid, desired_gid)
|
||||
)
|
||||
|
||||
if ownership is None:
|
||||
log("Will not perform chmod/su-exec as UserID already matches request")
|
||||
|
||||
@@ -213,38 +208,30 @@ def main(args, environ):
|
||||
if mode is not None:
|
||||
error("Unknown execution mode '%s'" % (mode,))
|
||||
|
||||
if "SYNAPSE_SERVER_NAME" in environ:
|
||||
# backwards-compatibility generate-a-config-on-the-fly mode
|
||||
if "SYNAPSE_CONFIG_PATH" in environ:
|
||||
error(
|
||||
"SYNAPSE_SERVER_NAME can only be combined with SYNAPSE_CONFIG_PATH "
|
||||
"in `generate` or `migrate_config` mode. To start synapse using a "
|
||||
"config file, unset the SYNAPSE_SERVER_NAME environment variable."
|
||||
)
|
||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
||||
|
||||
config_path = "/compiled/homeserver.yaml"
|
||||
log(
|
||||
"Generating config file '%s' on-the-fly from environment variables.\n"
|
||||
"Note that this mode is deprecated. You can migrate to a static config\n"
|
||||
"file by running with 'migrate_config'. See the README for more details."
|
||||
% (config_path,)
|
||||
)
|
||||
|
||||
generate_config_from_template("/compiled", config_path, environ, ownership)
|
||||
else:
|
||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||
config_path = environ.get(
|
||||
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
||||
)
|
||||
if not os.path.exists(config_path):
|
||||
if not os.path.exists(config_path):
|
||||
if "SYNAPSE_SERVER_NAME" in environ:
|
||||
error(
|
||||
"Config file '%s' does not exist. You should either create a new "
|
||||
"config file by running with the `generate` argument (and then edit "
|
||||
"the resulting file before restarting) or specify the path to an "
|
||||
"existing config file with the SYNAPSE_CONFIG_PATH variable."
|
||||
"""\
|
||||
Config file '%s' does not exist.
|
||||
|
||||
The synapse docker image no longer supports generating a config file on-the-fly
|
||||
based on environment variables. You can migrate to a static config file by
|
||||
running with 'migrate_config'. See the README for more details.
|
||||
"""
|
||||
% (config_path,)
|
||||
)
|
||||
|
||||
error(
|
||||
"Config file '%s' does not exist. You should either create a new "
|
||||
"config file by running with the `generate` argument (and then edit "
|
||||
"the resulting file before restarting) or specify the path to an "
|
||||
"existing config file with the SYNAPSE_CONFIG_PATH variable."
|
||||
% (config_path,)
|
||||
)
|
||||
|
||||
log("Starting synapse with config file " + config_path)
|
||||
|
||||
args = ["python", "-m", synapse_worker, "--config-path", config_path]
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# The config is maintained as an up-to-date snapshot of the default
|
||||
# This file is maintained as an up-to-date snapshot of the default
|
||||
# homeserver.yaml configuration generated by Synapse.
|
||||
#
|
||||
# It is intended to act as a reference for the default configuration,
|
||||
@@ -10,3 +10,5 @@
|
||||
# homeserver.yaml. Instead, if you are starting from scratch, please generate
|
||||
# a fresh config using Synapse by following the instructions in INSTALL.md.
|
||||
|
||||
################################################################################
|
||||
|
||||
|
||||
55
docs/ACME.md
@@ -1,12 +1,48 @@
|
||||
# ACME
|
||||
|
||||
Synapse v1.0 will require valid TLS certificates for communication between
|
||||
servers (port `8448` by default) in addition to those that are client-facing
|
||||
(port `443`). If you do not already have a valid certificate for your domain,
|
||||
the easiest way to get one is with Synapse's new ACME support, which will use
|
||||
the ACME protocol to provision a certificate automatically. Synapse v0.99.0+
|
||||
will provision server-to-server certificates automatically for you for free
|
||||
through [Let's Encrypt](https://letsencrypt.org/) if you tell it to.
|
||||
From version 1.0 (June 2019) onwards, Synapse requires valid TLS
|
||||
certificates for communication between servers (by default on port
|
||||
`8448`) in addition to those that are client-facing (port `443`). To
|
||||
help homeserver admins fulfil this new requirement, Synapse v0.99.0
|
||||
introduced support for automatically provisioning certificates through
|
||||
[Let's Encrypt](https://letsencrypt.org/) using the ACME protocol.
|
||||
|
||||
## Deprecation of ACME v1
|
||||
|
||||
In [March 2019](https://community.letsencrypt.org/t/end-of-life-plan-for-acmev1/88430),
|
||||
Let's Encrypt announced that they were deprecating version 1 of the ACME
|
||||
protocol, with the plan to disable the use of it for new accounts in
|
||||
November 2019, and for existing accounts in June 2020.
|
||||
|
||||
Synapse doesn't currently support version 2 of the ACME protocol, which
|
||||
means that:
|
||||
|
||||
* for existing installs, Synapse's built-in ACME support will continue
|
||||
to work until June 2020.
|
||||
* for new installs, this feature will not work at all.
|
||||
|
||||
Either way, it is recommended to move from Synapse's ACME support
|
||||
feature to an external automated tool such as [certbot](https://github.com/certbot/certbot)
|
||||
(or browse [this list](https://letsencrypt.org/fr/docs/client-options/)
|
||||
for an alternative ACME client).
|
||||
|
||||
It's also recommended to use a reverse proxy for the server-facing
|
||||
communications (more documentation about this can be found
|
||||
[here](/docs/reverse_proxy.md)) as well as the client-facing ones and
|
||||
have it serve the certificates.
|
||||
|
||||
In case you can't do that and need Synapse to serve them itself, make
|
||||
sure to set the `tls_certificate_path` configuration setting to the path
|
||||
of the certificate (make sure to use the certificate containing the full
|
||||
certification chain, e.g. `fullchain.pem` if using certbot) and
|
||||
`tls_private_key_path` to the path of the matching private key. Note
|
||||
that in this case you will need to restart Synapse after each
|
||||
certificate renewal so that Synapse stops using the old certificate.
|
||||
|
||||
If you still want to use Synapse's built-in ACME support, the rest of
|
||||
this document explains how to set it up.
|
||||
|
||||
## Initial setup
|
||||
|
||||
In the case that your `server_name` config variable is the same as
|
||||
the hostname that the client connects to, then the same certificate can be
|
||||
@@ -32,11 +68,6 @@ If you already have certificates, you will need to back up or delete them
|
||||
(files `example.com.tls.crt` and `example.com.tls.key` in Synapse's root
|
||||
directory), Synapse's ACME implementation will not overwrite them.
|
||||
|
||||
You may wish to use alternate methods such as Certbot to obtain a certificate
|
||||
from Let's Encrypt, depending on your server configuration. Of course, if you
|
||||
already have a valid certificate for your homeserver's domain, that can be
|
||||
placed in Synapse's config directory without the need for any ACME setup.
|
||||
|
||||
## ACME setup
|
||||
|
||||
The main steps for enabling ACME support in short summary are:
|
||||
|
||||
@@ -8,6 +8,9 @@ Depending on the amount of history being purged a call to the API may take
|
||||
several minutes or longer. During this period users will not be able to
|
||||
paginate further back in the room from the point being purged from.
|
||||
|
||||
Note that Synapse requires at least one message in each room, so it will never
|
||||
delete the last message in a room.
|
||||
|
||||
The API is:
|
||||
|
||||
``POST /_synapse/admin/v1/purge_history/<room_id>[/<event_id>]``
|
||||
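As an illustration only (the listener address, admin access token and
identifiers below are placeholders, and any request-body options are omitted)::

    curl --header "Authorization: Bearer <admin_access_token>" \
         --data '{}' \
         'http://localhost:8008/_synapse/admin/v1/purge_history/<room_id>/<event_id>'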
|
||||
173
docs/admin_api/rooms.md
Normal file
@@ -0,0 +1,173 @@
|
||||
# List Room API
|
||||
|
||||
The List Room admin API allows server admins to get a list of rooms on their
|
||||
server. There are various parameters available that allow for filtering and
|
||||
sorting the returned list. This API supports pagination.
|
||||
|
||||
## Parameters
|
||||
|
||||
The following query parameters are available:
|
||||
|
||||
* `from` - Offset in the returned list. Defaults to `0`.
|
||||
* `limit` - Maximum number of rooms to return. Defaults to `100`.
|
||||
* `order_by` - The method in which to sort the returned list of rooms. Valid values are:
|
||||
- `alphabetical` - Rooms are ordered alphabetically by room name. This is the default.
|
||||
- `size` - Rooms are ordered by the number of members. Largest to smallest.
|
||||
* `dir` - Direction of room order. Either `f` for forwards or `b` for backwards. Setting
|
||||
this value to `b` will reverse the above sort order. Defaults to `f`.
|
||||
* `search_term` - Filter rooms by their room name. Search term can be contained in any
|
||||
part of the room name. Defaults to no filtering.
|
||||
|
||||
The following fields are possible in the JSON response body:
|
||||
|
||||
* `rooms` - An array of objects, each containing information about a room.
|
||||
- Room objects contain the following fields:
|
||||
- `room_id` - The ID of the room.
|
||||
- `name` - The name of the room.
|
||||
- `canonical_alias` - The canonical (main) alias address of the room.
|
||||
- `joined_members` - How many users are currently in the room.
|
||||
* `offset` - The current pagination offset in rooms. This parameter should be
|
||||
used instead of `next_token` for room offset as `next_token` is
|
||||
not intended to be parsed.
|
||||
* `total_rooms` - The total number of rooms this query can return. Using this
|
||||
and `offset`, you have enough information to know the current
|
||||
progression through the list.
|
||||
* `next_batch` - If this field is present, we know that there are potentially
|
||||
more rooms on the server that did not all fit into this response.
|
||||
We can use `next_batch` to get the "next page" of results. To do
|
||||
so, simply repeat your request, setting the `from` parameter to
|
||||
the value of `next_batch`.
|
||||
* `prev_batch` - If this field is present, it is possible to paginate backwards.
|
||||
Use `prev_batch` for the `from` value in the next request to
|
||||
get the "previous page" of results.
|
||||
|
||||
## Usage
|
||||
|
||||
A standard request with no filtering:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
|
||||
"name": "Matrix HQ",
|
||||
"canonical_alias": "#matrix:matrix.org",
|
||||
"joined_members": 8326
|
||||
},
|
||||
... (8 hidden items) ...
|
||||
{
|
||||
"room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
|
||||
"name": "This Week In Matrix (TWIM)",
|
||||
"canonical_alias": "#twim:matrix.org",
|
||||
"joined_members": 314
|
||||
}
|
||||
],
|
||||
"offset": 0,
|
||||
"total_rooms": 10
|
||||
}
|
||||
```
|
||||
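From a shell, a similar request could be made roughly as follows (a sketch:
the listener address and the admin access token are assumptions, not part of
the API description above):

```
curl --header "Authorization: Bearer <admin_access_token>" \
     'http://localhost:8008/_synapse/admin/v1/rooms'
```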
|
||||
Filtering by room name:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms?search_term=TWIM
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
|
||||
"name": "This Week In Matrix (TWIM)",
|
||||
"canonical_alias": "#twim:matrix.org",
|
||||
"joined_members": 314
|
||||
}
|
||||
],
|
||||
"offset": 0,
|
||||
"total_rooms": 1
|
||||
}
|
||||
```
|
||||
|
||||
Paginating through a list of rooms:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms?order_by=size
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
|
||||
"name": "Matrix HQ",
|
||||
"canonical_alias": "#matrix:matrix.org",
|
||||
"joined_members": 8326
|
||||
},
|
||||
... (98 hidden items) ...
|
||||
{
|
||||
"room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
|
||||
"name": "This Week In Matrix (TWIM)",
|
||||
"canonical_alias": "#twim:matrix.org",
|
||||
"joined_members": 314
|
||||
}
|
||||
],
|
||||
"offset": 0,
|
||||
"total_rooms": 150
|
||||
"next_token": 100
|
||||
}
|
||||
```
|
||||
|
||||
The presence of the `next_token` parameter tells us that there are more rooms
|
||||
than returned in this request, and we need to make another request to get them.
|
||||
To get the next batch of room results, we repeat our request, setting the `from`
|
||||
parameter to the value of `next_token`.
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms?order_by=size&from=100
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!mscvqgqpHYjBGDxNym:matrix.org",
|
||||
"name": "Music Theory",
|
||||
"canonical_alias": "#musictheory:matrix.org",
|
||||
"joined_members": 127
|
||||
},
|
||||
... (48 hidden items) ...
|
||||
{
|
||||
"room_id": "!twcBhHVdZlQWuuxBhN:termina.org.uk",
|
||||
"name": "weechat-matrix",
|
||||
"canonical_alias": "#weechat-matrix:termina.org.uk",
|
||||
"joined_members": 137
|
||||
}
|
||||
],
|
||||
"offset": 100,
|
||||
"prev_batch": 0,
|
||||
"total_rooms": 150
|
||||
}
|
||||
```
|
||||
|
||||
Once the `next_token` parameter is no longer present, we know we've reached the
|
||||
end of the list.
|
||||
@@ -2,7 +2,8 @@ Create or modify Account
|
||||
========================
|
||||
|
||||
This API allows an administrator to create or modify a user account with a
|
||||
specific ``user_id``.
|
||||
specific ``user_id``. Be aware that ``user_id`` is fully qualified: for example,
|
||||
``@user:server.com``.
|
||||
|
||||
This api is::
|
||||
|
||||
@@ -15,6 +16,16 @@ with a body of:
|
||||
{
|
||||
"password": "user_password",
|
||||
"displayname": "User",
|
||||
"threepids": [
|
||||
{
|
||||
"medium": "email",
|
||||
"address": "<user_mail_1>"
|
||||
},
|
||||
{
|
||||
"medium": "email",
|
||||
"address": "<user_mail_2>"
|
||||
}
|
||||
],
|
||||
"avatar_url": "<avatar_url>",
|
||||
"admin": false,
|
||||
"deactivated": false
|
||||
@@ -23,9 +34,11 @@ with a body of:
|
||||
including an ``access_token`` of a server admin.
|
||||
|
||||
The parameter ``displayname`` is optional and defaults to ``user_id``.
|
||||
The parameter ``threepids`` is optional.
|
||||
The parameter ``avatar_url`` is optional.
|
||||
The parameter ``admin`` is optional and defaults to 'false'.
|
||||
The parameter ``deactivated`` is optional and defaults to 'false'.
|
||||
The parameter ``password`` is optional. If provided the user's password is updated and all devices are logged out.
|
||||
If the user already exists then optional parameters default to the current value.
|
||||
|
||||
List Accounts
|
||||
@@ -156,11 +169,14 @@ with a body of:
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"new_password": "<secret>"
|
||||
"new_password": "<secret>",
|
||||
"logout_devices": true,
|
||||
}
|
||||
|
||||
including an ``access_token`` of a server admin.
|
||||
|
||||
The parameter ``new_password`` is required.
|
||||
The parameter ``logout_devices`` is optional and defaults to ``true``.
|
||||
|
||||
Get whether a user is a server administrator or not
|
||||
===================================================
|
||||
|
||||
@@ -30,7 +30,7 @@ The necessary tools are detailed below.
|
||||
|
||||
Install `flake8` with:
|
||||
|
||||
pip install --upgrade flake8
|
||||
pip install --upgrade flake8 flake8-comprehensions
|
||||
|
||||
Check all application and test code with:
|
||||
|
||||
|
||||
94
docs/delegate.md
Normal file
@@ -0,0 +1,94 @@
|
||||
# Delegation
|
||||
|
||||
By default, other homeservers will expect to be able to reach yours via
|
||||
your `server_name`, on port 8448. For example, if you set your `server_name`
|
||||
to `example.com` (so that your user names look like `@user:example.com`),
|
||||
other servers will try to connect to yours at `https://example.com:8448/`.
|
||||
|
||||
Delegation is a Matrix feature allowing a homeserver admin to retain a
|
||||
`server_name` of `example.com` so that user IDs, room aliases, etc continue
|
||||
to look like `*:example.com`, whilst having federation traffic routed
|
||||
to a different server and/or port (e.g. `synapse.example.com:443`).
|
||||
|
||||
## .well-known delegation
|
||||
|
||||
To use this method, you need to be able to alter the
|
||||
`server_name`'s HTTPS server to serve the `/.well-known/matrix/server`
|
||||
URL. Having an active server (with a valid TLS certificate) serving your
|
||||
`server_name` domain is out of the scope of this documentation.
|
||||
|
||||
The URL `https://<server_name>/.well-known/matrix/server` should
|
||||
return a JSON structure containing the key `m.server` like so:
|
||||
|
||||
```json
|
||||
{
|
||||
"m.server": "<synapse.server.name>[:<yourport>]"
|
||||
}
|
||||
```
|
||||
|
||||
In our example, this would mean that the URL `https://example.com/.well-known/matrix/server`
|
||||
should return:
|
||||
|
||||
```json
|
||||
{
|
||||
"m.server": "synapse.example.com:443"
|
||||
}
|
||||
```
|
||||
|
||||
Note, specifying a port is optional. If no port is specified, then it defaults
|
||||
to 8448.
|
||||
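A quick way to check that the delegation file is being served as expected,
using the example values above, is:

```
curl https://example.com/.well-known/matrix/server
# expected response:
# {"m.server": "synapse.example.com:443"}
```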
|
||||
With .well-known delegation, federating servers will check for a valid TLS
|
||||
certificate for the delegated hostname (in our example: `synapse.example.com`).
|
||||
|
||||
## SRV DNS record delegation
|
||||
|
||||
It is also possible to do delegation using an SRV DNS record. However, that is
|
||||
considered an advanced topic since it's a bit complex to set up, and `.well-known`
|
||||
delegation is already enough in most cases.
|
||||
|
||||
However, if you really need it, you can find some documentation on what such a
|
||||
record should look like and how Synapse will use it in [the Matrix
|
||||
specification](https://matrix.org/docs/spec/server_server/latest#resolving-server-names).
|
||||
|
||||
## Delegation FAQ
|
||||
|
||||
### When do I need delegation?
|
||||
|
||||
If your homeserver's APIs are accessible on the default federation port (8448)
|
||||
and the domain your `server_name` points to, you do not need any delegation.
|
||||
|
||||
For instance, if you registered `example.com` and pointed its DNS A record at a
|
||||
fresh server, you could install Synapse on that host, giving it a `server_name`
|
||||
of `example.com`, and once a reverse proxy has been set up to proxy all requests
|
||||
sent to the port `8448` and serve TLS certificates for `example.com`, you
|
||||
wouldn't need any delegation set up.
|
||||
|
||||
**However**, if your homeserver's APIs aren't accessible on port 8448 and on the
|
||||
domain `server_name` points to, you will need to let other servers know how to
|
||||
find it using delegation.
|
||||
|
||||
### Do you still recommend against using a reverse proxy on the federation port?
|
||||
|
||||
We no longer actively recommend against using a reverse proxy. Many admins will
|
||||
find it easier to direct federation traffic to a reverse proxy and manage their
|
||||
own TLS certificates, and this is a supported configuration.
|
||||
|
||||
See [reverse_proxy.md](reverse_proxy.md) for information on setting up a
|
||||
reverse proxy.
|
||||
|
||||
### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?
|
||||
|
||||
This is no longer necessary. If you are using a reverse proxy for all of your
|
||||
TLS traffic, then you can set `no_tls: True` in the Synapse config.
|
||||
|
||||
In that case, the only reason Synapse needs the certificate is to populate a legacy
|
||||
`tls_fingerprints` field in the federation API. This is ignored by Synapse 0.99.0
|
||||
and later, and the only time pre-0.99 Synapses will check it is when attempting to
|
||||
fetch the server keys - and generally this is delegated via `matrix.org`, which
|
||||
is running a modern version of Synapse.
|
||||
|
||||
### Do I need the same certificate for the client and federation port?
|
||||
|
||||
No. There is nothing stopping you from using different certificates,
|
||||
particularly if you are using a reverse proxy.
|
||||
178
docs/federate.md
@@ -1,163 +1,41 @@
|
||||
Setting up Federation
|
||||
Setting up federation
|
||||
=====================
|
||||
|
||||
Federation is the process by which users on different servers can participate
|
||||
in the same room. For this to work, those other servers must be able to contact
|
||||
yours to send messages.
|
||||
|
||||
The ``server_name`` configured in the Synapse configuration file (often
|
||||
``homeserver.yaml``) defines how resources (users, rooms, etc.) will be
|
||||
identified (eg: ``@user:example.com``, ``#room:example.com``). By
|
||||
default, it is also the domain that other servers will use to
|
||||
try to reach your server (via port 8448). This is easy to set
|
||||
up and will work provided you set the ``server_name`` to match your
|
||||
machine's public DNS hostname, and provide Synapse with a TLS certificate
|
||||
which is valid for your ``server_name``.
|
||||
The `server_name` configured in the Synapse configuration file (often
|
||||
`homeserver.yaml`) defines how resources (users, rooms, etc.) will be
|
||||
identified (eg: `@user:example.com`, `#room:example.com`). By default,
|
||||
it is also the domain that other servers will use to try to reach your
|
||||
server (via port 8448). This is easy to set up and will work provided
|
||||
you set the `server_name` to match your machine's public DNS hostname.
|
||||
|
||||
For this default configuration to work, you will need to listen for TLS
|
||||
connections on port 8448. The preferred way to do that is by using a
|
||||
reverse proxy: see [reverse_proxy.md](<reverse_proxy.md>) for instructions
|
||||
on how to correctly set one up.
|
||||
|
||||
In some cases you might not want to run Synapse on the machine that has
|
||||
the `server_name` as its public DNS hostname, or you might want federation
|
||||
traffic to use a different port than 8448. For example, you might want to
|
||||
have your user names look like `@user:example.com`, but you want to run
|
||||
Synapse on `synapse.example.com` on port 443. This can be done using
|
||||
delegation, which allows an admin to control where federation traffic should
|
||||
be sent. See [delegate.md](delegate.md) for instructions on how to set this up.
|
||||
|
||||
Once federation has been configured, you should be able to join a room over
|
||||
federation. A good place to start is ``#synapse:matrix.org`` - a room for
|
||||
federation. A good place to start is `#synapse:matrix.org` - a room for
|
||||
Synapse admins.
|
||||
|
||||
|
||||
## Delegation
|
||||
|
||||
For a more flexible configuration, you can have ``server_name``
|
||||
resources (eg: ``@user:example.com``) served by a different host and
|
||||
port (eg: ``synapse.example.com:443``). There are two ways to do this:
|
||||
|
||||
- adding a ``/.well-known/matrix/server`` URL served on ``https://example.com``.
|
||||
- adding a DNS ``SRV`` record in the DNS zone of domain
|
||||
``example.com``.
|
||||
|
||||
Without configuring delegation, the Matrix federation will
expect to find your server via ``example.com:8448``. The following methods
allow you to retain a `server_name` of `example.com` so that your user IDs, room
aliases, etc continue to look like `*:example.com`, whilst having your
federation traffic routed to a different server.
|
||||
|
||||
### .well-known delegation
|
||||
|
||||
To use this method, you need to be able to configure the web server at your
``server_name`` to serve the ``/.well-known/matrix/server``
URL. Having an active server (with a valid TLS certificate) serving your
``server_name`` domain is out of the scope of this documentation.
|
||||
|
||||
The URL ``https://<server_name>/.well-known/matrix/server`` should
|
||||
return a JSON structure containing the key ``m.server`` like so:
|
||||
|
||||
{
|
||||
"m.server": "<synapse.server.name>[:<yourport>]"
|
||||
}
|
||||
|
||||
In our example, this would mean that the URL ``https://example.com/.well-known/matrix/server``
should return:
|
||||
|
||||
{
|
||||
"m.server": "synapse.example.com:443"
|
||||
}
|
||||
|
||||
Note that specifying a port is optional. If a port is not specified, an SRV lookup
is performed, as described below. If the target of the
delegation does not have an SRV record, then the port defaults to 8448.
|
||||
|
||||
Most installations will not need to configure .well-known. However, it can be
|
||||
useful in cases where the admin is hosting on behalf of someone else and
|
||||
therefore cannot gain access to the necessary certificate. With .well-known,
|
||||
federation servers will check for a valid TLS certificate for the delegated
|
||||
hostname (in our example: ``synapse.example.com``).
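For illustration, here is a minimal sketch (not part of Synapse or its tooling; it simply
reuses our running example domain) of how a remote server might fetch and read the
delegation file:

```python
# Minimal sketch of consuming .well-known delegation for our example domain.
# Purely illustrative: real homeservers also fall back to SRV lookups and to
# port 8448, as described in the Matrix specification.
import json
from urllib.request import urlopen

with urlopen("https://example.com/.well-known/matrix/server", timeout=10) as resp:
    well_known = json.load(resp)

delegated_to = well_known["m.server"]   # e.g. "synapse.example.com:443"
print("federation traffic should be sent to", delegated_to)
```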
|
||||
|
||||
### DNS SRV delegation
|
||||
|
||||
To use this delegation method, you need to have write access to your
|
||||
``server_name`` 's domain zone DNS records (in our example it would be
|
||||
``example.com`` DNS zone).
|
||||
|
||||
This method requires the target server to provide a
|
||||
valid TLS certificate for the original ``server_name``.
|
||||
|
||||
You need to add an SRV record to the DNS zone of your ``server_name``, with
this format:
|
||||
|
||||
_matrix._tcp.<yourdomain.com> <ttl> IN SRV <priority> <weight> <port> <synapse.server.name>
|
||||
|
||||
In our example, we would need to add this SRV record in the
|
||||
``example.com`` DNS zone:
|
||||
|
||||
_matrix._tcp.example.com. 3600 IN SRV 10 5 443 synapse.example.com.
|
||||
|
||||
Once done and set up, you can check the DNS record with ``dig -t srv
|
||||
_matrix._tcp.<server_name>``. In our example, we would expect this:
|
||||
|
||||
$ dig -t srv _matrix._tcp.example.com
|
||||
_matrix._tcp.example.com. 3600 IN SRV 10 5 443 synapse.example.com.
|
||||
|
||||
Note that the target of an SRV record cannot be an alias (CNAME record): it has to point
directly to the server hosting the Synapse instance.
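If you would rather script this check than run `dig` by hand, the same lookup can be done
from Python; this assumes the third-party `dnspython` package (2.x), which is not something
Synapse itself requires:

```python
# Same check as the `dig -t srv` command above, from Python (requires dnspython).
import dns.resolver

answers = dns.resolver.resolve("_matrix._tcp.example.com", "SRV")
for rr in answers:
    # priority, weight, port and target should match the record you created.
    print(rr.priority, rr.weight, rr.port, rr.target)
```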
|
||||
|
||||
### Delegation FAQ
|
||||
#### When do I need an SRV record or .well-known URI?
|
||||
|
||||
If your homeserver listens on the default federation port (8448), and your
|
||||
`server_name` points to the host that your homeserver runs on, you do not need an SRV
|
||||
record or `.well-known/matrix/server` URI.
|
||||
|
||||
For instance, if you registered `example.com` and pointed its DNS A record at a
|
||||
fresh server, you could install Synapse on that host,
|
||||
giving it a `server_name` of `example.com`, and once [ACME](acme.md) support is enabled,
|
||||
it would automatically generate a valid TLS certificate for you via Let's Encrypt
|
||||
and no SRV record or .well-known URI would be needed.
|
||||
|
||||
**However**, if your server does not listen on port 8448, or if your `server_name`
|
||||
does not point to the host that your homeserver runs on, you will need to let
|
||||
other servers know how to find it. The way to do this is via .well-known or an
|
||||
SRV record.
|
||||
|
||||
#### I have created a .well-known URI. Do I also need an SRV record?
|
||||
|
||||
No. You can use either `.well-known` delegation or an SRV record for delegation. You
do not need both to delegate to the same location.
|
||||
|
||||
#### Can I manage my own certificates rather than having Synapse renew certificates itself?
|
||||
|
||||
Yes, you are welcome to manage your certificates yourself. Synapse will only
attempt to obtain certificates from Let's Encrypt if you configure it to do
so. The only requirement is that there is a valid TLS cert present for
federation endpoints.
|
||||
|
||||
#### Do you still recommend against using a reverse proxy on the federation port?
|
||||
|
||||
We no longer actively recommend against using a reverse proxy. Many admins will
|
||||
find it easier to direct federation traffic to a reverse proxy and manage their
|
||||
own TLS certificates, and this is a supported configuration.
|
||||
|
||||
See [reverse_proxy.md](reverse_proxy.md) for information on setting up a
|
||||
reverse proxy.
|
||||
|
||||
#### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?
|
||||
|
||||
Practically speaking, this is no longer necessary.
|
||||
|
||||
If you are using a reverse proxy for all of your TLS traffic, then you can set
|
||||
`no_tls: True` in the Synapse config. In that case, the only reason Synapse
|
||||
needs the certificate is to populate a legacy `tls_fingerprints` field in the
|
||||
federation API. This is ignored by Synapse 0.99.0 and later, and the only time
|
||||
pre-0.99 Synapses will check it is when attempting to fetch the server keys -
|
||||
and generally this is delegated via `matrix.org`, which will be running a modern
|
||||
version of Synapse.
|
||||
|
||||
#### Do I need the same certificate for the client and federation port?
|
||||
|
||||
No. There is nothing stopping you from using different certificates,
|
||||
particularly if you are using a reverse proxy. However, Synapse will use the
|
||||
same certificate on any ports where TLS is configured.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
You can use the [federation tester](
|
||||
<https://matrix.org/federationtester>) to check if your homeserver is
|
||||
configured correctly. Alternatively try the [JSON API used by the federation tester](https://matrix.org/federationtester/api/report?server_name=DOMAIN).
|
||||
Note that you'll have to modify this URL to replace ``DOMAIN`` with your
|
||||
``server_name``. Hitting the API directly provides extra detail.
|
||||
You can use the [federation tester](https://matrix.org/federationtester)
|
||||
to check if your homeserver is configured correctly. Alternatively try the
|
||||
[JSON API used by the federation tester](https://matrix.org/federationtester/api/report?server_name=DOMAIN).
|
||||
Note that you'll have to modify this URL to replace `DOMAIN` with your
|
||||
`server_name`. Hitting the API directly provides extra detail.
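If you prefer to script this check, the same report can be fetched programmatically; a small
sketch follows (the exact fields in the report are defined by the federation tester service,
not by Synapse):

```python
# Fetch the federation tester's JSON report for a homeserver and list the
# top-level sections it contains. Illustrative only.
import json
from urllib.request import urlopen

server_name = "example.com"  # replace with your server_name
url = f"https://matrix.org/federationtester/api/report?server_name={server_name}"

with urlopen(url, timeout=30) as resp:
    report = json.load(resp)

print(sorted(report.keys()))
```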
|
||||
|
||||
The typical failure mode for federation is that when the server tries to join
|
||||
a room, it is rejected with "401: Unauthorized". Generally this means that other
|
||||
@@ -169,8 +47,8 @@ you invite them to. This can be caused by an incorrectly-configured reverse
|
||||
proxy: see [reverse_proxy.md](<reverse_proxy.md>) for instructions on how to correctly
|
||||
configure a reverse proxy.
|
||||
|
||||
## Running a Demo Federation of Synapses
|
||||
## Running a demo federation of Synapses
|
||||
|
||||
If you want to get up and running quickly with a trio of homeservers in a
|
||||
private federation, there is a script in the ``demo`` directory. This is mainly
|
||||
private federation, there is a script in the `demo` directory. This is mainly
|
||||
useful just for development purposes. See [demo/README](<../demo/README>).
|
||||
|
||||
@@ -29,14 +29,13 @@ from synapse.logging import context # omitted from future snippets
|
||||
def handle_request(request_id):
|
||||
request_context = context.LoggingContext()
|
||||
|
||||
calling_context = context.LoggingContext.current_context()
|
||||
context.LoggingContext.set_current_context(request_context)
|
||||
calling_context = context.set_current_context(request_context)
|
||||
try:
|
||||
request_context.request = request_id
|
||||
do_request_handling()
|
||||
logger.debug("finished")
|
||||
finally:
|
||||
context.LoggingContext.set_current_context(calling_context)
|
||||
context.set_current_context(calling_context)
|
||||
|
||||
def do_request_handling():
|
||||
logger.debug("phew") # this will be logged against request_id
|
||||
|
||||
@@ -42,6 +42,10 @@ purged according to its room's policy, then the receiving server will
|
||||
process and store that event until it's picked up by the next purge job,
|
||||
though it will always hide it from clients.
|
||||
|
||||
Synapse requires at least one message in each room, so it will never
|
||||
delete the last message in a room. It will, however, hide it from
|
||||
clients.
|
||||
|
||||
|
||||
## Server configuration
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ Assuming your PostgreSQL database user is called `postgres`, first authenticate
|
||||
su - postgres
|
||||
# Or, if your system uses sudo to get administrative rights
|
||||
sudo -u postgres bash
|
||||
|
||||
|
||||
Then, create a user ``synapse_user`` with:
|
||||
|
||||
createuser --pwprompt synapse_user
|
||||
@@ -63,6 +63,23 @@ You may need to enable password authentication so `synapse_user` can
|
||||
connect to the database. See
|
||||
<https://www.postgresql.org/docs/11/auth-pg-hba-conf.html>.
|
||||
|
||||
### Fixing incorrect `COLLATE` or `CTYPE`
|
||||
|
||||
Synapse will refuse to set up a new database if it has the wrong values of
|
||||
`COLLATE` and `CTYPE` set, and will log warnings on existing databases. Using
|
||||
different locales can cause issues if the locale library is updated from
|
||||
underneath the database, or if a different version of the locale is used on any
|
||||
replicas.
|
||||
|
||||
The safest way to fix the issue is to take a dump and recreate the database with
the correct `COLLATE` and `CTYPE` parameters (as shown above). It is also possible to change the
parameters on a live database and run a `REINDEX` on the entire database;
however, extreme care must be taken to avoid database corruption.
|
||||
|
||||
Note that the above may fail with an error about duplicate rows if corruption
|
||||
has already occurred, and such duplicate rows will need to be manually removed.
|
||||
|
||||
|
||||
## Tuning Postgres
|
||||
|
||||
The default settings should be fine for most deployments. For larger
|
||||
@@ -87,19 +104,41 @@ of free memory the database host has available.
|
||||
When you are ready to start using PostgreSQL, edit the `database`
|
||||
section in your config file to match the following lines:
|
||||
|
||||
database:
|
||||
name: psycopg2
|
||||
args:
|
||||
user: <user>
|
||||
password: <pass>
|
||||
database: <db>
|
||||
host: <host>
|
||||
cp_min: 5
|
||||
cp_max: 10
|
||||
```yaml
|
||||
database:
|
||||
name: psycopg2
|
||||
args:
|
||||
user: <user>
|
||||
password: <pass>
|
||||
database: <db>
|
||||
host: <host>
|
||||
cp_min: 5
|
||||
cp_max: 10
|
||||
```
|
||||
|
||||
All key/value pairs in `args` are passed to the `psycopg2.connect(..)`
|
||||
function, except keys beginning with `cp_`, which are consumed by the
|
||||
twisted adbapi connection pool.
|
||||
twisted adbapi connection pool. See the [libpq
|
||||
documentation](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS)
|
||||
for a list of options which can be passed.
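As a rough illustration of that split (this is not Synapse's actual code, just a sketch of
the mechanism):

```python
# Sketch: keys starting with "cp_" configure the twisted adbapi ConnectionPool,
# everything else is handed to psycopg2.connect() as libpq-style keywords.
db_args = {
    "user": "synapse_user",
    "password": "secretpassword",
    "database": "synapse",
    "host": "localhost",
    "cp_min": 5,
    "cp_max": 10,
}

pool_opts = {k: v for k, v in db_args.items() if k.startswith("cp_")}
connect_kwargs = {k: v for k, v in db_args.items() if not k.startswith("cp_")}

# psycopg2.connect(**connect_kwargs) would receive user/password/database/host,
# while cp_min and cp_max size the connection pool.
```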
|
||||
|
||||
You should consider tuning the `args.keepalives_*` options if there is any danger of
|
||||
the connection between your homeserver and database dropping, otherwise Synapse
|
||||
may block for an extended period while it waits for a response from the
|
||||
database server. Example values might be:
|
||||
|
||||
```yaml
|
||||
# seconds of inactivity after which TCP should send a keepalive message to the server
|
||||
keepalives_idle: 10
|
||||
|
||||
# the number of seconds after which a TCP keepalive message that is not
|
||||
# acknowledged by the server should be retransmitted
|
||||
keepalives_interval: 10
|
||||
|
||||
# the number of TCP keepalives that can be lost before the client's connection
|
||||
# to the server is considered dead
|
||||
keepalives_count: 3
|
||||
```
|
||||
|
||||
## Porting from SQLite
|
||||
|
||||
|
||||
@@ -18,9 +18,10 @@ When setting up a reverse proxy, remember that Matrix clients and other
|
||||
Matrix servers do not necessarily need to connect to your server via the
|
||||
same server name or port. Indeed, clients will use port 443 by default,
|
||||
whereas servers default to port 8448. Where these are different, we
|
||||
refer to the 'client port' and the 'federation port'. See [Setting
|
||||
up federation](federate.md) for more details of the algorithm used for
|
||||
federation connections.
|
||||
refer to the 'client port' and the 'federation port'. See [the Matrix
|
||||
specification](https://matrix.org/docs/spec/server_server/latest#resolving-server-names)
|
||||
for more details of the algorithm used for federation connections, and
|
||||
[delegate.md](<delegate.md>) for instructions on setting up delegation.
|
||||
|
||||
Let's assume that we expect clients to connect to our server at
|
||||
`https://matrix.example.com`, and other servers to connect at
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# The config is maintained as an up-to-date snapshot of the default
|
||||
# This file is maintained as an up-to-date snapshot of the default
|
||||
# homeserver.yaml configuration generated by Synapse.
|
||||
#
|
||||
# It is intended to act as a reference for the default configuration,
|
||||
@@ -10,6 +10,16 @@
|
||||
# homeserver.yaml. Instead, if you are starting from scratch, please generate
|
||||
# a fresh config using Synapse by following the instructions in INSTALL.md.
|
||||
|
||||
################################################################################
|
||||
|
||||
# Configuration file for Synapse.
|
||||
#
|
||||
# This is a YAML file: see [1] for a quick introduction. Note in particular
|
||||
# that *indentation is important*: all the elements of a list or dictionary
|
||||
# should have the same indentation.
|
||||
#
|
||||
# [1] https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html
|
||||
|
||||
## Server ##
|
||||
|
||||
# The domain name of the server, with optional explicit port.
|
||||
@@ -466,6 +476,11 @@ retention:
|
||||
# ACME support: This will configure Synapse to request a valid TLS certificate
|
||||
# for your configured `server_name` via Let's Encrypt.
|
||||
#
|
||||
# Note that ACME v1 is now deprecated, and Synapse currently doesn't support
|
||||
# ACME v2. This means that this feature currently won't work with installs set
|
||||
# up after November 2019. For more info, and alternative solutions, see
|
||||
# https://github.com/matrix-org/synapse/blob/master/docs/ACME.md#deprecation-of-acme-v1
|
||||
#
|
||||
# Note that provisioning a certificate in this way requires port 80 to be
|
||||
# routed to Synapse so that it can complete the http-01 ACME challenge.
|
||||
# By default, if you enable ACME support, Synapse will attempt to listen on
|
||||
@@ -563,13 +578,46 @@ acme:
|
||||
|
||||
## Database ##
|
||||
|
||||
# The 'database' setting defines the database that synapse uses to store all of
|
||||
# its data.
|
||||
#
|
||||
# 'name' gives the database engine to use: either 'sqlite3' (for SQLite) or
|
||||
# 'psycopg2' (for PostgreSQL).
|
||||
#
|
||||
# 'args' gives options which are passed through to the database engine,
|
||||
# except for options starting 'cp_', which are used to configure the Twisted
|
||||
# connection pool. For a reference to valid arguments, see:
|
||||
# * for sqlite: https://docs.python.org/3/library/sqlite3.html#sqlite3.connect
|
||||
# * for postgres: https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS
|
||||
# * for the connection pool: https://twistedmatrix.com/documents/current/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__
|
||||
#
|
||||
#
|
||||
# Example SQLite configuration:
|
||||
#
|
||||
#database:
|
||||
# name: sqlite3
|
||||
# args:
|
||||
# database: /path/to/homeserver.db
|
||||
#
|
||||
#
|
||||
# Example Postgres configuration:
|
||||
#
|
||||
#database:
|
||||
# name: psycopg2
|
||||
# args:
|
||||
# user: synapse
|
||||
# password: secretpassword
|
||||
# database: synapse
|
||||
# host: localhost
|
||||
# cp_min: 5
|
||||
# cp_max: 10
|
||||
#
|
||||
# For more information on using Synapse with Postgres, see `docs/postgres.md`.
|
||||
#
|
||||
database:
|
||||
# The database engine name
|
||||
name: "sqlite3"
|
||||
# Arguments to pass to the engine
|
||||
name: sqlite3
|
||||
args:
|
||||
# Path to the database
|
||||
database: "DATADIR/homeserver.db"
|
||||
database: DATADIR/homeserver.db
|
||||
|
||||
# Number of events to cache in memory.
|
||||
#
|
||||
@@ -1332,6 +1380,25 @@ saml2_config:
|
||||
#
|
||||
#grandfathered_mxid_source_attribute: upn
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
|
||||
# If you *do* uncomment it, you will need to make sure that all the templates
|
||||
# below are in the directory.
|
||||
#
|
||||
# Synapse will look for the following templates in this directory:
|
||||
#
|
||||
# * HTML page to display to users if something goes wrong during the
|
||||
# authentication process: 'saml_error.html'.
|
||||
#
|
||||
# This template doesn't currently need any variable to render.
|
||||
#
|
||||
# You can see the default templates at:
|
||||
# https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
|
||||
#
|
||||
#template_dir: "res/templates"
|
||||
|
||||
|
||||
|
||||
# Enable CAS for registration and login.
|
||||
@@ -1345,6 +1412,56 @@ saml2_config:
|
||||
# # name: value
|
||||
|
||||
|
||||
# Additional settings to use with single-sign on systems such as SAML2 and CAS.
|
||||
#
|
||||
sso:
|
||||
# A list of client URLs which are whitelisted so that the user does not
|
||||
# have to confirm giving access to their account to the URL. Any client
|
||||
# whose URL starts with an entry in the following list will not be subject
|
||||
# to an additional confirmation step after the SSO login is completed.
|
||||
#
|
||||
# WARNING: An entry such as "https://my.client" is insecure, because it
|
||||
# will also match "https://my.client.evil.site", exposing your users to
|
||||
# phishing attacks from evil.site. To avoid this, include a slash after the
|
||||
# hostname: "https://my.client/".
|
||||
#
|
||||
# By default, this list is empty.
|
||||
#
|
||||
#client_whitelist:
|
||||
# - https://riot.im/develop
|
||||
# - https://my.custom.client/
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
|
||||
# If you *do* uncomment it, you will need to make sure that all the templates
|
||||
# below are in the directory.
|
||||
#
|
||||
# Synapse will look for the following templates in this directory:
|
||||
#
|
||||
# * HTML page for a confirmation step before redirecting back to the client
|
||||
# with the login token: 'sso_redirect_confirm.html'.
|
||||
#
|
||||
# When rendering, this template is given three variables:
|
||||
# * redirect_url: the URL the user is about to be redirected to. Needs
|
||||
# manual escaping (see
|
||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
||||
#
|
||||
# * display_url: the same as `redirect_url`, but with the query
|
||||
# parameters stripped. The intention is to have a
|
||||
# human-readable URL to show to users, not to use it as
|
||||
# the final address to redirect to. Needs manual escaping
|
||||
# (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
||||
#
|
||||
# * server_name: the homeserver's name.
|
||||
#
|
||||
# You can see the default templates at:
|
||||
# https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
|
||||
#
|
||||
#template_dir: "res/templates"
|
||||
|
||||
|
||||
# The JWT needs to contain a globally unique "sub" (subject) claim.
|
||||
#
|
||||
#jwt_config:
|
||||
@@ -1394,10 +1511,6 @@ email:
|
||||
#
|
||||
#require_transport_security: true
|
||||
|
||||
# Enable sending emails for messages that the user has missed
|
||||
#
|
||||
#enable_notifs: false
|
||||
|
||||
# notif_from defines the "From" address to use when sending emails.
|
||||
# It must be set if email sending is enabled.
|
||||
#
|
||||
@@ -1415,6 +1528,11 @@ email:
|
||||
#
|
||||
#app_name: my_branded_matrix_server
|
||||
|
||||
# Uncomment the following to enable sending emails for messages that the user
|
||||
# has missed. Disabled by default.
|
||||
#
|
||||
#enable_notifs: true
|
||||
|
||||
# Uncomment the following to disable automatic subscription to email
|
||||
# notifications for new users. Enabled by default.
|
||||
#
|
||||
|
||||
docs/spam_checker.md (new file)
@@ -0,0 +1,88 @@
|
||||
# Handling spam in Synapse
|
||||
|
||||
Synapse has support to customize spam checking behavior. It can plug into a
|
||||
variety of events and affect how they are presented to users on your homeserver.
|
||||
|
||||
The spam checking behavior is implemented as a Python class, which must be
importable by the running Synapse.
|
||||
|
||||
## Python spam checker class
|
||||
|
||||
The Python class is instantiated with two objects:
|
||||
|
||||
* Any configuration (see below).
|
||||
* An instance of `synapse.spam_checker_api.SpamCheckerApi`.
|
||||
|
||||
It then implements methods which return a boolean to alter behavior in Synapse.
|
||||
|
||||
There's a generic method for checking every event (`check_event_for_spam`), as
|
||||
well as some specific methods:
|
||||
|
||||
* `user_may_invite`
|
||||
* `user_may_create_room`
|
||||
* `user_may_create_room_alias`
|
||||
* `user_may_publish_room`
|
||||
|
||||
The details of each of these methods (as well as their inputs and outputs)
are documented in the `synapse.events.spamcheck.SpamChecker` class.
|
||||
|
||||
The `SpamCheckerApi` class provides a way for the custom spam checker class to
|
||||
call back into the homeserver internals. It currently implements the following
|
||||
methods:
|
||||
|
||||
* `get_state_events_in_room`
|
||||
|
||||
### Example
|
||||
|
||||
```python
|
||||
class ExampleSpamChecker:
|
||||
def __init__(self, config, api):
|
||||
self.config = config
|
||||
self.api = api
|
||||
|
||||
def check_event_for_spam(self, foo):
|
||||
return False # allow all events
|
||||
|
||||
def user_may_invite(self, inviter_userid, invitee_userid, room_id):
|
||||
return True # allow all invites
|
||||
|
||||
def user_may_create_room(self, userid):
|
||||
return True # allow all room creations
|
||||
|
||||
def user_may_create_room_alias(self, userid, room_alias):
|
||||
return True # allow all room aliases
|
||||
|
||||
def user_may_publish_room(self, userid, room_id):
|
||||
return True # allow publishing of all rooms
|
||||
|
||||
def check_username_for_spam(self, user_profile):
|
||||
return False # allow all usernames
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Modify the `spam_checker` section of your `homeserver.yaml` in the following
|
||||
manner:
|
||||
|
||||
`module` should point to the fully qualified Python class that implements your
|
||||
custom logic, e.g. `my_module.ExampleSpamChecker`.
|
||||
|
||||
`config` is a dictionary that gets passed to the spam checker class.
|
||||
|
||||
### Example
|
||||
|
||||
This section might look like:
|
||||
|
||||
```yaml
|
||||
spam_checker:
|
||||
module: my_module.ExampleSpamChecker
|
||||
config:
|
||||
# Enable or disable a specific option in ExampleSpamChecker.
|
||||
my_custom_option: true
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
The [Mjolnir](https://github.com/matrix-org/mjolnir) project is a full-fledged
|
||||
example using the Synapse spam checking API, including a bot for dynamic
|
||||
configuration.
|
||||
@@ -14,16 +14,16 @@ example flow would be (where '>' indicates master to worker and
|
||||
'<' worker to master flows):
|
||||
|
||||
> SERVER example.com
|
||||
< REPLICATE events 53
|
||||
< REPLICATE
|
||||
> POSITION events 53
|
||||
> RDATA events 54 ["$foo1:bar.com", ...]
|
||||
> RDATA events 55 ["$foo4:bar.com", ...]
|
||||
|
||||
The example shows the server accepting a new connection and sending its
|
||||
identity with the `SERVER` command, followed by the client asking to
|
||||
subscribe to the `events` stream from the token `53`. The server then
|
||||
periodically sends `RDATA` commands which have the format
|
||||
`RDATA <stream_name> <token> <row>`, where the format of `<row>` is
|
||||
defined by the individual streams.
|
||||
The example shows the server accepting a new connection and sending its identity
|
||||
with the `SERVER` command, followed by the client asking the server to respond with the
|
||||
position of all streams. The server then periodically sends `RDATA` commands
|
||||
which have the format `RDATA <stream_name> <token> <row>`, where the format of
|
||||
`<row>` is defined by the individual streams.
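As a concrete (if rough) illustration, a minimal client for this handshake might look like
the sketch below; the host and port are assumptions and should be whatever replication
listener you have configured:

```python
# Rough sketch of speaking the replication protocol by hand, much as the docs
# suggest doing with netcat. Host/port are placeholders for your own listener.
import socket

with socket.create_connection(("localhost", 9092)) as sock:
    f = sock.makefile("rw", encoding="utf8", newline="\n")
    f.write("NAME manual-test-client\n")
    f.write("REPLICATE\n")  # ask for the current position of all streams
    f.flush()

    for line in f:
        # Expect SERVER, then a POSITION line per stream, then RDATA updates.
        print(line.rstrip("\n"))
```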
|
||||
|
||||
Error reporting happens by either the client or server sending an ERROR
|
||||
command, and usually the connection will be closed.
|
||||
@@ -32,9 +32,6 @@ Since the protocol is a simple line based, its possible to manually
|
||||
connect to the server using a tool like netcat. A few things should be
|
||||
noted when manually using the protocol:
|
||||
|
||||
- When subscribing to a stream using `REPLICATE`, the special token
|
||||
`NOW` can be used to get all future updates. The special stream name
|
||||
`ALL` can be used with `NOW` to subscribe to all available streams.
|
||||
- The federation stream is only available if federation sending has
|
||||
been disabled on the main process.
|
||||
- The server will only time connections out that have sent a `PING`
|
||||
@@ -91,9 +88,7 @@ The client:
|
||||
- Sends a `NAME` command, allowing the server to associate a human
|
||||
friendly name with the connection. This is optional.
|
||||
- Sends a `PING` as above
|
||||
- For each stream the client wishes to subscribe to it sends a
|
||||
`REPLICATE` with the `stream_name` and token it wants to subscribe
|
||||
from.
|
||||
- Sends a `REPLICATE` to get the current position of all streams.
|
||||
- On receipt of a `SERVER` command, checks that the server name
|
||||
matches the expected server name.
|
||||
|
||||
@@ -140,9 +135,7 @@ the wire:
|
||||
> PING 1490197665618
|
||||
< NAME synapse.app.appservice
|
||||
< PING 1490197665618
|
||||
< REPLICATE events 1
|
||||
< REPLICATE backfill 1
|
||||
< REPLICATE caches 1
|
||||
< REPLICATE
|
||||
> POSITION events 1
|
||||
> POSITION backfill 1
|
||||
> POSITION caches 1
|
||||
@@ -181,9 +174,9 @@ client (C):
|
||||
|
||||
#### POSITION (S)
|
||||
|
||||
The position of the stream has been updated. Sent to the client
|
||||
after all missing updates for a stream have been sent to the client
|
||||
and they're now up to date.
|
||||
On receipt of a POSITION command clients should check if they have missed any
|
||||
updates, and if so then fetch them out of band. Sent in response to a
|
||||
REPLICATE command (but can happen at any time).
|
||||
|
||||
#### ERROR (S, C)
|
||||
|
||||
@@ -199,20 +192,7 @@ client (C):
|
||||
|
||||
#### REPLICATE (C)
|
||||
|
||||
Asks the server to replicate a given stream. The syntax is:
|
||||
|
||||
```
|
||||
REPLICATE <stream_name> <token>
|
||||
```
|
||||
|
||||
Where `<token>` may be either:
|
||||
* a numeric stream_id to stream updates since (exclusive)
|
||||
* `NOW` to stream all subsequent updates.
|
||||
|
||||
The `<stream_name>` is the name of a replication stream to subscribe
|
||||
to (see [here](../synapse/replication/tcp/streams/_base.py) for a list
|
||||
of streams). It can also be `ALL` to subscribe to all known streams,
|
||||
in which case the `<token>` must be set to `NOW`.
|
||||
Asks the server for the current position of all streams.
|
||||
|
||||
#### USER_SYNC (C)
|
||||
|
||||
@@ -254,6 +234,11 @@ and they key to invalidate. For example:
|
||||
|
||||
> RDATA caches 550953771 ["get_user_by_id", ["@bob:example.com"], 1550574873251]
|
||||
|
||||
Alternatively, an entire cache can be invalidated by sending down a `null`
|
||||
instead of the key. For example:
|
||||
|
||||
> RDATA caches 550953772 ["get_user_by_id", null, 1550574873252]
|
||||
|
||||
However, there are times when a number of caches need to be invalidated
|
||||
at the same time with the same key. To reduce traffic we batch those
|
||||
invalidations into a single poke by defining a special cache name that
|
||||
|
||||
@@ -176,15 +176,34 @@ endpoints matching the following regular expressions:
|
||||
^/_matrix/federation/v1/query_auth/
|
||||
^/_matrix/federation/v1/event_auth/
|
||||
^/_matrix/federation/v1/exchange_third_party_invite/
|
||||
^/_matrix/federation/v1/user/devices/
|
||||
^/_matrix/federation/v1/send/
|
||||
^/_matrix/federation/v1/get_groups_publicised$
|
||||
^/_matrix/key/v2/query
|
||||
|
||||
Additionally, the following REST endpoints can be handled for GET requests:
|
||||
|
||||
^/_matrix/federation/v1/groups/
|
||||
|
||||
The above endpoints should all be routed to the federation_reader worker by the
|
||||
reverse-proxy configuration.
|
||||
|
||||
The `^/_matrix/federation/v1/send/` endpoint must only be handled by a single
|
||||
instance.
|
||||
|
||||
Note that `federation` must be added to the listener resources in the worker config:
|
||||
|
||||
```yaml
|
||||
worker_app: synapse.app.federation_reader
|
||||
...
|
||||
worker_listeners:
|
||||
- type: http
|
||||
port: <port>
|
||||
resources:
|
||||
- names:
|
||||
- federation
|
||||
```
|
||||
|
||||
### `synapse.app.federation_sender`
|
||||
|
||||
Handles sending federation traffic to other servers. Doesn't handle any
|
||||
@@ -241,15 +260,20 @@ following regular expressions:
|
||||
^/_matrix/client/(api/v1|r0|unstable)/keys/changes$
|
||||
^/_matrix/client/versions$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/voip/turnServer$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/joined_groups$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups/
|
||||
|
||||
Additionally, the following REST endpoints can be handled for GET requests:
|
||||
|
||||
^/_matrix/client/(api/v1|r0|unstable)/pushrules/.*$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/groups/.*$
|
||||
|
||||
Additionally, the following REST endpoints can be handled, but all requests must
|
||||
be routed to the same instance:
|
||||
|
||||
^/_matrix/client/(r0|unstable)/register$
|
||||
^/_matrix/client/(r0|unstable)/auth/.*/fallback/web$
|
||||
|
||||
Pagination requests can also be handled, but all requests for the same room
path must be routed to the same instance. Additionally, care must be taken to
|
||||
@@ -265,6 +289,10 @@ the following regular expressions:
|
||||
|
||||
^/_matrix/client/(api/v1|r0|unstable)/user_directory/search$
|
||||
|
||||
When using this worker you must also set `update_user_directory: False` in the
shared configuration file to stop the main Synapse process from running background
jobs related to updating the user directory.
|
||||
|
||||
### `synapse.app.frontend_proxy`
|
||||
|
||||
Proxies some frequently-requested client endpoints to add caching and remove
|
||||
|
||||
mypy.ini
@@ -7,6 +7,9 @@ show_error_codes = True
|
||||
show_traceback = True
|
||||
mypy_path = stubs
|
||||
|
||||
[mypy-pymacaroons.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-zope]
|
||||
ignore_missing_imports = True
|
||||
|
||||
@@ -63,3 +66,12 @@ ignore_missing_imports = True
|
||||
|
||||
[mypy-sentry_sdk]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-PIL.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-lxml]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-jwt.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
@@ -3,7 +3,8 @@
|
||||
# Exits with 0 if there are no problems, or another code otherwise.
|
||||
|
||||
# Fix non-lowercase true/false values
|
||||
sed -i -E "s/: +True/: true/g; s/: +False/: false/g;" docs/sample_config.yaml
|
||||
sed -i.bak -E "s/: +True/: true/g; s/: +False/: false/g;" docs/sample_config.yaml
|
||||
rm docs/sample_config.yaml.bak
|
||||
|
||||
# Check if anything changed
|
||||
git diff --exit-code docs/sample_config.yaml
|
||||
|
||||
@@ -103,7 +103,7 @@ def main():
|
||||
|
||||
yaml.safe_dump(result, sys.stdout, default_flow_style=False)
|
||||
|
||||
rows = list(row for server, json in result.items() for row in rows_v2(server, json))
|
||||
rows = [row for server, json in result.items() for row in rows_v2(server, json)]
|
||||
|
||||
cursor = connection.cursor()
|
||||
cursor.executemany(
|
||||
|
||||
@@ -22,10 +22,12 @@ import yaml
|
||||
|
||||
from twisted.internet import defer, reactor
|
||||
|
||||
import synapse
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||
from synapse.server import HomeServer
|
||||
from synapse.storage import DataStore
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger("update_database")
|
||||
|
||||
@@ -38,6 +40,8 @@ class MockHomeserver(HomeServer):
|
||||
config.server_name, reactor=reactor, config=config, **kwargs
|
||||
)
|
||||
|
||||
self.version_string = "Synapse/"+get_version_string(synapse)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
@@ -81,15 +85,17 @@ if __name__ == "__main__":
|
||||
hs.setup()
|
||||
store = hs.get_datastore()
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def run_background_updates():
|
||||
yield store.db.updates.run_background_updates(sleep=False)
|
||||
async def run_background_updates():
|
||||
await store.db.updates.run_background_updates(sleep=False)
|
||||
# Stop the reactor to exit the script once every background update is run.
|
||||
reactor.stop()
|
||||
|
||||
# Apply all background updates on the database.
|
||||
reactor.callWhenRunning(
|
||||
lambda: run_as_background_process("background_updates", run_background_updates)
|
||||
)
|
||||
def run():
|
||||
# Apply all background updates on the database.
|
||||
defer.ensureDeferred(
|
||||
run_as_background_process("background_updates", run_background_updates)
|
||||
)
|
||||
|
||||
reactor.callWhenRunning(run)
|
||||
|
||||
reactor.run()
|
||||
|
||||
@@ -52,7 +52,7 @@ if __name__ == "__main__":
|
||||
if "config" in args and args.config:
|
||||
config = yaml.safe_load(args.config)
|
||||
bcrypt_rounds = config.get("bcrypt_rounds", bcrypt_rounds)
|
||||
password_config = config.get("password_config", {})
|
||||
password_config = config.get("password_config", None) or {}
|
||||
password_pepper = password_config.get("pepper", password_pepper)
|
||||
password = args.password
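The motivation for the change above is that a `password_config:` section which is present
but left empty is loaded from YAML as `None`, and `dict.get` only falls back to its default
when the key is missing. A standalone illustration (not part of the script itself):

```python
import yaml

config = yaml.safe_load("password_config:\n")  # an empty section parses as None

# .get() returns the stored None rather than the {} default...
assert config.get("password_config", {}) is None

# ...so the explicit `or {}` fallback is needed before calling .get() on it.
password_config = config.get("password_config", None) or {}
assert password_config.get("pepper", "") == ""
```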
|
||||
|
||||
|
||||
@@ -27,13 +27,16 @@ from six import string_types
|
||||
|
||||
import yaml
|
||||
|
||||
from twisted.enterprise import adbapi
|
||||
from twisted.internet import defer, reactor
|
||||
|
||||
import synapse
|
||||
from synapse.config.database import DatabaseConnectionConfig
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.logging.context import PreserveLoggingContext
|
||||
from synapse.storage._base import LoggingTransaction
|
||||
from synapse.logging.context import (
|
||||
LoggingContext,
|
||||
make_deferred_yieldable,
|
||||
run_in_background,
|
||||
)
|
||||
from synapse.storage.data_stores.main.client_ips import ClientIpBackgroundUpdateStore
|
||||
from synapse.storage.data_stores.main.deviceinbox import (
|
||||
DeviceInboxBackgroundUpdateStore,
|
||||
@@ -61,6 +64,7 @@ from synapse.storage.database import Database, make_conn
|
||||
from synapse.storage.engines import create_engine
|
||||
from synapse.storage.prepare_database import prepare_database
|
||||
from synapse.util import Clock
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger("synapse_port_db")
|
||||
|
||||
@@ -125,6 +129,13 @@ APPEND_ONLY_TABLES = [
|
||||
]
|
||||
|
||||
|
||||
# Error returned by the run function. Used at the top-level part of the script to
|
||||
# handle errors and return codes.
|
||||
end_error = None
|
||||
# The exec_info for the error, if any. If error is defined but not exec_info the script
|
||||
# will show only the error message without the stacktrace, if exec_info is defined but
|
||||
# not the error then the script will show nothing outside of what's printed in the run
|
||||
# function. If both are defined, the script will print both the error and the stacktrace.
|
||||
end_error_exec_info = None
|
||||
|
||||
|
||||
@@ -177,6 +188,7 @@ class MockHomeserver:
|
||||
self.clock = Clock(reactor)
|
||||
self.config = config
|
||||
self.hostname = config.server_name
|
||||
self.version_string = "Synapse/"+get_version_string(synapse)
|
||||
|
||||
def get_clock(self):
|
||||
return self.clock
|
||||
@@ -189,11 +201,10 @@ class Porter(object):
|
||||
def __init__(self, **kwargs):
|
||||
self.__dict__.update(kwargs)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def setup_table(self, table):
|
||||
async def setup_table(self, table):
|
||||
if table in APPEND_ONLY_TABLES:
|
||||
# It's safe to just carry on inserting.
|
||||
row = yield self.postgres_store.db.simple_select_one(
|
||||
row = await self.postgres_store.db.simple_select_one(
|
||||
table="port_from_sqlite3",
|
||||
keyvalues={"table_name": table},
|
||||
retcols=("forward_rowid", "backward_rowid"),
|
||||
@@ -207,10 +218,10 @@ class Porter(object):
|
||||
forward_chunk,
|
||||
already_ported,
|
||||
total_to_port,
|
||||
) = yield self._setup_sent_transactions()
|
||||
) = await self._setup_sent_transactions()
|
||||
backward_chunk = 0
|
||||
else:
|
||||
yield self.postgres_store.db.simple_insert(
|
||||
await self.postgres_store.db.simple_insert(
|
||||
table="port_from_sqlite3",
|
||||
values={
|
||||
"table_name": table,
|
||||
@@ -227,7 +238,7 @@ class Porter(object):
|
||||
backward_chunk = row["backward_rowid"]
|
||||
|
||||
if total_to_port is None:
|
||||
already_ported, total_to_port = yield self._get_total_count_to_port(
|
||||
already_ported, total_to_port = await self._get_total_count_to_port(
|
||||
table, forward_chunk, backward_chunk
|
||||
)
|
||||
else:
|
||||
@@ -238,9 +249,9 @@ class Porter(object):
|
||||
)
|
||||
txn.execute("TRUNCATE %s CASCADE" % (table,))
|
||||
|
||||
yield self.postgres_store.execute(delete_all)
|
||||
await self.postgres_store.execute(delete_all)
|
||||
|
||||
yield self.postgres_store.db.simple_insert(
|
||||
await self.postgres_store.db.simple_insert(
|
||||
table="port_from_sqlite3",
|
||||
values={"table_name": table, "forward_rowid": 1, "backward_rowid": 0},
|
||||
)
|
||||
@@ -248,16 +259,13 @@ class Porter(object):
|
||||
forward_chunk = 1
|
||||
backward_chunk = 0
|
||||
|
||||
already_ported, total_to_port = yield self._get_total_count_to_port(
|
||||
already_ported, total_to_port = await self._get_total_count_to_port(
|
||||
table, forward_chunk, backward_chunk
|
||||
)
|
||||
|
||||
defer.returnValue(
|
||||
(table, already_ported, total_to_port, forward_chunk, backward_chunk)
|
||||
)
|
||||
return table, already_ported, total_to_port, forward_chunk, backward_chunk
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def handle_table(
|
||||
async def handle_table(
|
||||
self, table, postgres_size, table_size, forward_chunk, backward_chunk
|
||||
):
|
||||
logger.info(
|
||||
@@ -275,7 +283,7 @@ class Porter(object):
|
||||
self.progress.add_table(table, postgres_size, table_size)
|
||||
|
||||
if table == "event_search":
|
||||
yield self.handle_search_table(
|
||||
await self.handle_search_table(
|
||||
postgres_size, table_size, forward_chunk, backward_chunk
|
||||
)
|
||||
return
|
||||
@@ -294,7 +302,7 @@ class Porter(object):
|
||||
if table == "user_directory_stream_pos":
|
||||
# We need to make sure there is a single row, `(X, null), as that is
|
||||
# what synapse expects to be there.
|
||||
yield self.postgres_store.db.simple_insert(
|
||||
await self.postgres_store.db.simple_insert(
|
||||
table=table, values={"stream_id": None}
|
||||
)
|
||||
self.progress.update(table, table_size) # Mark table as done
|
||||
@@ -335,7 +343,7 @@ class Porter(object):
|
||||
|
||||
return headers, forward_rows, backward_rows
|
||||
|
||||
headers, frows, brows = yield self.sqlite_store.db.runInteraction(
|
||||
headers, frows, brows = await self.sqlite_store.db.runInteraction(
|
||||
"select", r
|
||||
)
|
||||
|
||||
@@ -361,7 +369,7 @@ class Porter(object):
|
||||
},
|
||||
)
|
||||
|
||||
yield self.postgres_store.execute(insert)
|
||||
await self.postgres_store.execute(insert)
|
||||
|
||||
postgres_size += len(rows)
|
||||
|
||||
@@ -369,8 +377,7 @@ class Porter(object):
|
||||
else:
|
||||
return
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def handle_search_table(
|
||||
async def handle_search_table(
|
||||
self, postgres_size, table_size, forward_chunk, backward_chunk
|
||||
):
|
||||
select = (
|
||||
@@ -390,7 +397,7 @@ class Porter(object):
|
||||
|
||||
return headers, rows
|
||||
|
||||
headers, rows = yield self.sqlite_store.db.runInteraction("select", r)
|
||||
headers, rows = await self.sqlite_store.db.runInteraction("select", r)
|
||||
|
||||
if rows:
|
||||
forward_chunk = rows[-1][0] + 1
|
||||
@@ -438,7 +445,7 @@ class Porter(object):
|
||||
},
|
||||
)
|
||||
|
||||
yield self.postgres_store.execute(insert)
|
||||
await self.postgres_store.execute(insert)
|
||||
|
||||
postgres_size += len(rows)
|
||||
|
||||
@@ -476,11 +483,10 @@ class Porter(object):
|
||||
|
||||
return store
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def run_background_updates_on_postgres(self):
|
||||
async def run_background_updates_on_postgres(self):
|
||||
# Manually apply all background updates on the PostgreSQL database.
|
||||
postgres_ready = (
|
||||
yield self.postgres_store.db.updates.has_completed_background_updates()
|
||||
await self.postgres_store.db.updates.has_completed_background_updates()
|
||||
)
|
||||
|
||||
if not postgres_ready:
|
||||
@@ -489,13 +495,20 @@ class Porter(object):
|
||||
self.progress.set_state("Running background updates on PostgreSQL")
|
||||
|
||||
while not postgres_ready:
|
||||
yield self.postgres_store.db.updates.do_next_background_update(100)
|
||||
postgres_ready = yield (
|
||||
await self.postgres_store.db.updates.do_next_background_update(100)
|
||||
postgres_ready = await (
|
||||
self.postgres_store.db.updates.has_completed_background_updates()
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def run(self):
|
||||
async def run(self):
|
||||
"""Ports the SQLite database to a PostgreSQL database.
|
||||
|
||||
When a fatal error is met, its message is assigned to the global "end_error"
|
||||
variable. When this error comes with a stacktrace, its exec_info is assigned to
|
||||
the global "end_error_exec_info" variable.
|
||||
"""
|
||||
global end_error
|
||||
|
||||
try:
|
||||
# we allow people to port away from outdated versions of sqlite.
|
||||
self.sqlite_store = self.build_db_store(
|
||||
@@ -505,21 +518,21 @@ class Porter(object):
|
||||
|
||||
# Check if all background updates are done, abort if not.
|
||||
updates_complete = (
|
||||
yield self.sqlite_store.db.updates.has_completed_background_updates()
|
||||
await self.sqlite_store.db.updates.has_completed_background_updates()
|
||||
)
|
||||
if not updates_complete:
|
||||
sys.stderr.write(
|
||||
end_error = (
|
||||
"Pending background updates exist in the SQLite3 database."
|
||||
" Please start Synapse again and wait until every update has finished"
|
||||
" before running this script.\n"
|
||||
)
|
||||
defer.returnValue(None)
|
||||
return
|
||||
|
||||
self.postgres_store = self.build_db_store(
|
||||
self.hs_config.get_single_database()
|
||||
)
|
||||
|
||||
yield self.run_background_updates_on_postgres()
|
||||
await self.run_background_updates_on_postgres()
|
||||
|
||||
self.progress.set_state("Creating port tables")
|
||||
|
||||
@@ -547,22 +560,22 @@ class Porter(object):
|
||||
)
|
||||
|
||||
try:
|
||||
yield self.postgres_store.db.runInteraction("alter_table", alter_table)
|
||||
await self.postgres_store.db.runInteraction("alter_table", alter_table)
|
||||
except Exception:
|
||||
# On Error Resume Next
|
||||
pass
|
||||
|
||||
yield self.postgres_store.db.runInteraction(
|
||||
await self.postgres_store.db.runInteraction(
|
||||
"create_port_table", create_port_table
|
||||
)
|
||||
|
||||
# Step 2. Get tables.
|
||||
self.progress.set_state("Fetching tables")
|
||||
sqlite_tables = yield self.sqlite_store.db.simple_select_onecol(
|
||||
sqlite_tables = await self.sqlite_store.db.simple_select_onecol(
|
||||
table="sqlite_master", keyvalues={"type": "table"}, retcol="name"
|
||||
)
|
||||
|
||||
postgres_tables = yield self.postgres_store.db.simple_select_onecol(
|
||||
postgres_tables = await self.postgres_store.db.simple_select_onecol(
|
||||
table="information_schema.tables",
|
||||
keyvalues={},
|
||||
retcol="distinct table_name",
|
||||
@@ -573,28 +586,34 @@ class Porter(object):
|
||||
|
||||
# Step 3. Figure out what still needs copying
|
||||
self.progress.set_state("Checking on port progress")
|
||||
setup_res = yield defer.gatherResults(
|
||||
[
|
||||
self.setup_table(table)
|
||||
for table in tables
|
||||
if table not in ["schema_version", "applied_schema_deltas"]
|
||||
and not table.startswith("sqlite_")
|
||||
],
|
||||
consumeErrors=True,
|
||||
setup_res = await make_deferred_yieldable(
|
||||
defer.gatherResults(
|
||||
[
|
||||
run_in_background(self.setup_table, table)
|
||||
for table in tables
|
||||
if table not in ["schema_version", "applied_schema_deltas"]
|
||||
and not table.startswith("sqlite_")
|
||||
],
|
||||
consumeErrors=True,
|
||||
)
|
||||
)
|
||||
|
||||
# Step 4. Do the copying.
|
||||
self.progress.set_state("Copying to postgres")
|
||||
yield defer.gatherResults(
|
||||
[self.handle_table(*res) for res in setup_res], consumeErrors=True
|
||||
await make_deferred_yieldable(
|
||||
defer.gatherResults(
|
||||
[run_in_background(self.handle_table, *res) for res in setup_res],
|
||||
consumeErrors=True,
|
||||
)
|
||||
)
|
||||
|
||||
# Step 5. Do final post-processing
|
||||
yield self._setup_state_group_id_seq()
|
||||
await self._setup_state_group_id_seq()
|
||||
|
||||
self.progress.done()
|
||||
except Exception:
|
||||
except Exception as e:
|
||||
global end_error_exec_info
|
||||
end_error = e
|
||||
end_error_exec_info = sys.exc_info()
|
||||
logger.exception("")
|
||||
finally:
|
||||
@@ -634,8 +653,7 @@ class Porter(object):
|
||||
|
||||
return outrows
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _setup_sent_transactions(self):
|
||||
async def _setup_sent_transactions(self):
|
||||
# Only save things from the last day
|
||||
yesterday = int(time.time() * 1000) - 86400000
|
||||
|
||||
@@ -656,7 +674,7 @@ class Porter(object):
|
||||
|
||||
return headers, [r for r in rows if r[ts_ind] < yesterday]
|
||||
|
||||
headers, rows = yield self.sqlite_store.db.runInteraction("select", r)
|
||||
headers, rows = await self.sqlite_store.db.runInteraction("select", r)
|
||||
|
||||
rows = self._convert_rows("sent_transactions", headers, rows)
|
||||
|
||||
@@ -669,7 +687,7 @@ class Porter(object):
|
||||
txn, "sent_transactions", headers[1:], rows
|
||||
)
|
||||
|
||||
yield self.postgres_store.execute(insert)
|
||||
await self.postgres_store.execute(insert)
|
||||
else:
|
||||
max_inserted_rowid = 0
|
||||
|
||||
@@ -686,10 +704,10 @@ class Porter(object):
|
||||
else:
|
||||
return 1
|
||||
|
||||
next_chunk = yield self.sqlite_store.execute(get_start_id)
|
||||
next_chunk = await self.sqlite_store.execute(get_start_id)
|
||||
next_chunk = max(max_inserted_rowid + 1, next_chunk)
|
||||
|
||||
yield self.postgres_store.db.simple_insert(
|
||||
await self.postgres_store.db.simple_insert(
|
||||
table="port_from_sqlite3",
|
||||
values={
|
||||
"table_name": "sent_transactions",
|
||||
@@ -705,46 +723,49 @@ class Porter(object):
|
||||
(size,) = txn.fetchone()
|
||||
return int(size)
|
||||
|
||||
remaining_count = yield self.sqlite_store.execute(get_sent_table_size)
|
||||
remaining_count = await self.sqlite_store.execute(get_sent_table_size)
|
||||
|
||||
total_count = remaining_count + inserted_rows
|
||||
|
||||
defer.returnValue((next_chunk, inserted_rows, total_count))
|
||||
return next_chunk, inserted_rows, total_count
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_remaining_count_to_port(self, table, forward_chunk, backward_chunk):
|
||||
frows = yield self.sqlite_store.execute_sql(
|
||||
async def _get_remaining_count_to_port(self, table, forward_chunk, backward_chunk):
|
||||
frows = await self.sqlite_store.execute_sql(
|
||||
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
|
||||
)
|
||||
|
||||
brows = yield self.sqlite_store.execute_sql(
|
||||
brows = await self.sqlite_store.execute_sql(
|
||||
"SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
|
||||
)
|
||||
|
||||
defer.returnValue(frows[0][0] + brows[0][0])
|
||||
return frows[0][0] + brows[0][0]
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_already_ported_count(self, table):
|
||||
rows = yield self.postgres_store.execute_sql(
|
||||
async def _get_already_ported_count(self, table):
|
||||
rows = await self.postgres_store.execute_sql(
|
||||
"SELECT count(*) FROM %s" % (table,)
|
||||
)
|
||||
|
||||
defer.returnValue(rows[0][0])
|
||||
return rows[0][0]
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_total_count_to_port(self, table, forward_chunk, backward_chunk):
|
||||
remaining, done = yield defer.gatherResults(
|
||||
[
|
||||
self._get_remaining_count_to_port(table, forward_chunk, backward_chunk),
|
||||
self._get_already_ported_count(table),
|
||||
],
|
||||
consumeErrors=True,
|
||||
async def _get_total_count_to_port(self, table, forward_chunk, backward_chunk):
|
||||
remaining, done = await make_deferred_yieldable(
|
||||
defer.gatherResults(
|
||||
[
|
||||
run_in_background(
|
||||
self._get_remaining_count_to_port,
|
||||
table,
|
||||
forward_chunk,
|
||||
backward_chunk,
|
||||
),
|
||||
run_in_background(self._get_already_ported_count, table),
|
||||
],
|
||||
)
|
||||
)
|
||||
|
||||
remaining = int(remaining) if remaining else 0
|
||||
done = int(done) if done else 0
|
||||
|
||||
defer.returnValue((done, remaining + done))
|
||||
return done, remaining + done
|
||||
|
||||
def _setup_state_group_id_seq(self):
|
||||
def r(txn):
|
||||
@@ -1010,7 +1031,12 @@ if __name__ == "__main__":
|
||||
hs_config=config,
|
||||
)
|
||||
|
||||
reactor.callWhenRunning(porter.run)
|
||||
@defer.inlineCallbacks
|
||||
def run():
|
||||
with LoggingContext("synapse_port_db_run"):
|
||||
yield defer.ensureDeferred(porter.run())
|
||||
|
||||
reactor.callWhenRunning(run)
|
||||
|
||||
reactor.run()
|
||||
|
||||
@@ -1019,7 +1045,11 @@ if __name__ == "__main__":
|
||||
else:
|
||||
start()
|
||||
|
||||
if end_error_exec_info:
|
||||
exc_type, exc_value, exc_traceback = end_error_exec_info
|
||||
traceback.print_exception(exc_type, exc_value, exc_traceback)
|
||||
if end_error:
|
||||
if end_error_exec_info:
|
||||
exc_type, exc_value, exc_traceback = end_error_exec_info
|
||||
traceback.print_exception(exc_type, exc_value, exc_traceback)
|
||||
|
||||
sys.stderr.write(end_error)
|
||||
|
||||
sys.exit(5)
|
||||
|
||||
@@ -1,20 +1,31 @@
|
||||
name: matrix-synapse
|
||||
base: core18
|
||||
version: git
|
||||
version: git
|
||||
summary: Reference Matrix homeserver
|
||||
description: |
|
||||
Synapse is the reference Matrix homeserver.
|
||||
Matrix is a federated and decentralised instant messaging and VoIP system.
|
||||
|
||||
grade: stable
|
||||
confinement: strict
|
||||
grade: stable
|
||||
confinement: strict
|
||||
|
||||
apps:
|
||||
matrix-synapse:
|
||||
matrix-synapse:
|
||||
command: synctl --no-daemonize start $SNAP_COMMON/homeserver.yaml
|
||||
stop-command: synctl -c $SNAP_COMMON stop
|
||||
plugs: [network-bind, network]
|
||||
daemon: simple
|
||||
daemon: simple
|
||||
hash-password:
|
||||
command: hash_password
|
||||
generate-config:
|
||||
command: generate_config
|
||||
generate-signing-key:
|
||||
command: generate_signing_key.py
|
||||
register-new-matrix-user:
|
||||
command: register_new_matrix_user
|
||||
plugs: [network]
|
||||
synctl:
|
||||
command: synctl
|
||||
parts:
|
||||
matrix-synapse:
|
||||
source: .
|
||||
|
||||
@@ -36,7 +36,7 @@ try:
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
__version__ = "1.9.0.dev1"
|
||||
__version__ = "1.12.0"
|
||||
|
||||
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
|
||||
# We import here so that we don't have to install a bunch of deps when
|
||||
|
||||
@@ -14,6 +14,7 @@
# limitations under the License.

import logging
from typing import Optional

from six import itervalues

@@ -33,7 +34,9 @@ from synapse.api.errors import (
MissingClientTokenError,
ResourceLimitError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.config.server import is_threepid_reserved
from synapse.events import EventBase
from synapse.types import StateMap, UserID
from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache
from synapse.util.caches.lrucache import LruCache
@@ -77,32 +80,48 @@ class Auth(object):
self._account_validity = hs.config.account_validity

@defer.inlineCallbacks
def check_from_context(self, room_version, event, context, do_sig_check=True):
def check_from_context(self, room_version: str, event, context, do_sig_check=True):
prev_state_ids = yield context.get_prev_state_ids()
auth_events_ids = yield self.compute_auth_events(
event, prev_state_ids, for_verification=True
)
auth_events = yield self.store.get_events(auth_events_ids)
auth_events = {(e.type, e.state_key): e for e in itervalues(auth_events)}

room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
event_auth.check(
room_version, event, auth_events=auth_events, do_sig_check=do_sig_check
room_version_obj, event, auth_events=auth_events, do_sig_check=do_sig_check
)

@defer.inlineCallbacks
def check_joined_room(self, room_id, user_id, current_state=None):
"""Check if the user is currently joined in the room
def check_user_in_room(
self,
room_id: str,
user_id: str,
current_state: Optional[StateMap[EventBase]] = None,
allow_departed_users: bool = False,
):
"""Check if the user is in the room, or was at some point.
Args:
room_id(str): The room to check.
user_id(str): The user to check.
current_state(dict): Optional map of the current state of the room.
room_id: The room to check.

user_id: The user to check.

current_state: Optional map of the current state of the room.
If provided then that map is used to check whether they are a
member of the room. Otherwise the current membership is
loaded from the database.

allow_departed_users: if True, accept users that were previously
members but have now departed.

Raises:
AuthError if the user is not in the room.
AuthError if the user is/was not in the room.
Returns:
A deferred membership event for the user if the user is in
the room.
Deferred[Optional[EventBase]]:
Membership event for the user if the user was in the
room. This will be the join event if they are currently joined to
the room. This will be the leave event if they have left the room.
"""
if current_state:
member = current_state.get((EventTypes.Member, user_id), None)
@@ -110,37 +129,19 @@ class Auth(object):
member = yield self.state.get_current_state(
room_id=room_id, event_type=EventTypes.Member, state_key=user_id
)

self._check_joined_room(member, user_id, room_id)
return member

@defer.inlineCallbacks
def check_user_was_in_room(self, room_id, user_id):
"""Check if the user was in the room at some point.
Args:
room_id(str): The room to check.
user_id(str): The user to check.
Raises:
AuthError if the user was never in the room.
Returns:
A deferred membership event for the user if the user was in the
room. This will be the join event if they are currently joined to
the room. This will be the leave event if they have left the room.
"""
member = yield self.state.get_current_state(
room_id=room_id, event_type=EventTypes.Member, state_key=user_id
)
membership = member.membership if member else None

if membership not in (Membership.JOIN, Membership.LEAVE):
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
if membership == Membership.JOIN:
return member

if membership == Membership.LEAVE:
# XXX this looks totally bogus. Why do we not allow users who have been banned,
# or those who were members previously and have been re-invited?
if allow_departed_users and membership == Membership.LEAVE:
forgot = yield self.store.did_forget(user_id, room_id)
if forgot:
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
if not forgot:
return member

return member
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))

@defer.inlineCallbacks
def check_host_in_room(self, room_id, host):
@@ -148,12 +149,6 @@ class Auth(object):
latest_event_ids = yield self.store.is_host_joined(room_id, host)
return latest_event_ids

def _check_joined_room(self, member, user_id, room_id):
if not member or member.membership != Membership.JOIN:
raise AuthError(
403, "User %s not in room %s (%s)" % (user_id, room_id, repr(member))
)

def can_federate(self, event, auth_events):
creation_event = auth_events.get((EventTypes.Create, ""))

@@ -543,13 +538,13 @@ class Auth(object):
return defer.succeed(auth_ids)

@defer.inlineCallbacks
def check_can_change_room_list(self, room_id, user):
"""Check if the user is allowed to edit the room's entry in the
def check_can_change_room_list(self, room_id: str, user: UserID):
"""Determine whether the user is allowed to edit the room's entry in the
published room list.

Args:
room_id (str)
user (UserID)
room_id
user
"""

is_admin = yield self.is_server_admin(user)
@@ -557,11 +552,11 @@ class Auth(object):
return True

user_id = user.to_string()
yield self.check_joined_room(room_id, user_id)
yield self.check_user_in_room(room_id, user_id)

# We currently require the user is a "moderator" in the room. We do this
# by checking if they would (theoretically) be able to change the
# m.room.aliases events
# m.room.canonical_alias events
power_level_event = yield self.state.get_current_state(
room_id, EventTypes.PowerLevels, ""
)
@@ -571,16 +566,11 @@ class Auth(object):
auth_events[(EventTypes.PowerLevels, "")] = power_level_event

send_level = event_auth.get_send_level(
EventTypes.Aliases, "", power_level_event
EventTypes.CanonicalAlias, "", power_level_event
)
user_level = event_auth.get_user_power_level(user_id, auth_events)

if user_level < send_level:
raise AuthError(
403,
"This server requires you to be a moderator in the room to"
" edit its room list entry",
)
return user_level >= send_level

@staticmethod
def has_access_token(request):
@@ -630,10 +620,18 @@ class Auth(object):
return query_params[0].decode("ascii")

@defer.inlineCallbacks
def check_in_room_or_world_readable(self, room_id, user_id):
def check_user_in_room_or_world_readable(
self, room_id: str, user_id: str, allow_departed_users: bool = False
):
"""Checks that the user is or was in the room or the room is world
readable. If it isn't then an exception is raised.

Args:
room_id: room to check
user_id: user to check
allow_departed_users: if True, accept users that were previously
members but have now departed

Returns:
Deferred[tuple[str, str|None]]: Resolves to the current membership of
the user in the room and the membership event ID of the user. If
@@ -642,12 +640,14 @@ class Auth(object):
"""

try:
# check_user_was_in_room will return the most recent membership
# check_user_in_room will return the most recent membership
# event for the user if:
# * The user is a non-guest user, and was ever in the room
# * The user is a guest user, and has joined the room
# else it will throw.
member_event = yield self.check_user_was_in_room(room_id, user_id)
member_event = yield self.check_user_in_room(
room_id, user_id, allow_departed_users=allow_departed_users
)
return member_event.membership, member_event.event_id
except AuthError:
visibility = yield self.state.get_current_state(
@@ -659,7 +659,9 @@ class Auth(object):
):
return Membership.JOIN, None
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
403,
"User %s not in room %s, and room previews are disabled"
% (user_id, room_id),
)

@defer.inlineCallbacks
@@ -77,12 +77,11 @@ class EventTypes(object):
Aliases = "m.room.aliases"
Redaction = "m.room.redaction"
ThirdPartyInvite = "m.room.third_party_invite"
Encryption = "m.room.encryption"
RelatedGroups = "m.room.related_groups"

RoomHistoryVisibility = "m.room.history_visibility"
CanonicalAlias = "m.room.canonical_alias"
Encryption = "m.room.encryption"
Encrypted = "m.room.encrypted"
RoomAvatar = "m.room.avatar"
RoomEncryption = "m.room.encryption"
GuestAccess = "m.room.guest_access"
@@ -66,6 +66,7 @@ class Codes(object):
EXPIRED_ACCOUNT = "ORG_MATRIX_EXPIRED_ACCOUNT"
INVALID_SIGNATURE = "M_INVALID_SIGNATURE"
USER_DEACTIVATED = "M_USER_DEACTIVATED"
BAD_ALIAS = "M_BAD_ALIAS"


class CodeMessageException(RuntimeError):
@@ -402,11 +403,9 @@ class UnsupportedRoomVersionError(SynapseError):
"""The client's request to create a room used a room version that the server does
not support."""

def __init__(self):
def __init__(self, msg="Homeserver does not support this room version"):
super(UnsupportedRoomVersionError, self).__init__(
code=400,
msg="Homeserver does not support this room version",
errcode=Codes.UNSUPPORTED_ROOM_VERSION,
code=400, msg=msg, errcode=Codes.UNSUPPORTED_ROOM_VERSION,
)
@@ -15,6 +15,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List

from six import text_type

import jsonschema
@@ -293,7 +295,7 @@ class Filter(object):
room_id = None
ev_type = "m.presence"
contains_url = False
labels = []
labels = []  # type: List[str]
else:
sender = event.get("sender", None)
if not sender:
@@ -12,7 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
from collections import OrderedDict
from typing import Any, Optional, Tuple

from synapse.api.errors import LimitExceededError

@@ -23,7 +24,9 @@ class Ratelimiter(object):
"""

def __init__(self):
self.message_counts = collections.OrderedDict()
self.message_counts = (
OrderedDict()
)  # type: OrderedDict[Any, Tuple[float, int, Optional[float]]]

def can_do_action(self, key, time_now_s, rate_hz, burst_count, update=True):
"""Can the entity (e.g. user or IP address) perform the action?
@@ -57,6 +57,9 @@ class RoomVersion(object):
state_res = attr.ib()  # int; one of the StateResolutionVersions
enforce_key_validity = attr.ib()  # bool

# bool: before MSC2261/MSC2432, m.room.aliases had special auth rules and redaction rules
special_case_aliases_auth = attr.ib(type=bool, default=False)


class RoomVersions(object):
V1 = RoomVersion(
@@ -65,6 +68,7 @@ class RoomVersions(object):
EventFormatVersions.V1,
StateResolutionVersions.V1,
enforce_key_validity=False,
special_case_aliases_auth=True,
)
V2 = RoomVersion(
"2",
@@ -72,6 +76,7 @@ class RoomVersions(object):
EventFormatVersions.V1,
StateResolutionVersions.V2,
enforce_key_validity=False,
special_case_aliases_auth=True,
)
V3 = RoomVersion(
"3",
@@ -79,6 +84,7 @@ class RoomVersions(object):
EventFormatVersions.V2,
StateResolutionVersions.V2,
enforce_key_validity=False,
special_case_aliases_auth=True,
)
V4 = RoomVersion(
"4",
@@ -86,6 +92,7 @@ class RoomVersions(object):
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=False,
special_case_aliases_auth=True,
)
V5 = RoomVersion(
"5",
@@ -93,6 +100,15 @@ class RoomVersions(object):
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=True,
)
MSC2432_DEV = RoomVersion(
"org.matrix.msc2432",
RoomDisposition.UNSTABLE,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
)


@@ -104,5 +120,6 @@ KNOWN_ROOM_VERSIONS = {
RoomVersions.V3,
RoomVersions.V4,
RoomVersions.V5,
RoomVersions.MSC2432_DEV,
)
}  # type: Dict[str, RoomVersion]
@@ -141,7 +141,7 @@ def start_reactor(

def quit_with_error(error_string):
message_lines = error_string.split("\n")
line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
line_length = max(len(l) for l in message_lines if len(l) < 80) + 2
sys.stderr.write("*" * line_length + "\n")
for line in message_lines:
sys.stderr.write(" %s\n" % (line.rstrip(),))
@@ -276,9 +276,19 @@ def start(hs, listeners=None):
# It is now safe to start your Synapse.
hs.start_listening(listeners)
hs.get_datastore().db.start_profiling()
hs.get_pusherpool().start()

setup_sentry(hs)
setup_sdnotify(hs)

# We now freeze all allocated objects in the hopes that (almost)
# everything currently allocated are things that will be used for the
# rest of time. Doing so means less work each GC (hopefully).
#
# This only works on Python 3.7
if sys.version_info >= (3, 7):
gc.collect()
gc.freeze()
except Exception:
traceback.print_exc(file=sys.stderr)
reactor = hs.get_reactor()
@@ -13,161 +13,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

import sys

from twisted.internet import defer, reactor
from twisted.web.resource import NoResource

import synapse
from synapse import events
from synapse.app import _base
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
from synapse.config.logger import setup_logging
from synapse.http.site import SynapseSite
from synapse.logging.context import LoggingContext, run_in_background
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
from synapse.replication.slave.storage.directory import DirectoryStore
from synapse.replication.slave.storage.events import SlavedEventStore
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
from synapse.replication.tcp.client import ReplicationClientHandler
from synapse.server import HomeServer
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.manhole import manhole
from synapse.util.versionstring import get_version_string

logger = logging.getLogger("synapse.app.appservice")


class AppserviceSlaveStore(
DirectoryStore,
SlavedEventStore,
SlavedApplicationServiceStore,
SlavedRegistrationStore,
):
pass


class AppserviceServer(HomeServer):
DATASTORE_CLASS = AppserviceSlaveStore

def _listen_http(self, listener_config):
port = listener_config["port"]
bind_addresses = listener_config["bind_addresses"]
site_tag = listener_config.get("tag", port)
resources = {}
for res in listener_config["resources"]:
for name in res["names"]:
if name == "metrics":
resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)

root_resource = create_resource_tree(resources, NoResource())

_base.listen_tcp(
bind_addresses,
port,
SynapseSite(
"synapse.access.http.%s" % (site_tag,),
site_tag,
listener_config,
root_resource,
self.version_string,
),
)

logger.info("Synapse appservice now listening on port %d", port)

def start_listening(self, listeners):
for listener in listeners:
if listener["type"] == "http":
self._listen_http(listener)
elif listener["type"] == "manhole":
_base.listen_tcp(
listener["bind_addresses"],
listener["port"],
manhole(
username="matrix", password="rabbithole", globals={"hs": self}
),
)
elif listener["type"] == "metrics":
if not self.get_config().enable_metrics:
logger.warning(
(
"Metrics listener configured, but "
"enable_metrics is not True!"
)
)
else:
_base.listen_metrics(listener["bind_addresses"], listener["port"])
else:
logger.warning("Unrecognized listener type: %s", listener["type"])

self.get_tcp_replication().start_replication(self)

def build_tcp_replication(self):
return ASReplicationHandler(self)


class ASReplicationHandler(ReplicationClientHandler):
def __init__(self, hs):
super(ASReplicationHandler, self).__init__(hs.get_datastore())
self.appservice_handler = hs.get_application_service_handler()

async def on_rdata(self, stream_name, token, rows):
await super(ASReplicationHandler, self).on_rdata(stream_name, token, rows)

if stream_name == "events":
max_stream_id = self.store.get_room_max_stream_ordering()
run_in_background(self._notify_app_services, max_stream_id)

@defer.inlineCallbacks
def _notify_app_services(self, room_stream_id):
try:
yield self.appservice_handler.notify_interested_services(room_stream_id)
except Exception:
logger.exception("Error notifying application services of event")


def start(config_options):
try:
config = HomeServerConfig.load_config("Synapse appservice", config_options)
except ConfigError as e:
sys.stderr.write("\n" + str(e) + "\n")
sys.exit(1)

assert config.worker_app == "synapse.app.appservice"

events.USE_FROZEN_DICTS = config.use_frozen_dicts

if config.notify_appservices:
sys.stderr.write(
"\nThe appservices must be disabled in the main synapse process"
"\nbefore they can be run in a separate worker."
"\nPlease add ``notify_appservices: false`` to the main config"
"\n"
)
sys.exit(1)

# Force the pushers to start since they will be disabled in the main config
config.notify_appservices = True

ps = AppserviceServer(
config.server_name,
config=config,
version_string="Synapse/" + get_version_string(synapse),
)

setup_logging(ps, config, use_worker_options=True)

ps.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ps, config.worker_listeners
)

_base.start_worker_reactor("synapse-appservice", config)

from synapse.app.generic_worker import start
from synapse.util.logcontext import LoggingContext

if __name__ == "__main__":
with LoggingContext("main"):
@@ -13,188 +13,11 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import logging
|
||||
|
||||
import sys
|
||||
|
||||
from twisted.internet import reactor
|
||||
from twisted.web.resource import NoResource
|
||||
|
||||
import synapse
|
||||
from synapse import events
|
||||
from synapse.app import _base
|
||||
from synapse.config._base import ConfigError
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
from synapse.http.server import JsonResource
|
||||
from synapse.http.site import SynapseSite
|
||||
from synapse.logging.context import LoggingContext
|
||||
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
|
||||
from synapse.replication.slave.storage._base import BaseSlavedStore
|
||||
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
|
||||
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
|
||||
from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
|
||||
from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore
|
||||
from synapse.replication.slave.storage.devices import SlavedDeviceStore
|
||||
from synapse.replication.slave.storage.directory import DirectoryStore
|
||||
from synapse.replication.slave.storage.events import SlavedEventStore
|
||||
from synapse.replication.slave.storage.groups import SlavedGroupServerStore
|
||||
from synapse.replication.slave.storage.keys import SlavedKeyStore
|
||||
from synapse.replication.slave.storage.profile import SlavedProfileStore
|
||||
from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
|
||||
from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
|
||||
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
|
||||
from synapse.replication.slave.storage.room import RoomStore
|
||||
from synapse.replication.slave.storage.transactions import SlavedTransactionStore
|
||||
from synapse.replication.tcp.client import ReplicationClientHandler
|
||||
from synapse.rest.client.v1.login import LoginRestServlet
|
||||
from synapse.rest.client.v1.push_rule import PushRuleRestServlet
|
||||
from synapse.rest.client.v1.room import (
|
||||
JoinedRoomMemberListRestServlet,
|
||||
PublicRoomListRestServlet,
|
||||
RoomEventContextServlet,
|
||||
RoomMemberListRestServlet,
|
||||
RoomMessageListRestServlet,
|
||||
RoomStateRestServlet,
|
||||
)
|
||||
from synapse.rest.client.v1.voip import VoipRestServlet
|
||||
from synapse.rest.client.v2_alpha.account import ThreepidRestServlet
|
||||
from synapse.rest.client.v2_alpha.keys import KeyChangesServlet, KeyQueryServlet
|
||||
from synapse.rest.client.v2_alpha.register import RegisterRestServlet
|
||||
from synapse.rest.client.versions import VersionsRestServlet
|
||||
from synapse.server import HomeServer
|
||||
from synapse.util.httpresourcetree import create_resource_tree
|
||||
from synapse.util.manhole import manhole
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger("synapse.app.client_reader")
|
||||
|
||||
|
||||
class ClientReaderSlavedStore(
|
||||
SlavedDeviceInboxStore,
|
||||
SlavedDeviceStore,
|
||||
SlavedReceiptsStore,
|
||||
SlavedPushRuleStore,
|
||||
SlavedGroupServerStore,
|
||||
SlavedAccountDataStore,
|
||||
SlavedEventStore,
|
||||
SlavedKeyStore,
|
||||
RoomStore,
|
||||
DirectoryStore,
|
||||
SlavedApplicationServiceStore,
|
||||
SlavedRegistrationStore,
|
||||
SlavedTransactionStore,
|
||||
SlavedProfileStore,
|
||||
SlavedClientIpStore,
|
||||
BaseSlavedStore,
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
class ClientReaderServer(HomeServer):
|
||||
DATASTORE_CLASS = ClientReaderSlavedStore
|
||||
|
||||
def _listen_http(self, listener_config):
|
||||
port = listener_config["port"]
|
||||
bind_addresses = listener_config["bind_addresses"]
|
||||
site_tag = listener_config.get("tag", port)
|
||||
resources = {}
|
||||
for res in listener_config["resources"]:
|
||||
for name in res["names"]:
|
||||
if name == "metrics":
|
||||
resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
|
||||
elif name == "client":
|
||||
resource = JsonResource(self, canonical_json=False)
|
||||
|
||||
PublicRoomListRestServlet(self).register(resource)
|
||||
RoomMemberListRestServlet(self).register(resource)
|
||||
JoinedRoomMemberListRestServlet(self).register(resource)
|
||||
RoomStateRestServlet(self).register(resource)
|
||||
RoomEventContextServlet(self).register(resource)
|
||||
RoomMessageListRestServlet(self).register(resource)
|
||||
RegisterRestServlet(self).register(resource)
|
||||
LoginRestServlet(self).register(resource)
|
||||
ThreepidRestServlet(self).register(resource)
|
||||
KeyQueryServlet(self).register(resource)
|
||||
KeyChangesServlet(self).register(resource)
|
||||
VoipRestServlet(self).register(resource)
|
||||
PushRuleRestServlet(self).register(resource)
|
||||
VersionsRestServlet(self).register(resource)
|
||||
|
||||
resources.update({"/_matrix/client": resource})
|
||||
|
||||
root_resource = create_resource_tree(resources, NoResource())
|
||||
|
||||
_base.listen_tcp(
|
||||
bind_addresses,
|
||||
port,
|
||||
SynapseSite(
|
||||
"synapse.access.http.%s" % (site_tag,),
|
||||
site_tag,
|
||||
listener_config,
|
||||
root_resource,
|
||||
self.version_string,
|
||||
),
|
||||
)
|
||||
|
||||
logger.info("Synapse client reader now listening on port %d", port)
|
||||
|
||||
def start_listening(self, listeners):
|
||||
for listener in listeners:
|
||||
if listener["type"] == "http":
|
||||
self._listen_http(listener)
|
||||
elif listener["type"] == "manhole":
|
||||
_base.listen_tcp(
|
||||
listener["bind_addresses"],
|
||||
listener["port"],
|
||||
manhole(
|
||||
username="matrix", password="rabbithole", globals={"hs": self}
|
||||
),
|
||||
)
|
||||
elif listener["type"] == "metrics":
|
||||
if not self.get_config().enable_metrics:
|
||||
logger.warning(
|
||||
(
|
||||
"Metrics listener configured, but "
|
||||
"enable_metrics is not True!"
|
||||
)
|
||||
)
|
||||
else:
|
||||
_base.listen_metrics(listener["bind_addresses"], listener["port"])
|
||||
else:
|
||||
logger.warning("Unrecognized listener type: %s", listener["type"])
|
||||
|
||||
self.get_tcp_replication().start_replication(self)
|
||||
|
||||
def build_tcp_replication(self):
|
||||
return ReplicationClientHandler(self.get_datastore())
|
||||
|
||||
|
||||
def start(config_options):
|
||||
try:
|
||||
config = HomeServerConfig.load_config("Synapse client reader", config_options)
|
||||
except ConfigError as e:
|
||||
sys.stderr.write("\n" + str(e) + "\n")
|
||||
sys.exit(1)
|
||||
|
||||
assert config.worker_app == "synapse.app.client_reader"
|
||||
|
||||
events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||
|
||||
ss = ClientReaderServer(
|
||||
config.server_name,
|
||||
config=config,
|
||||
version_string="Synapse/" + get_version_string(synapse),
|
||||
)
|
||||
|
||||
setup_logging(ss, config, use_worker_options=True)
|
||||
|
||||
ss.setup()
|
||||
reactor.addSystemEventTrigger(
|
||||
"before", "startup", _base.start, ss, config.worker_listeners
|
||||
)
|
||||
|
||||
_base.start_worker_reactor("synapse-client-reader", config)
|
||||
|
||||
from synapse.app.generic_worker import start
|
||||
from synapse.util.logcontext import LoggingContext
|
||||
|
||||
if __name__ == "__main__":
|
||||
with LoggingContext("main"):
|
||||
|
||||
@@ -13,187 +13,11 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import logging
|
||||
|
||||
import sys
|
||||
|
||||
from twisted.internet import reactor
|
||||
from twisted.web.resource import NoResource
|
||||
|
||||
import synapse
|
||||
from synapse import events
|
||||
from synapse.app import _base
|
||||
from synapse.config._base import ConfigError
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
from synapse.http.server import JsonResource
|
||||
from synapse.http.site import SynapseSite
|
||||
from synapse.logging.context import LoggingContext
|
||||
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
|
||||
from synapse.replication.slave.storage._base import BaseSlavedStore
|
||||
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
|
||||
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
|
||||
from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
|
||||
from synapse.replication.slave.storage.devices import SlavedDeviceStore
|
||||
from synapse.replication.slave.storage.directory import DirectoryStore
|
||||
from synapse.replication.slave.storage.events import SlavedEventStore
|
||||
from synapse.replication.slave.storage.profile import SlavedProfileStore
|
||||
from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
|
||||
from synapse.replication.slave.storage.pushers import SlavedPusherStore
|
||||
from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
|
||||
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
|
||||
from synapse.replication.slave.storage.room import RoomStore
|
||||
from synapse.replication.slave.storage.transactions import SlavedTransactionStore
|
||||
from synapse.replication.tcp.client import ReplicationClientHandler
|
||||
from synapse.rest.client.v1.profile import (
|
||||
ProfileAvatarURLRestServlet,
|
||||
ProfileDisplaynameRestServlet,
|
||||
ProfileRestServlet,
|
||||
)
|
||||
from synapse.rest.client.v1.room import (
|
||||
JoinRoomAliasServlet,
|
||||
RoomMembershipRestServlet,
|
||||
RoomSendEventRestServlet,
|
||||
RoomStateEventRestServlet,
|
||||
)
|
||||
from synapse.server import HomeServer
|
||||
from synapse.storage.data_stores.main.user_directory import UserDirectoryStore
|
||||
from synapse.util.httpresourcetree import create_resource_tree
|
||||
from synapse.util.manhole import manhole
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger("synapse.app.event_creator")
|
||||
|
||||
|
||||
class EventCreatorSlavedStore(
|
||||
# FIXME(#3714): We need to add UserDirectoryStore as we write directly
|
||||
# rather than going via the correct worker.
|
||||
UserDirectoryStore,
|
||||
DirectoryStore,
|
||||
SlavedTransactionStore,
|
||||
SlavedProfileStore,
|
||||
SlavedAccountDataStore,
|
||||
SlavedPusherStore,
|
||||
SlavedReceiptsStore,
|
||||
SlavedPushRuleStore,
|
||||
SlavedDeviceStore,
|
||||
SlavedClientIpStore,
|
||||
SlavedApplicationServiceStore,
|
||||
SlavedEventStore,
|
||||
SlavedRegistrationStore,
|
||||
RoomStore,
|
||||
BaseSlavedStore,
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
class EventCreatorServer(HomeServer):
|
||||
DATASTORE_CLASS = EventCreatorSlavedStore
|
||||
|
||||
def _listen_http(self, listener_config):
|
||||
port = listener_config["port"]
|
||||
bind_addresses = listener_config["bind_addresses"]
|
||||
site_tag = listener_config.get("tag", port)
|
||||
resources = {}
|
||||
for res in listener_config["resources"]:
|
||||
for name in res["names"]:
|
||||
if name == "metrics":
|
||||
resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
|
||||
elif name == "client":
|
||||
resource = JsonResource(self, canonical_json=False)
|
||||
RoomSendEventRestServlet(self).register(resource)
|
||||
RoomMembershipRestServlet(self).register(resource)
|
||||
RoomStateEventRestServlet(self).register(resource)
|
||||
JoinRoomAliasServlet(self).register(resource)
|
||||
ProfileAvatarURLRestServlet(self).register(resource)
|
||||
ProfileDisplaynameRestServlet(self).register(resource)
|
||||
ProfileRestServlet(self).register(resource)
|
||||
resources.update(
|
||||
{
|
||||
"/_matrix/client/r0": resource,
|
||||
"/_matrix/client/unstable": resource,
|
||||
"/_matrix/client/v2_alpha": resource,
|
||||
"/_matrix/client/api/v1": resource,
|
||||
}
|
||||
)
|
||||
|
||||
root_resource = create_resource_tree(resources, NoResource())
|
||||
|
||||
_base.listen_tcp(
|
||||
bind_addresses,
|
||||
port,
|
||||
SynapseSite(
|
||||
"synapse.access.http.%s" % (site_tag,),
|
||||
site_tag,
|
||||
listener_config,
|
||||
root_resource,
|
||||
self.version_string,
|
||||
),
|
||||
)
|
||||
|
||||
logger.info("Synapse event creator now listening on port %d", port)
|
||||
|
||||
def start_listening(self, listeners):
|
||||
for listener in listeners:
|
||||
if listener["type"] == "http":
|
||||
self._listen_http(listener)
|
||||
elif listener["type"] == "manhole":
|
||||
_base.listen_tcp(
|
||||
listener["bind_addresses"],
|
||||
listener["port"],
|
||||
manhole(
|
||||
username="matrix", password="rabbithole", globals={"hs": self}
|
||||
),
|
||||
)
|
||||
elif listener["type"] == "metrics":
|
||||
if not self.get_config().enable_metrics:
|
||||
logger.warning(
|
||||
(
|
||||
"Metrics listener configured, but "
|
||||
"enable_metrics is not True!"
|
||||
)
|
||||
)
|
||||
else:
|
||||
_base.listen_metrics(listener["bind_addresses"], listener["port"])
|
||||
else:
|
||||
logger.warning("Unrecognized listener type: %s", listener["type"])
|
||||
|
||||
self.get_tcp_replication().start_replication(self)
|
||||
|
||||
def build_tcp_replication(self):
|
||||
return ReplicationClientHandler(self.get_datastore())
|
||||
|
||||
|
||||
def start(config_options):
|
||||
try:
|
||||
config = HomeServerConfig.load_config("Synapse event creator", config_options)
|
||||
except ConfigError as e:
|
||||
sys.stderr.write("\n" + str(e) + "\n")
|
||||
sys.exit(1)
|
||||
|
||||
assert config.worker_app == "synapse.app.event_creator"
|
||||
|
||||
assert config.worker_replication_http_port is not None
|
||||
|
||||
# This should only be done on the user directory worker or the master
|
||||
config.update_user_directory = False
|
||||
|
||||
events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||
|
||||
ss = EventCreatorServer(
|
||||
config.server_name,
|
||||
config=config,
|
||||
version_string="Synapse/" + get_version_string(synapse),
|
||||
)
|
||||
|
||||
setup_logging(ss, config, use_worker_options=True)
|
||||
|
||||
ss.setup()
|
||||
reactor.addSystemEventTrigger(
|
||||
"before", "startup", _base.start, ss, config.worker_listeners
|
||||
)
|
||||
|
||||
_base.start_worker_reactor("synapse-event-creator", config)
|
||||
|
||||
from synapse.app.generic_worker import start
|
||||
from synapse.util.logcontext import LoggingContext
|
||||
|
||||
if __name__ == "__main__":
|
||||
with LoggingContext("main"):
|
||||
|
||||
@@ -13,169 +13,11 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import logging
|
||||
|
||||
import sys
|
||||
|
||||
from twisted.internet import reactor
|
||||
from twisted.web.resource import NoResource
|
||||
|
||||
import synapse
|
||||
from synapse import events
|
||||
from synapse.api.urls import FEDERATION_PREFIX, SERVER_KEY_V2_PREFIX
|
||||
from synapse.app import _base
|
||||
from synapse.config._base import ConfigError
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
from synapse.federation.transport.server import TransportLayerServer
|
||||
from synapse.http.site import SynapseSite
|
||||
from synapse.logging.context import LoggingContext
|
||||
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
|
||||
from synapse.replication.slave.storage._base import BaseSlavedStore
|
||||
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
|
||||
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
|
||||
from synapse.replication.slave.storage.directory import DirectoryStore
|
||||
from synapse.replication.slave.storage.events import SlavedEventStore
|
||||
from synapse.replication.slave.storage.keys import SlavedKeyStore
|
||||
from synapse.replication.slave.storage.profile import SlavedProfileStore
|
||||
from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
|
||||
from synapse.replication.slave.storage.pushers import SlavedPusherStore
|
||||
from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
|
||||
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
|
||||
from synapse.replication.slave.storage.room import RoomStore
|
||||
from synapse.replication.slave.storage.transactions import SlavedTransactionStore
|
||||
from synapse.replication.tcp.client import ReplicationClientHandler
|
||||
from synapse.rest.key.v2 import KeyApiV2Resource
|
||||
from synapse.server import HomeServer
|
||||
from synapse.util.httpresourcetree import create_resource_tree
|
||||
from synapse.util.manhole import manhole
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger("synapse.app.federation_reader")
|
||||
|
||||
|
||||
class FederationReaderSlavedStore(
|
||||
SlavedAccountDataStore,
|
||||
SlavedProfileStore,
|
||||
SlavedApplicationServiceStore,
|
||||
SlavedPusherStore,
|
||||
SlavedPushRuleStore,
|
||||
SlavedReceiptsStore,
|
||||
SlavedEventStore,
|
||||
SlavedKeyStore,
|
||||
SlavedRegistrationStore,
|
||||
RoomStore,
|
||||
DirectoryStore,
|
||||
SlavedTransactionStore,
|
||||
BaseSlavedStore,
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
class FederationReaderServer(HomeServer):
|
||||
DATASTORE_CLASS = FederationReaderSlavedStore
|
||||
|
||||
def _listen_http(self, listener_config):
|
||||
port = listener_config["port"]
|
||||
bind_addresses = listener_config["bind_addresses"]
|
||||
site_tag = listener_config.get("tag", port)
|
||||
resources = {}
|
||||
for res in listener_config["resources"]:
|
||||
for name in res["names"]:
|
||||
if name == "metrics":
|
||||
resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
|
||||
elif name == "federation":
|
||||
resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
|
||||
if name == "openid" and "federation" not in res["names"]:
|
||||
# Only load the openid resource separately if federation resource
|
||||
# is not specified since federation resource includes openid
|
||||
# resource.
|
||||
resources.update(
|
||||
{
|
||||
FEDERATION_PREFIX: TransportLayerServer(
|
||||
self, servlet_groups=["openid"]
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
if name in ["keys", "federation"]:
|
||||
resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
|
||||
|
||||
root_resource = create_resource_tree(resources, NoResource())
|
||||
|
||||
_base.listen_tcp(
|
||||
bind_addresses,
|
||||
port,
|
||||
SynapseSite(
|
||||
"synapse.access.http.%s" % (site_tag,),
|
||||
site_tag,
|
||||
listener_config,
|
||||
root_resource,
|
||||
self.version_string,
|
||||
),
|
||||
reactor=self.get_reactor(),
|
||||
)
|
||||
|
||||
logger.info("Synapse federation reader now listening on port %d", port)
|
||||
|
||||
def start_listening(self, listeners):
|
||||
for listener in listeners:
|
||||
if listener["type"] == "http":
|
||||
self._listen_http(listener)
|
||||
elif listener["type"] == "manhole":
|
||||
_base.listen_tcp(
|
||||
listener["bind_addresses"],
|
||||
listener["port"],
|
||||
manhole(
|
||||
username="matrix", password="rabbithole", globals={"hs": self}
|
||||
),
|
||||
)
|
||||
elif listener["type"] == "metrics":
|
||||
if not self.get_config().enable_metrics:
|
||||
logger.warning(
|
||||
(
|
||||
"Metrics listener configured, but "
|
||||
"enable_metrics is not True!"
|
||||
)
|
||||
)
|
||||
else:
|
||||
_base.listen_metrics(listener["bind_addresses"], listener["port"])
|
||||
else:
|
||||
logger.warning("Unrecognized listener type: %s", listener["type"])
|
||||
|
||||
self.get_tcp_replication().start_replication(self)
|
||||
|
||||
def build_tcp_replication(self):
|
||||
return ReplicationClientHandler(self.get_datastore())
|
||||
|
||||
|
||||
def start(config_options):
|
||||
try:
|
||||
config = HomeServerConfig.load_config(
|
||||
"Synapse federation reader", config_options
|
||||
)
|
||||
except ConfigError as e:
|
||||
sys.stderr.write("\n" + str(e) + "\n")
|
||||
sys.exit(1)
|
||||
|
||||
assert config.worker_app == "synapse.app.federation_reader"
|
||||
|
||||
events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||
|
||||
ss = FederationReaderServer(
|
||||
config.server_name,
|
||||
config=config,
|
||||
version_string="Synapse/" + get_version_string(synapse),
|
||||
)
|
||||
|
||||
setup_logging(ss, config, use_worker_options=True)
|
||||
|
||||
ss.setup()
|
||||
reactor.addSystemEventTrigger(
|
||||
"before", "startup", _base.start, ss, config.worker_listeners
|
||||
)
|
||||
|
||||
_base.start_worker_reactor("synapse-federation-reader", config)
|
||||
|
||||
from synapse.app.generic_worker import start
|
||||
from synapse.util.logcontext import LoggingContext
|
||||
|
||||
if __name__ == "__main__":
|
||||
with LoggingContext("main"):
|
||||
|
||||
@@ -13,290 +13,11 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import logging
|
||||
|
||||
import sys
|
||||
|
||||
from twisted.internet import defer, reactor
|
||||
from twisted.web.resource import NoResource
|
||||
|
||||
import synapse
|
||||
from synapse import events
|
||||
from synapse.app import _base
|
||||
from synapse.config._base import ConfigError
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
from synapse.federation import send_queue
|
||||
from synapse.http.site import SynapseSite
|
||||
from synapse.logging.context import LoggingContext, run_in_background
|
||||
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
|
||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||
from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore
|
||||
from synapse.replication.slave.storage.devices import SlavedDeviceStore
|
||||
from synapse.replication.slave.storage.events import SlavedEventStore
|
||||
from synapse.replication.slave.storage.presence import SlavedPresenceStore
|
||||
from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
|
||||
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
|
||||
from synapse.replication.slave.storage.transactions import SlavedTransactionStore
|
||||
from synapse.replication.tcp.client import ReplicationClientHandler
|
||||
from synapse.replication.tcp.streams._base import ReceiptsStream
|
||||
from synapse.server import HomeServer
|
||||
from synapse.storage.database import Database
|
||||
from synapse.types import ReadReceipt
|
||||
from synapse.util.async_helpers import Linearizer
|
||||
from synapse.util.httpresourcetree import create_resource_tree
|
||||
from synapse.util.manhole import manhole
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger("synapse.app.federation_sender")
|
||||
|
||||
|
||||
class FederationSenderSlaveStore(
|
||||
SlavedDeviceInboxStore,
|
||||
SlavedTransactionStore,
|
||||
SlavedReceiptsStore,
|
||||
SlavedEventStore,
|
||||
SlavedRegistrationStore,
|
||||
SlavedDeviceStore,
|
||||
SlavedPresenceStore,
|
||||
):
|
||||
def __init__(self, database: Database, db_conn, hs):
|
||||
super(FederationSenderSlaveStore, self).__init__(database, db_conn, hs)
|
||||
|
||||
# We pull out the current federation stream position now so that we
|
||||
# always have a known value for the federation position in memory so
|
||||
# that we don't have to bounce via a deferred once when we start the
|
||||
# replication streams.
|
||||
self.federation_out_pos_startup = self._get_federation_out_pos(db_conn)
|
||||
|
||||
def _get_federation_out_pos(self, db_conn):
|
||||
sql = "SELECT stream_id FROM federation_stream_position WHERE type = ?"
|
||||
sql = self.database_engine.convert_param_style(sql)
|
||||
|
||||
txn = db_conn.cursor()
|
||||
txn.execute(sql, ("federation",))
|
||||
rows = txn.fetchall()
|
||||
txn.close()
|
||||
|
||||
return rows[0][0] if rows else -1
|
||||
|
||||
|
||||
class FederationSenderServer(HomeServer):
|
||||
DATASTORE_CLASS = FederationSenderSlaveStore
|
||||
|
||||
def _listen_http(self, listener_config):
|
||||
port = listener_config["port"]
|
||||
bind_addresses = listener_config["bind_addresses"]
|
||||
site_tag = listener_config.get("tag", port)
|
||||
resources = {}
|
||||
for res in listener_config["resources"]:
|
||||
for name in res["names"]:
|
||||
if name == "metrics":
|
||||
resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
|
||||
|
||||
root_resource = create_resource_tree(resources, NoResource())
|
||||
|
||||
_base.listen_tcp(
|
||||
bind_addresses,
|
||||
port,
|
||||
SynapseSite(
|
||||
"synapse.access.http.%s" % (site_tag,),
|
||||
site_tag,
|
||||
listener_config,
|
||||
root_resource,
|
||||
self.version_string,
|
||||
),
|
||||
)
|
||||
|
||||
logger.info("Synapse federation_sender now listening on port %d", port)
|
||||
|
||||
def start_listening(self, listeners):
|
||||
for listener in listeners:
|
||||
if listener["type"] == "http":
|
||||
self._listen_http(listener)
|
||||
elif listener["type"] == "manhole":
|
||||
_base.listen_tcp(
|
||||
listener["bind_addresses"],
|
||||
listener["port"],
|
||||
manhole(
|
||||
username="matrix", password="rabbithole", globals={"hs": self}
|
||||
),
|
||||
)
|
||||
elif listener["type"] == "metrics":
|
||||
if not self.get_config().enable_metrics:
|
||||
logger.warning(
|
||||
(
|
||||
"Metrics listener configured, but "
|
||||
"enable_metrics is not True!"
|
||||
)
|
||||
)
|
||||
else:
|
||||
_base.listen_metrics(listener["bind_addresses"], listener["port"])
|
||||
else:
|
||||
logger.warning("Unrecognized listener type: %s", listener["type"])
|
||||
|
||||
self.get_tcp_replication().start_replication(self)
|
||||
|
||||
def build_tcp_replication(self):
|
||||
return FederationSenderReplicationHandler(self)
|
||||
|
||||
|
||||
class FederationSenderReplicationHandler(ReplicationClientHandler):
|
||||
def __init__(self, hs):
|
||||
super(FederationSenderReplicationHandler, self).__init__(hs.get_datastore())
|
||||
self.send_handler = FederationSenderHandler(hs, self)
|
||||
|
||||
async def on_rdata(self, stream_name, token, rows):
|
||||
await super(FederationSenderReplicationHandler, self).on_rdata(
|
||||
stream_name, token, rows
|
||||
)
|
||||
self.send_handler.process_replication_rows(stream_name, token, rows)
|
||||
|
||||
def get_streams_to_replicate(self):
|
||||
args = super(
|
||||
FederationSenderReplicationHandler, self
|
||||
).get_streams_to_replicate()
|
||||
args.update(self.send_handler.stream_positions())
|
||||
return args
|
||||
|
||||
def on_remote_server_up(self, server: str):
|
||||
"""Called when get a new REMOTE_SERVER_UP command."""
|
||||
|
||||
# Let's wake up the transaction queue for the server in case we have
|
||||
# pending stuff to send to it.
|
||||
self.send_handler.wake_destination(server)
|
||||
|
||||
|
||||
def start(config_options):
|
||||
try:
|
||||
config = HomeServerConfig.load_config(
|
||||
"Synapse federation sender", config_options
|
||||
)
|
||||
except ConfigError as e:
|
||||
sys.stderr.write("\n" + str(e) + "\n")
|
||||
sys.exit(1)
|
||||
|
||||
assert config.worker_app == "synapse.app.federation_sender"
|
||||
|
||||
events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||
|
||||
if config.send_federation:
|
||||
sys.stderr.write(
|
||||
"\nThe send_federation must be disabled in the main synapse process"
|
||||
"\nbefore they can be run in a separate worker."
|
||||
"\nPlease add ``send_federation: false`` to the main config"
|
||||
"\n"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
# Force the pushers to start since they will be disabled in the main config
|
||||
config.send_federation = True
|
||||
|
||||
ss = FederationSenderServer(
|
||||
config.server_name,
|
||||
config=config,
|
||||
version_string="Synapse/" + get_version_string(synapse),
|
||||
)
|
||||
|
||||
setup_logging(ss, config, use_worker_options=True)
|
||||
|
||||
ss.setup()
|
||||
reactor.addSystemEventTrigger(
|
||||
"before", "startup", _base.start, ss, config.worker_listeners
|
||||
)
|
||||
|
||||
_base.start_worker_reactor("synapse-federation-sender", config)
|
||||
|
||||
|
||||
class FederationSenderHandler(object):
|
||||
"""Processes the replication stream and forwards the appropriate entries
|
||||
to the federation sender.
|
||||
"""
|
||||
|
||||
def __init__(self, hs: FederationSenderServer, replication_client):
|
||||
self.store = hs.get_datastore()
|
||||
self._is_mine_id = hs.is_mine_id
|
||||
self.federation_sender = hs.get_federation_sender()
|
||||
self.replication_client = replication_client
|
||||
|
||||
self.federation_position = self.store.federation_out_pos_startup
|
||||
self._fed_position_linearizer = Linearizer(name="_fed_position_linearizer")
|
||||
|
||||
self._last_ack = self.federation_position
|
||||
|
||||
self._room_serials = {}
|
||||
self._room_typing = {}
|
||||
|
||||
def on_start(self):
|
||||
# There may be some events that are persisted but haven't been sent,
|
||||
# so send them now.
|
||||
self.federation_sender.notify_new_events(
|
||||
self.store.get_room_max_stream_ordering()
|
||||
)
|
||||
|
||||
def wake_destination(self, server: str):
|
||||
self.federation_sender.wake_destination(server)
|
||||
|
||||
def stream_positions(self):
|
||||
return {"federation": self.federation_position}
|
||||
|
||||
def process_replication_rows(self, stream_name, token, rows):
|
||||
# The federation stream contains things that we want to send out, e.g.
|
||||
# presence, typing, etc.
|
||||
if stream_name == "federation":
|
||||
send_queue.process_rows_for_federation(self.federation_sender, rows)
|
||||
run_in_background(self.update_token, token)
|
||||
|
||||
# We also need to poke the federation sender when new events happen
|
||||
elif stream_name == "events":
|
||||
self.federation_sender.notify_new_events(token)
|
||||
|
||||
# ... and when new receipts happen
|
||||
elif stream_name == ReceiptsStream.NAME:
|
||||
run_as_background_process(
|
||||
"process_receipts_for_federation", self._on_new_receipts, rows
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _on_new_receipts(self, rows):
|
||||
"""
|
||||
Args:
|
||||
rows (iterable[synapse.replication.tcp.streams.ReceiptsStreamRow]):
|
||||
new receipts to be processed
|
||||
"""
|
||||
for receipt in rows:
|
||||
# we only want to send on receipts for our own users
|
||||
if not self._is_mine_id(receipt.user_id):
|
||||
continue
|
||||
receipt_info = ReadReceipt(
|
||||
receipt.room_id,
|
||||
receipt.receipt_type,
|
||||
receipt.user_id,
|
||||
[receipt.event_id],
|
||||
receipt.data,
|
||||
)
|
||||
yield self.federation_sender.send_read_receipt(receipt_info)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def update_token(self, token):
|
||||
try:
|
||||
self.federation_position = token
|
||||
|
||||
# We linearize here to ensure we don't have races updating the token
|
||||
with (yield self._fed_position_linearizer.queue(None)):
|
||||
if self._last_ack < self.federation_position:
|
||||
yield self.store.update_federation_out_pos(
|
||||
"federation", self.federation_position
|
||||
)
|
||||
|
||||
# We ACK this token over replication so that the master can drop
|
||||
# its in memory queues
|
||||
self.replication_client.send_federation_ack(
|
||||
self.federation_position
|
||||
)
|
||||
self._last_ack = self.federation_position
|
||||
except Exception:
|
||||
logger.exception("Error updating federation stream position")
|
||||
|
||||
from synapse.app.generic_worker import start
|
||||
from synapse.util.logcontext import LoggingContext
|
||||
|
||||
if __name__ == "__main__":
|
||||
with LoggingContext("main"):
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.