Compare commits: anoa/remov...anoa/v2_is (425 commits)
| SHA1 |
|---|
| 460f09d32a |
| cee00a3584 |
| bed1e94ca0 |
| 0766ef23cf |
| 2a012e8a04 |
| 4548d1f87e |
| 4fca313389 |
| 4765f0cfd9 |
| d19505a8c1 |
| 2be726a4dd |
| 3057095a5d |
| a578c422f4 |
| 5625abe503 |
| 60e447d627 |
| e7011280c7 |
| 6fcab5b4cf |
| a640aa6e18 |
| 92c1550f4a |
| c8fa620d7a |
| deca277d09 |
| 5798a134c0 |
| 71fc04069a |
| 849d8dc19f |
| 4dc08495b8 |
| 8f1346d82b |
| 38dac2774f |
| 6d97843793 |
| 7dc398586c |
| e68d648594 |
| 49ef8ec399 |
| a3f0635686 |
| 1196ee32b3 |
| 7ccc251415 |
| dfd10f5133 |
| 91caa5b430 |
| 1b959b6977 |
| c88a119259 |
| 75ef0f8b1d |
| 7bfccadf31 |
| 322ccac33f |
| ccb15a5bbe |
| f5b50d0871 |
| e7577427c9 |
| 7837a5f2ea |
| 1a7e6eb633 |
| d1e0b91083 |
| 62a1639287 |
| aefa76f5cd |
| c25137a99f |
| e8e3e033ee |
| 27d3fc421a |
| fbb758a7ce |
| e70f0081da |
| fe0ac98e66 |
| 7af5a63063 |
| c998f25006 |
| 4a2d2c2b6f |
| 9ba32f6573 |
| ffa5b757c7 |
| 971c980c6e |
| d9b8cf81be |
| 0fb5189072 |
| 80793e813c |
| ae38e0569f |
| 886eceba3e |
| 8767b63a82 |
| 0b39fa53b6 |
| 812ed6b0d5 |
| 0bab582fd6 |
| dbd46decad |
| 1c5b8c6222 |
| 27a686e53b |
| 3320aaab3a |
| 1e4b4d85e7 |
| 1b09cf8658 |
| 9a6f2be572 |
| c9f11d09fc |
| 119aa31b10 |
| ef1c524bb3 |
| 2472e2e40d |
| 73fb6f3723 |
| 5426e13168 |
| 3a114fe105 |
| 902ef397af |
| 24ee3aecd5 |
| 1954438610 |
| 4dab867288 |
| 62fb643cdc |
| 97cbc96093 |
| 5906be8589 |
| 72bc285669 |
| baa3f4a80d |
| c886f976e0 |
| 29763f01c6 |
| 74f016d343 |
| 1f9df1cc7b |
| 5019945828 |
| 7777d353bf |
| 1dec31560e |
| 502728777c |
| bb29bc2937 |
| d514dac0b2 |
| bdd201ea7f |
| 74fb729213 |
| 412c6e21a8 |
| 8a5f6ed130 |
| c188bd2c12 |
| 20402aa128 |
| 6d86df73f1 |
| 87fa26006b |
| ebba15ee7f |
| 861d663c15 |
| e132ba79ae |
| b13cac896d |
| c03e3e8301 |
| f299c5414c |
| ce5f1cb98c |
| 6382914587 |
| fb5acd7039 |
| 748aa38378 |
| 8cf7fbbce0 |
| a3df04a899 |
| 2253b083d9 |
| 7809f0c022 |
| 6fadb560fc |
| baee288fb4 |
| 1771f0045d |
| e6e136decc |
| c058aeb88d |
| 81b8080acd |
| b7f7cc7ace |
| d6de55bce9 |
| 3ad24ab386 |
| 1b63ccd848 |
| 09f6152a11 |
| f70d0a1dd9 |
| 3039be82ce |
| 28bce1ac7c |
| 18bdac8ee4 |
| aedfec3ad7 |
| 17e1e80726 |
| af187805b3 |
| 96bdd661b8 |
| 0b6fbb28a8 |
| e9906b0772 |
| fb3469f53a |
| f218705d2a |
| 2546f32b90 |
| 9d9cf3583b |
| 2bec3a4953 |
| 3de6cc245f |
| 156a461cbd |
| c9456193d3 |
| fb86217553 |
| 41546f946e |
| a7f0161276 |
| 1016f303e5 |
| 107ad133fc |
| af9f1c0764 |
| d1b5b055be |
| edeae53221 |
| c32d359094 |
| bf4db42920 |
| 977fa4a717 |
| 6881f21f3e |
| 8ed9e63432 |
| d55bc4a8bf |
| 5d018d23f0 |
| 93fd3cbc7a |
| 3c076c79c5 |
| a8f40a8302 |
| 55a0c98d16 |
| 0b36decfb6 |
| 312cc48e2b |
| d02e41dcb2 |
| da378af445 |
| d2e3d5b9db |
| 76a58fdcce |
| 58af30a6c7 |
| 0f632f3a57 |
| ad167c3849 |
| f25f638c35 |
| 3ff3dfe5a3 |
| f4a30d286f |
| bc35503528 |
| a4a9ded4d0 |
| e5a0224837 |
| dc4d74e44a |
| c5288e9984 |
| 2e697d3013 |
| 0eefb76fa1 |
| cf89266b98 |
| 02735e140f |
| f31d4cb7a2 |
| 72167fb394 |
| 58a755cdc3 |
| 8fde611a8c |
| 8f15832950 |
| 9fe6ad5fef |
| fe2f2fc530 |
| 6be336c0d8 |
| 3b7a35a59a |
| a9bcae9f50 |
| d4f91e7e9f |
| 4037d3220a |
| 123c04daa7 |
| 62a2d60d72 |
| 958d69f300 |
| 15056ca208 |
| f92d05e254 |
| 7a48d0bab8 |
| b4d5ff0af7 |
| e23ab7f41a |
| 1ec7d656dd |
| 458e51df7a |
| 63eb4a1b62 |
| 8c97f6414c |
| 5c3eecc70f |
| 4e97eb89e5 |
| 448bcfd0f9 |
| e6a6c4fbab |
| c9964ba600 |
| 865077f1d1 |
| aecae8f397 |
| 7c8c3b8437 |
| 3e013b7c8e |
| 2a12d76646 |
| 97a8b4caf7 |
| df3a5db629 |
| 85b0bd8fe0 |
| 105e7f6ed3 |
| 3b476f5767 |
| d94916852f |
| 84c6ea1af8 |
| 45df38e61b |
| fa87004bc1 |
| bd083a5fcf |
| 244953be3f |
| 08352d44f8 |
| d74595e2ca |
| 1a93daf353 |
| 97bf307755 |
| 992333b995 |
| 8b16696b24 |
| dde6ea7ff6 |
| 2e9cf7dda5 |
| 14c24c9037 |
| a0ee2ec458 |
| d1020653fc |
| 1f8bae7724 |
| 1cad8d7b6f |
| 0f2ecb961e |
| 26d742fed6 |
| 70e18cee00 |
| b1605cdd23 |
| 618bd1ee76 |
| f16aa3a44b |
| c0a1301ccd |
| baf081cd3b |
| 2d573e2e2b |
| 2276936bac |
| f30a71a67b |
| cf2972c818 |
| c159803067 |
| 0c4a99607e |
| 62921fb53e |
| 32768e96d4 |
| 418635e68a |
| adcd5368b0 |
| 73bbaf2bc6 |
| 3641784e8c |
| 65afc535a6 |
| 4806651744 |
| fadfde9aaa |
| 18a466b84e |
| 3db1377b26 |
| 841b12867e |
| 73bf452666 |
| 22d2338ace |
| 1883223a01 |
| 4f6984aa88 |
| cda4460d99 |
| 39e594b765 |
| cf0006719d |
| b2a629ef49 |
| d9ea9881d2 |
| c96322c8d2 |
| 0d0f6d12bc |
| 17c27df6ea |
| 80cfad233e |
| 720d30469f |
| 79f689e6c2 |
| c560b791e1 |
| 8e513e7afc |
| 22e862304a |
| 0cb72812f9 |
| f477ce4b1a |
| 66f5ff72fd |
| 2017369f7d |
| 8b0d5b171e |
| 5ea773c505 |
| 54437c48ca |
| f337d2f0f0 |
| 0fd171770a |
| 826e6ec3bd |
| f99554b15d |
| dc7cf81267 |
| f214bff0c0 |
| dcca56baba |
| c7095be913 |
| 7704873cb8 |
| d7bd9651bc |
| 5c07c97c09 |
| 7b8bc61834 |
| ced4fdaa84 |
| 2410335507 |
| bd2e1a2aa8 |
| ebc5ed1296 |
| 5c05ae7ba0 |
| b73ce4ba81 |
| 356ed0438e |
| 6a85cb5ef7 |
| a3e40bd5b4 |
| cfc00068bd |
| dd2851d576 |
| 10523241d8 |
| 82345bc09a |
| 7ad1d76356 |
| b2a382efdb |
| 89c885909a |
| 8e1ada9e6f |
| 059d8c1a4e |
| c618a5d348 |
| 6de09e07a6 |
| fa8271c5ac |
| 9c70a02a9c |
| 1def298119 |
| 2091c91fde |
| 375162b3c3 |
| 65c5592b8e |
| c831c5b2bb |
| 5ed7853bb0 |
| f44354e17f |
| d0d479c1af |
| 03cc8c4b5d |
| eca4f5ac73 |
| 1b2067f53d |
| e8c53b07f2 |
| c8f35d8d38 |
| fdefb9e29a |
| 37b524f971 |
| 823e13ddf4 |
| 18c516698e |
| d86321300a |
| d336b51331 |
| 5f158ec039 |
| db0a50bc40 |
| 24aa0e0a5b |
| 4c17a87606 |
| d445b3ae57 |
| 59f15309ca |
| f369164761 |
| 6bb0357c94 |
| a83577d64f |
| 39e9839a04 |
| 78a1cd36b5 |
| 0a4001eba1 |
| 38a6d3eea7 |
| 1890cfcf82 |
| 8ab3444fdf |
| 953dbb7980 |
| b2a2e96ea6 |
| 351d9bd317 |
| f77e997619 |
| f281714583 |
| 3dd61d12cd |
| 4d122d295c |
| 65434da75d |
| 7b3bc755a3 |
| d88421ab03 |
| af67c7c1de |
| 824707383b |
| 73cb716b3c |
| 5e01e9ac19 |
| f3615a8aa5 |
| 7556851665 |
| 43d175d17a |
| b70e080b59 |
| 57eacee4f4 |
| c142e5d16a |
| 4b1f7febc7 |
| f9e99f9534 |
| 1af2fcd492 |
| f05c7d62bc |
| 1a807dfe68 |
| 589d43d9cd |
| 9b1b79f3f5 |
| ad8b909ce9 |
| 80cc82a445 |
| b4f5416dd9 |
| eadb13d2e9 |
| 7f0d8e4288 |
| 9ccea16d45 |
| a6a776f3d8 |
| 9481707a52 |
| 0e5434264f |
| 1ee268d33d |
| ee91ac179c |
| 822a0f0435 |
| 54283f3ed4 |
| 20332b278d |
| c061d4f237 |
| f6608a8805 |
| 426854e7bc |
| 463b072b12 |
| d0b849c86d |
| cb8d568cf9 |
| 463d5a8fde |
| 91753cae59 |
| c7b48bd42d |
| 0ee9076ffe |
| 10fe904d88 |
| 9f3c0a8556 |
| 65dd5543f6 |
| 8ee69f299c |
@@ -6,6 +6,7 @@ services:
     image: postgres:9.5
     environment:
       POSTGRES_PASSWORD: postgres
+    command: -c fsync=off
 
   testenv:
     image: python:3.5
@@ -16,6 +17,6 @@ services:
       SYNAPSE_POSTGRES_HOST: postgres
       SYNAPSE_POSTGRES_USER: postgres
       SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
+    working_dir: /src
     volumes:
-      - ..:/app
+      - ..:/src
@@ -6,6 +6,7 @@ services:
     image: postgres:11
    environment:
       POSTGRES_PASSWORD: postgres
+    command: -c fsync=off
 
   testenv:
     image: python:3.7
@@ -16,6 +17,6 @@ services:
      SYNAPSE_POSTGRES_HOST: postgres
       SYNAPSE_POSTGRES_USER: postgres
       SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
+    working_dir: /src
     volumes:
-      - ..:/app
+      - ..:/src
@@ -6,6 +6,7 @@ services:
     image: postgres:9.5
     environment:
       POSTGRES_PASSWORD: postgres
+    command: -c fsync=off
 
   testenv:
     image: python:3.7
@@ -16,6 +17,6 @@ services:
       SYNAPSE_POSTGRES_HOST: postgres
       SYNAPSE_POSTGRES_USER: postgres
       SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
+    working_dir: /src
     volumes:
-      - ..:/app
+      - ..:/src
@@ -1,3 +1,18 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import sys
 from tap.parser import Parser
 from tap.line import Result, Unknown, Diagnostic
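The file above (`format_tap.py`, which this hunk prepends a license header to) post-processes SyTest's TAP output; it is later piped into a Buildkite annotation (see the sytest script further down). As a rough illustration only of the `tap.py` API it imports, here is a minimal sketch that lists failing tests; the real script builds an annotation summary instead:

```python
import sys

from tap.parser import Parser


def summarise(path):
    # Walk the TAP stream; tap.py yields line objects whose .category
    # distinguishes test results from plans, diagnostics, etc.
    parser = Parser()
    for line in parser.parse_file(path):
        if line.category == "test" and not line.ok:
            print("FAILED: %s %s" % (line.number, line.description), file=sys.stderr)


if __name__ == "__main__":
    summarise(sys.argv[1])  # e.g. results.tap
```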
@@ -27,7 +27,7 @@ git config --global user.name "A robot"
 
 # Fetch and merge. If it doesn't work, it will raise due to set -e.
 git fetch -u origin $GITBASE
-git merge --no-edit origin/$GITBASE
+git merge --no-edit --no-commit origin/$GITBASE
 
 # Show what we are after.
 git --no-pager show -s
@@ -1,226 +0,0 @@
-env:
-  CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
-
-steps:
-
-  - command:
-      - "python -m pip install tox"
-      - "tox -e check_codestyle"
-    label: "\U0001F9F9 Check Style"
-    plugins:
-      - docker#v3.0.1:
-          image: "python:3.6"
-
-  - command:
-      - "python -m pip install tox"
-      - "tox -e packaging"
-    label: "\U0001F9F9 packaging"
-    plugins:
-      - docker#v3.0.1:
-          image: "python:3.6"
-
-  - command:
-      - "python -m pip install tox"
-      - "tox -e check_isort"
-    label: "\U0001F9F9 isort"
-    plugins:
-      - docker#v3.0.1:
-          image: "python:3.6"
-
-  - command:
-      - "python -m pip install tox"
-      - "scripts-dev/check-newsfragment"
-    label: ":newspaper: Newsfile"
-    branches: "!master !develop !release-*"
-    plugins:
-      - docker#v3.0.1:
-          image: "python:3.6"
-          propagate-environment: true
-
-  - command:
-      - "python -m pip install tox"
-      - "tox -e check-sampleconfig"
-    label: "\U0001F9F9 check-sample-config"
-    plugins:
-      - docker#v3.0.1:
-          image: "python:3.6"
-
-  - wait
-
-
-  - command:
-      - "python -m pip install tox"
-      - "tox -e py35-old,codecov"
-    label: ":python: 3.5 / SQLite / Old Deps"
-    env:
-      TRIAL_FLAGS: "-j 2"
-    plugins:
-      - docker#v3.0.1:
-          image: "python:3.5"
-          propagate-environment: true
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
-  - command:
-      - "python -m pip install tox"
-      - "tox -e py35,codecov"
-    label: ":python: 3.5 / SQLite"
-    env:
-      TRIAL_FLAGS: "-j 2"
-    plugins:
-      - docker#v3.0.1:
-          image: "python:3.5"
-          propagate-environment: true
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
-  - command:
-      - "python -m pip install tox"
-      - "tox -e py36,codecov"
-    label: ":python: 3.6 / SQLite"
-    env:
-      TRIAL_FLAGS: "-j 2"
-    plugins:
-      - docker#v3.0.1:
-          image: "python:3.6"
-          propagate-environment: true
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
-  - command:
-      - "python -m pip install tox"
-      - "tox -e py37,codecov"
-    label: ":python: 3.7 / SQLite"
-    env:
-      TRIAL_FLAGS: "-j 2"
-    plugins:
-      - docker#v3.0.1:
-          image: "python:3.7"
-          propagate-environment: true
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
-  - label: ":python: 3.5 / :postgres: 9.5"
-    env:
-      TRIAL_FLAGS: "-j 4"
-    command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
-    plugins:
-      - docker-compose#v2.1.0:
-          run: testenv
-          config:
-            - .buildkite/docker-compose.py35.pg95.yaml
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
-  - label: ":python: 3.7 / :postgres: 9.5"
-    env:
-      TRIAL_FLAGS: "-j 4"
-    command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
-    plugins:
-      - docker-compose#v2.1.0:
-          run: testenv
-          config:
-            - .buildkite/docker-compose.py37.pg95.yaml
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
-  - label: ":python: 3.7 / :postgres: 11"
-    env:
-      TRIAL_FLAGS: "-j 4"
-    command:
-      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
-    plugins:
-      - docker-compose#v2.1.0:
-          run: testenv
-          config:
-            - .buildkite/docker-compose.py37.pg11.yaml
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
-
-  - label: "SyTest - :python: 3.5 / SQLite / Monolith"
-    agents:
-      queue: "medium"
-    command:
-      - "bash .buildkite/merge_base_branch.sh"
-      - "bash .buildkite/synapse_sytest.sh"
-    plugins:
-      - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:py35"
-          propagate-environment: true
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
-  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
-    agents:
-      queue: "medium"
-    env:
-      POSTGRES: "1"
-    command:
-      - "bash .buildkite/merge_base_branch.sh"
-      - "bash .buildkite/synapse_sytest.sh"
-    plugins:
-      - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:py35"
-          propagate-environment: true
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
-
-  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
-    agents:
-      queue: "medium"
-    env:
-      POSTGRES: "1"
-      WORKERS: "1"
-    command:
-      - "bash .buildkite/merge_base_branch.sh"
-      - "bash .buildkite/synapse_sytest.sh"
-    plugins:
-      - docker#v3.0.1:
-          image: "matrixdotorg/sytest-synapse:py35"
-          propagate-environment: true
-    soft_fail: true
-    retry:
-      automatic:
-        - exit_status: -1
-          limit: 2
-        - exit_status: 2
-          limit: 2
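Every step in the deleted pipeline above retried automatically on exit statuses -1 and 2, at most twice. A sketch of that retry policy outside of Buildkite; treating -1 as an agent-level "lost job" status is an assumption, and in plain `subprocess` a negative return code instead means "killed by a signal":

```python
import subprocess

RETRYABLE = {-1, 2}  # mirrors the `retry: automatic:` stanzas above


def run_with_retry(cmd, limit=2):
    # Re-run cmd while it exits with a retryable status, at most `limit` retries.
    status = subprocess.run(cmd).returncode
    for _ in range(limit):
        if status not in RETRYABLE:
            break
        status = subprocess.run(cmd).returncode
    return status


if __name__ == "__main__":
    print(run_with_retry(["python", "-m", "pip", "install", "tox"]))
```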
@@ -1,145 +0,0 @@
-#!/bin/bash
-#
-# Fetch sytest, and then run the tests for synapse. The entrypoint for the
-# sytest-synapse docker images.
-
-set -ex
-
-if [ -n "$BUILDKITE" ]
-then
-    SYNAPSE_DIR=`pwd`
-else
-    SYNAPSE_DIR="/src"
-fi
-
-# Attempt to find a sytest to use.
-# If /sytest exists, it means that a SyTest checkout has been mounted into the Docker image.
-if [ -d "/sytest" ]; then
-    # If the user has mounted in a SyTest checkout, use that.
-    echo "Using local sytests..."
-
-    # create ourselves a working directory and dos2unix some scripts therein
-    mkdir -p /work/jenkins
-    for i in install-deps.pl run-tests.pl tap-to-junit-xml.pl jenkins/prep_sytest_for_postgres.sh; do
-        dos2unix -n "/sytest/$i" "/work/$i"
-    done
-    ln -sf /sytest/tests /work
-    ln -sf /sytest/keys /work
-    SYTEST_LIB="/sytest/lib"
-else
-    if [ -n "$BUILDKITE_BRANCH" ]
-    then
-        branch_name=$BUILDKITE_BRANCH
-    else
-        # Otherwise, try and find out what the branch that the Synapse checkout is using. Fall back to develop if it's not a branch.
-        branch_name="$(git --git-dir=/src/.git symbolic-ref HEAD 2>/dev/null)" || branch_name="develop"
-    fi
-
-    # Try and fetch the branch
-    echo "Trying to get same-named sytest branch..."
-    wget -q https://github.com/matrix-org/sytest/archive/$branch_name.tar.gz -O sytest.tar.gz || {
-        # Probably a 404, fall back to develop
-        echo "Using develop instead..."
-        wget -q https://github.com/matrix-org/sytest/archive/develop.tar.gz -O sytest.tar.gz
-    }
-
-    mkdir -p /work
-    tar -C /work --strip-components=1 -xf sytest.tar.gz
-    SYTEST_LIB="/work/lib"
-fi
-
-cd /work
-
-# PostgreSQL setup
-if [ -n "$POSTGRES" ]
-then
-    export PGUSER=postgres
-    export POSTGRES_DB_1=pg1
-    export POSTGRES_DB_2=pg2
-
-    # Start the database
-    su -c 'eatmydata /usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres
-
-    # Use the Jenkins script to write out the configuration for a PostgreSQL using Synapse
-    jenkins/prep_sytest_for_postgres.sh
-
-    # Make the test databases for the two Synapse servers that will be spun up
-    su -c 'psql -c "CREATE DATABASE pg1;"' postgres
-    su -c 'psql -c "CREATE DATABASE pg2;"' postgres
-
-fi
-
-if [ -n "$OFFLINE" ]; then
-    # if we're in offline mode, just put synapse into the virtualenv, and
-    # hope that the deps are up-to-date.
-    #
-    # (`pip install -e` likes to reinstall setuptools even if it's already installed,
-    # so we just run setup.py explicitly.)
-    #
-    (cd $SYNAPSE_DIR && /venv/bin/python setup.py -q develop)
-else
-    # We've already created the virtualenv, but lets double check we have all
-    # deps.
-    /venv/bin/pip install -q --upgrade --no-cache-dir -e $SYNAPSE_DIR
-    /venv/bin/pip install -q --upgrade --no-cache-dir \
-        lxml psycopg2 coverage codecov tap.py
-
-    # Make sure all Perl deps are installed -- this is done in the docker build
-    # so will only install packages added since the last Docker build
-    ./install-deps.pl
-fi
-
-
-# Run the tests
->&2 echo "+++ Running tests"
-
-RUN_TESTS=(
-    perl -I "$SYTEST_LIB" ./run-tests.pl --python=/venv/bin/python --synapse-directory=$SYNAPSE_DIR --coverage -O tap --all
-)
-
-TEST_STATUS=0
-
-if [ -n "$WORKERS" ]; then
-    RUN_TESTS+=(-I Synapse::ViaHaproxy --dendron-binary=/pydron.py)
-else
-    RUN_TESTS+=(-I Synapse)
-fi
-
-"${RUN_TESTS[@]}" "$@" > results.tap || TEST_STATUS=$?
-
-if [ $TEST_STATUS -ne 0 ]; then
-    >&2 echo -e "run-tests \e[31mFAILED\e[0m: exit code $TEST_STATUS"
-else
-    >&2 echo -e "run-tests \e[32mPASSED\e[0m"
-fi
-
->&2 echo "--- Copying assets"
-
-# Copy out the logs
-mkdir -p /logs
-cp results.tap /logs/results.tap
-rsync --ignore-missing-args --min-size=1B -av server-0 server-1 /logs --include "*/" --include="*.log.*" --include="*.log" --exclude="*"
-
-# Upload coverage to codecov and upload files, if running on Buildkite
-if [ -n "$BUILDKITE" ]
-then
-    /venv/bin/coverage combine || true
-    /venv/bin/coverage xml || true
-    /venv/bin/codecov -X gcov -f coverage.xml
-
-    wget -O buildkite.tar.gz https://github.com/buildkite/agent/releases/download/v3.13.0/buildkite-agent-linux-amd64-3.13.0.tar.gz
-    tar xvf buildkite.tar.gz
-    chmod +x ./buildkite-agent
-
-    # Upload the files
-    ./buildkite-agent artifact upload "/logs/**/*.log*"
-    ./buildkite-agent artifact upload "/logs/results.tap"
-
-    if [ $TEST_STATUS -ne 0 ]; then
-        # Annotate, if failure
-        /venv/bin/python $SYNAPSE_DIR/.buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" | ./buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL"
-    fi
-fi
-
-
-exit $TEST_STATUS
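The deleted script's branch-matching download (try a SyTest branch named like the Synapse branch, fall back to `develop`) can be expressed compactly. A sketch, assuming only that the GitHub tarball URLs behave as in the `wget` logic above:

```python
import io
import tarfile
import urllib.error
import urllib.request


def fetch_sytest(branch_name):
    # Try the same-named sytest branch first, then fall back to develop,
    # mirroring the wget fallback in the script above.
    for name in (branch_name, "develop"):
        url = "https://github.com/matrix-org/sytest/archive/%s.tar.gz" % name
        try:
            data = urllib.request.urlopen(url).read()
        except urllib.error.HTTPError:
            continue  # e.g. 404: no branch with that name
        return tarfile.open(fileobj=io.BytesIO(data), mode="r:gz")
    raise RuntimeError("could not fetch sytest")
```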
.buildkite/worker-blacklist (new file, 30 lines)

@@ -0,0 +1,30 @@
+# This file serves as a blacklist for SyTest tests that we expect will fail in
+# Synapse when run under worker mode. For more details, see sytest-blacklist.
+
+Message history can be paginated
+
+Can re-join room if re-invited
+
+/upgrade creates a new room
+
+The only membership state included in an initial sync is for all the senders in the timeline
+
+Local device key changes get to remote servers
+
+If remote user leaves room we no longer receive device updates
+
+Forgotten room messages cannot be paginated
+
+Inbound federation can get public room list
+
+Members from the gap are included in gappy incr LL sync
+
+Leaves are present in non-gapped incremental syncs
+
+Old leaves are present in gapped incremental syncs
+
+User sees updates to presence from other users in the incremental sync.
+
+Gapped incremental syncs include all state changes
+
+Old members are included in gappy incr LL sync if they start speaking
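SyTest itself (a Perl project) consumes blacklist files like the one above. Purely as an illustration of the file format, the parsing rule is: one full test name per line, with blank lines and `#` comments skipped:

```python
def load_blacklist(path):
    # One test name per line; blank lines and comment lines are skipped.
    with open(path) as f:
        return {
            line.strip()
            for line in f
            if line.strip() and not line.startswith("#")
        }


blacklisted = load_blacklist(".buildkite/worker-blacklist")
assert "Message history can be paginated" in blacklisted
```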
@@ -1,5 +1,4 @@
-comment:
-  layout: "diff"
+comment: off
 
 coverage:
   status:
@@ -1,7 +1,8 @@
 [run]
 branch = True
 parallel = True
-include = synapse/*
+include=$TOP/synapse/*
+data_file = $TOP/.coverage
 
 [report]
 precision = 2
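With `parallel = True`, each test process writes its own data file next to `data_file`, and CI later merges them (the sytest script above runs `coverage combine` and `coverage xml`). The same merge via the coverage API; a sketch, assuming the `.coveragerc` above is in the working directory:

```python
import coverage

# Merge the per-process .coverage.* files that `parallel = True` produces,
# then emit the XML report that codecov consumes.
cov = coverage.Coverage()  # picks up .coveragerc automatically
cov.combine()
cov.xml_report(outfile="coverage.xml")
```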
.gitignore (6 changed lines)

@@ -16,9 +16,11 @@ _trial_temp*/
 /*.log
 /*.log.config
 /*.pid
+/.python-version
 /*.signing.key
 /env/
 /homeserver*.yaml
+/logs
 /media_store/
 /uploads
@@ -28,8 +30,9 @@ _trial_temp*/
 /.vscode/
 
 # build products
-/.coverage*
+!/.coveragerc
+/.coverage*
 /.mypy_cache/
 /.tox
 /build/
 /coverage.*
@@ -37,4 +40,3 @@ _trial_temp*/
 /docs/build/
 /htmlcov
 /pip-wheel-metadata/
CHANGES.md (251 changed lines)

@@ -1,3 +1,254 @@
+Synapse 1.3.1 (2019-08-17)
+==========================
+
+Features
+--------
+
+- Drop hard dependency on `sdnotify` python package. ([\#5871](https://github.com/matrix-org/synapse/issues/5871))
+
+
+Bugfixes
+--------
+
+- Fix startup issue (hang on ACME provisioning) due to ordering of Twisted reactor startup. Thanks to @chrismoos for supplying the fix. ([\#5867](https://github.com/matrix-org/synapse/issues/5867))
+
+
+Synapse 1.3.0 (2019-08-15)
+==========================
+
+Bugfixes
+--------
+
+- Fix 500 Internal Server Error on `publicRooms` when the public room list was
+  cached. ([\#5851](https://github.com/matrix-org/synapse/issues/5851))
+
+
+Synapse 1.3.0rc1 (2019-08-13)
+==========================
+
+Features
+--------
+
+- Use `M_USER_DEACTIVATED` instead of `M_UNKNOWN` for errcode when a deactivated user attempts to login. ([\#5686](https://github.com/matrix-org/synapse/issues/5686))
+- Add sd_notify hooks to ease systemd integration and allows usage of Type=Notify. ([\#5732](https://github.com/matrix-org/synapse/issues/5732))
+- Synapse will no longer serve any media repo admin endpoints when `enable_media_repo` is set to False in the configuration. If a media repo worker is used, the admin APIs relating to the media repo will be served from it instead. ([\#5754](https://github.com/matrix-org/synapse/issues/5754), [\#5848](https://github.com/matrix-org/synapse/issues/5848))
+- Synapse can now be configured to not join remote rooms of a given "complexity" (currently, state events) over federation. This option can be used to prevent adverse performance on resource-constrained homeservers. ([\#5783](https://github.com/matrix-org/synapse/issues/5783))
+- Allow defining HTML templates to serve the user on account renewal attempt when using the account validity feature. ([\#5807](https://github.com/matrix-org/synapse/issues/5807))
+
+
+Bugfixes
+--------
+
+- Fix UISIs during homeserver outage. ([\#5693](https://github.com/matrix-org/synapse/issues/5693), [\#5789](https://github.com/matrix-org/synapse/issues/5789))
+- Fix stack overflow in server key lookup code. ([\#5724](https://github.com/matrix-org/synapse/issues/5724))
+- start.sh no longer uses deprecated cli option. ([\#5725](https://github.com/matrix-org/synapse/issues/5725))
+- Log when we receive an event receipt from an unexpected origin. ([\#5743](https://github.com/matrix-org/synapse/issues/5743))
+- Fix debian packaging scripts to correctly build sid packages. ([\#5775](https://github.com/matrix-org/synapse/issues/5775))
+- Correctly handle redactions of redactions. ([\#5788](https://github.com/matrix-org/synapse/issues/5788))
+- Return 404 instead of 403 when accessing /rooms/{roomId}/event/{eventId} for an event without the appropriate permissions. ([\#5798](https://github.com/matrix-org/synapse/issues/5798))
+- Fix check that tombstone is a state event in push rules. ([\#5804](https://github.com/matrix-org/synapse/issues/5804))
+- Fix error when trying to login as a deactivated user when using a worker to handle login. ([\#5806](https://github.com/matrix-org/synapse/issues/5806))
+- Fix bug where user `/sync` stream could get wedged in rare circumstances. ([\#5825](https://github.com/matrix-org/synapse/issues/5825))
+- The purge_remote_media.sh script was fixed. ([\#5839](https://github.com/matrix-org/synapse/issues/5839))
+
+
+Deprecations and Removals
+-------------------------
+
+- Synapse now no longer accepts the `-v`/`--verbose`, `-f`/`--log-file`, or `--log-config` command line flags, and removes the deprecated `verbose` and `log_file` configuration file options. Users of these options should migrate their options into the dedicated log configuration. ([\#5678](https://github.com/matrix-org/synapse/issues/5678), [\#5729](https://github.com/matrix-org/synapse/issues/5729))
+- Remove non-functional 'expire_access_token' setting. ([\#5782](https://github.com/matrix-org/synapse/issues/5782))
+
+
+Internal Changes
+----------------
+
+- Make Jaeger fully configurable. ([\#5694](https://github.com/matrix-org/synapse/issues/5694))
+- Add precautionary measures to prevent future abuse of `window.opener` in default welcome page. ([\#5695](https://github.com/matrix-org/synapse/issues/5695))
+- Reduce database IO usage by optimising queries for current membership. ([\#5706](https://github.com/matrix-org/synapse/issues/5706), [\#5738](https://github.com/matrix-org/synapse/issues/5738), [\#5746](https://github.com/matrix-org/synapse/issues/5746), [\#5752](https://github.com/matrix-org/synapse/issues/5752), [\#5770](https://github.com/matrix-org/synapse/issues/5770), [\#5774](https://github.com/matrix-org/synapse/issues/5774), [\#5792](https://github.com/matrix-org/synapse/issues/5792), [\#5793](https://github.com/matrix-org/synapse/issues/5793))
+- Improve caching when fetching `get_filtered_current_state_ids`. ([\#5713](https://github.com/matrix-org/synapse/issues/5713))
+- Don't accept opentracing data from clients. ([\#5715](https://github.com/matrix-org/synapse/issues/5715))
+- Speed up PostgreSQL unit tests in CI. ([\#5717](https://github.com/matrix-org/synapse/issues/5717))
+- Update the coding style document. ([\#5719](https://github.com/matrix-org/synapse/issues/5719))
+- Improve database query performance when recording retry intervals for remote hosts. ([\#5720](https://github.com/matrix-org/synapse/issues/5720))
+- Add a set of opentracing utils. ([\#5722](https://github.com/matrix-org/synapse/issues/5722))
+- Cache result of get_version_string to reduce overhead of `/version` federation requests. ([\#5730](https://github.com/matrix-org/synapse/issues/5730))
+- Return 'user_type' in admin API user endpoints results. ([\#5731](https://github.com/matrix-org/synapse/issues/5731))
+- Don't package the sytest test blacklist file. ([\#5733](https://github.com/matrix-org/synapse/issues/5733))
+- Replace uses of returnValue with plain return, as returnValue is not needed on Python 3. ([\#5736](https://github.com/matrix-org/synapse/issues/5736))
+- Blacklist some flakey tests in worker mode. ([\#5740](https://github.com/matrix-org/synapse/issues/5740))
+- Fix some error cases in the caching layer. ([\#5749](https://github.com/matrix-org/synapse/issues/5749))
+- Add a prometheus metric for pending cache lookups. ([\#5750](https://github.com/matrix-org/synapse/issues/5750))
+- Stop trying to fetch events with event_id=None. ([\#5753](https://github.com/matrix-org/synapse/issues/5753))
+- Convert RedactionTestCase to modern test style. ([\#5768](https://github.com/matrix-org/synapse/issues/5768))
+- Allow looping calls to be given arguments. ([\#5780](https://github.com/matrix-org/synapse/issues/5780))
+- Set the logs emitted when checking typing and presence timeouts to DEBUG level, not INFO. ([\#5785](https://github.com/matrix-org/synapse/issues/5785))
+- Remove DelayedCall debugging from the test suite, as it is no longer required in the vast majority of Synapse's tests. ([\#5787](https://github.com/matrix-org/synapse/issues/5787))
+- Remove some spurious exceptions from the logs where we failed to talk to a remote server. ([\#5790](https://github.com/matrix-org/synapse/issues/5790))
+- Improve performance when making `.well-known` requests by sharing the SSL options between requests. ([\#5794](https://github.com/matrix-org/synapse/issues/5794))
+- Disable codecov GitHub comments on PRs. ([\#5796](https://github.com/matrix-org/synapse/issues/5796))
+- Don't allow clients to send tombstone events that reference the room it's sent in. ([\#5801](https://github.com/matrix-org/synapse/issues/5801))
+- Deny redactions of events sent in a different room. ([\#5802](https://github.com/matrix-org/synapse/issues/5802))
+- Deny sending well known state types as non-state events. ([\#5805](https://github.com/matrix-org/synapse/issues/5805))
+- Handle incorrectly encoded query params correctly by returning a 400. ([\#5808](https://github.com/matrix-org/synapse/issues/5808))
+- Handle pusher being deleted during processing rather than logging an exception. ([\#5809](https://github.com/matrix-org/synapse/issues/5809))
+- Return 502 not 500 when failing to reach any remote server. ([\#5810](https://github.com/matrix-org/synapse/issues/5810))
+- Reduce global pauses in the events stream caused by expensive state resolution during persistence. ([\#5826](https://github.com/matrix-org/synapse/issues/5826))
+- Add a lower bound to well-known lookup cache time to avoid repeated lookups. ([\#5836](https://github.com/matrix-org/synapse/issues/5836))
+- Whitelist history visbility sytests in worker mode tests. ([\#5843](https://github.com/matrix-org/synapse/issues/5843))
+
+
+Synapse 1.2.1 (2019-07-26)
+==========================
+
+Security update
+---------------
+
+This release includes *four* security fixes:
+
+- Prevent an attack where a federated server could send redactions for arbitrary events in v1 and v2 rooms. ([\#5767](https://github.com/matrix-org/synapse/issues/5767))
+- Prevent a denial-of-service attack where cycles of redaction events would make Synapse spin infinitely. Thanks to `@lrizika:matrix.org` for identifying and responsibly disclosing this issue. ([0f2ecb961](https://github.com/matrix-org/synapse/commit/0f2ecb961))
+- Prevent an attack where users could be joined or parted from public rooms without their consent. Thanks to @dylangerdaly for identifying and responsibly disclosing this issue. ([\#5744](https://github.com/matrix-org/synapse/issues/5744))
+- Fix a vulnerability where a federated server could spoof read-receipts from
+  users on other servers. Thanks to @dylangerdaly for identifying this issue too. ([\#5743](https://github.com/matrix-org/synapse/issues/5743))
+
+Additionally, the following fix was in Synapse **1.2.0**, but was not correctly
+identified during the original release:
+
+- It was possible for a room moderator to send a redaction for an `m.room.create` event, which would downgrade the room to version 1. Thanks to `/dev/ponies` for identifying and responsibly disclosing this issue! ([\#5701](https://github.com/matrix-org/synapse/issues/5701))
+
+Synapse 1.2.0 (2019-07-25)
+==========================
+
+No significant changes.
+
+
+Synapse 1.2.0rc2 (2019-07-24)
+=============================
+
+Bugfixes
+--------
+
+- Fix a regression introduced in v1.2.0rc1 which led to incorrect labels on some prometheus metrics. ([\#5734](https://github.com/matrix-org/synapse/issues/5734))
+
+
+Synapse 1.2.0rc1 (2019-07-22)
+=============================
+
+Security fixes
+--------------
+
+This update included a security fix which was initially incorrectly flagged as
+a regular bug fix.
+
+- It was possible for a room moderator to send a redaction for an `m.room.create` event, which would downgrade the room to version 1. Thanks to `/dev/ponies` for identifying and responsibly disclosing this issue! ([\#5701](https://github.com/matrix-org/synapse/issues/5701))
+
+Features
+--------
+
+- Add support for opentracing. ([\#5544](https://github.com/matrix-org/synapse/issues/5544), [\#5712](https://github.com/matrix-org/synapse/issues/5712))
+- Add ability to pull all locally stored events out of synapse that a particular user can see. ([\#5589](https://github.com/matrix-org/synapse/issues/5589))
+- Add a basic admin command app to allow server operators to run Synapse admin commands separately from the main production instance. ([\#5597](https://github.com/matrix-org/synapse/issues/5597))
+- Add `sender` and `origin_server_ts` fields to `m.replace`. ([\#5613](https://github.com/matrix-org/synapse/issues/5613))
+- Add default push rule to ignore reactions. ([\#5623](https://github.com/matrix-org/synapse/issues/5623))
+- Include the original event when asking for its relations. ([\#5626](https://github.com/matrix-org/synapse/issues/5626))
+- Implement `session_lifetime` configuration option, after which access tokens will expire. ([\#5660](https://github.com/matrix-org/synapse/issues/5660))
+- Return "This account has been deactivated" when a deactivated user tries to login. ([\#5674](https://github.com/matrix-org/synapse/issues/5674))
+- Enable aggregations support by default ([\#5714](https://github.com/matrix-org/synapse/issues/5714))
+
+
+Bugfixes
+--------
+
+- Fix 'utime went backwards' errors on daemonization. ([\#5609](https://github.com/matrix-org/synapse/issues/5609))
+- Various minor fixes to the federation request rate limiter. ([\#5621](https://github.com/matrix-org/synapse/issues/5621))
+- Forbid viewing relations on an event once it has been redacted. ([\#5629](https://github.com/matrix-org/synapse/issues/5629))
+- Fix requests to the `/store_invite` endpoint of identity servers being sent in the wrong format. ([\#5638](https://github.com/matrix-org/synapse/issues/5638))
+- Fix newly-registered users not being able to lookup their own profile without joining a room. ([\#5644](https://github.com/matrix-org/synapse/issues/5644))
+- Fix bug in #5626 that prevented the original_event field from actually having the contents of the original event in a call to `/relations`. ([\#5654](https://github.com/matrix-org/synapse/issues/5654))
+- Fix 3PID bind requests being sent to identity servers as `application/x-form-www-urlencoded` data, which is deprecated. ([\#5658](https://github.com/matrix-org/synapse/issues/5658))
+- Fix some problems with authenticating redactions in recent room versions. ([\#5699](https://github.com/matrix-org/synapse/issues/5699), [\#5700](https://github.com/matrix-org/synapse/issues/5700), [\#5707](https://github.com/matrix-org/synapse/issues/5707))
+
+
+Updates to the Docker image
+---------------------------
+
+- Base Docker image on a newer Alpine Linux version (3.8 -> 3.10). ([\#5619](https://github.com/matrix-org/synapse/issues/5619))
+- Add missing space in default logging file format generated by the Docker image. ([\#5620](https://github.com/matrix-org/synapse/issues/5620))
+
+
+Improved Documentation
+----------------------
+
+- Add information about nginx normalisation to reverse_proxy.rst. Contributed by @skalarproduktraum - thanks! ([\#5397](https://github.com/matrix-org/synapse/issues/5397))
+- --no-pep517 should be --no-use-pep517 in the documentation to setup the development environment. ([\#5651](https://github.com/matrix-org/synapse/issues/5651))
+- Improvements to Postgres setup instructions. Contributed by @Lrizika - thanks! ([\#5661](https://github.com/matrix-org/synapse/issues/5661))
+- Minor tweaks to postgres documentation. ([\#5675](https://github.com/matrix-org/synapse/issues/5675))
+
+
+Deprecations and Removals
+-------------------------
+
+- Remove support for the `invite_3pid_guest` configuration setting. ([\#5625](https://github.com/matrix-org/synapse/issues/5625))
+
+
+Internal Changes
+----------------
+
+- Move logging code out of `synapse.util` and into `synapse.logging`. ([\#5606](https://github.com/matrix-org/synapse/issues/5606), [\#5617](https://github.com/matrix-org/synapse/issues/5617))
+- Add a blacklist file to the repo to blacklist certain sytests from failing CI. ([\#5611](https://github.com/matrix-org/synapse/issues/5611))
+- Make runtime errors surrounding password reset emails much clearer. ([\#5616](https://github.com/matrix-org/synapse/issues/5616))
+- Remove dead code for persiting outgoing federation transactions. ([\#5622](https://github.com/matrix-org/synapse/issues/5622))
+- Add `lint.sh` to the scripts-dev folder which will run all linting steps required by CI. ([\#5627](https://github.com/matrix-org/synapse/issues/5627))
+- Move RegistrationHandler.get_or_create_user to test code. ([\#5628](https://github.com/matrix-org/synapse/issues/5628))
+- Add some more common python virtual-environment paths to the black exclusion list. ([\#5630](https://github.com/matrix-org/synapse/issues/5630))
+- Some counter metrics exposed over Prometheus have been renamed, with the old names preserved for backwards compatibility and deprecated. See `docs/metrics-howto.rst` for details. ([\#5636](https://github.com/matrix-org/synapse/issues/5636))
+- Unblacklist some user_directory sytests. ([\#5637](https://github.com/matrix-org/synapse/issues/5637))
+- Factor out some redundant code in the login implementation. ([\#5639](https://github.com/matrix-org/synapse/issues/5639))
+- Update ModuleApi to avoid register(generate_token=True). ([\#5640](https://github.com/matrix-org/synapse/issues/5640))
+- Remove access-token support from `RegistrationHandler.register`, and rename it. ([\#5641](https://github.com/matrix-org/synapse/issues/5641))
- Remove access-token support from `RegistrationStore.register`, and rename it. ([\#5642](https://github.com/matrix-org/synapse/issues/5642))
+- Improve logging for auto-join when a new user is created. ([\#5643](https://github.com/matrix-org/synapse/issues/5643))
+- Remove unused and unnecessary check for FederationDeniedError in _exception_to_failure. ([\#5645](https://github.com/matrix-org/synapse/issues/5645))
+- Fix a small typo in a code comment. ([\#5655](https://github.com/matrix-org/synapse/issues/5655))
+- Clean up exception handling around client access tokens. ([\#5656](https://github.com/matrix-org/synapse/issues/5656))
+- Add a mechanism for per-test homeserver configuration in the unit tests. ([\#5657](https://github.com/matrix-org/synapse/issues/5657))
+- Inline issue_access_token. ([\#5659](https://github.com/matrix-org/synapse/issues/5659))
+- Update the sytest BuildKite configuration to checkout Synapse in `/src`. ([\#5664](https://github.com/matrix-org/synapse/issues/5664))
+- Add a `docker` type to the towncrier configuration. ([\#5673](https://github.com/matrix-org/synapse/issues/5673))
+- Convert `synapse.federation.transport.server` to `async`. Might improve some stack traces. ([\#5689](https://github.com/matrix-org/synapse/issues/5689))
+- Documentation for opentracing. ([\#5703](https://github.com/matrix-org/synapse/issues/5703))
+
+
+Synapse 1.1.0 (2019-07-04)
+==========================
+
+As of v1.1.0, Synapse no longer supports Python 2, nor Postgres version 9.4.
+See the [upgrade notes](UPGRADE.rst#upgrading-to-v110) for more details.
+
+This release also deprecates the use of environment variables to configure the
+docker image. See the [docker README](https://github.com/matrix-org/synapse/blob/release-v1.1.0/docker/README.md#legacy-dynamic-configuration-file-support)
+for more details.
+
+No changes since 1.1.0rc2.
+
+
+Synapse 1.1.0rc2 (2019-07-03)
+=============================
+
+Bugfixes
+--------
+
+- Fix regression in 1.1rc1 where OPTIONS requests to the media repo would fail. ([\#5593](https://github.com/matrix-org/synapse/issues/5593))
+- Removed the `SYNAPSE_SMTP_*` docker container environment variables. Using these environment variables prevented the docker container from starting in Synapse v1.0, even though they didn't actually allow any functionality anyway. ([\#5596](https://github.com/matrix-org/synapse/issues/5596))
+- Fix a number of "Starting txn from sentinel context" warnings. ([\#5605](https://github.com/matrix-org/synapse/issues/5605))
+
+
+Internal Changes
+----------------
+
+- Update github templates. ([\#5552](https://github.com/matrix-org/synapse/issues/5552))
+
+
 Synapse 1.1.0rc1 (2019-07-02)
 =============================
 
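Several entries above (and the changelog.d files further down) concern `.well-known` lookup caching: a lower bound on cache time, and retrying before expiry so a briefly-down remote does not invalidate a previously good result. A sketch of the grace-period idea only; all names and the one-hour figure are hypothetical, not Synapse's actual implementation:

```python
import time

WELL_KNOWN_GRACE_PERIOD = 3600  # hypothetical: start refreshing this long before expiry


class WellKnownCacheEntry:
    def __init__(self, result, ttl):
        self.result = result
        self.expires_at = time.time() + ttl

    def needs_refresh(self):
        # Begin re-fetching before expiry, so a failed refresh can fall
        # back to the still-valid cached result.
        return time.time() > self.expires_at - WELL_KNOWN_GRACE_PERIOD

    def usable(self):
        return time.time() < self.expires_at
```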
@@ -30,11 +30,10 @@ use github's pull request workflow to review the contribution, and either ask
 you to make any refinements needed or merge it and make them ourselves. The
 changes will then land on master when we next do a release.
 
-We use `CircleCI <https://circleci.com/gh/matrix-org>`_ and `Buildkite
-<https://buildkite.com/matrix-dot-org/synapse>`_ for continuous integration.
-Buildkite builds need to be authorised by a maintainer. If your change breaks
-the build, this will be shown in GitHub, so please keep an eye on the pull
-request for feedback.
+We use `Buildkite <https://buildkite.com/matrix-dot-org/synapse>`_ for
+continuous integration. Buildkite builds need to be authorised by a
+maintainer. If your change breaks the build, this will be shown in GitHub, so
+please keep an eye on the pull request for feedback.
 
 To run unit tests in a local development environment, you can use:
 
@@ -70,13 +69,21 @@ All changes, even minor ones, need a corresponding changelog / newsfragment
 entry. These are managed by Towncrier
 (https://github.com/hawkowl/towncrier).
 
-To create a changelog entry, make a new file in the ``changelog.d``
-file named in the format of ``PRnumber.type``. The type can be
-one of ``feature``, ``bugfix``, ``removal`` (also used for
-deprecations), or ``misc`` (for internal-only changes).
+To create a changelog entry, make a new file in the ``changelog.d`` file named
+in the format of ``PRnumber.type``. The type can be one of the following:
 
-The content of the file is your changelog entry, which can contain Markdown
-formatting. The entry should end with a full stop ('.') for consistency.
+* ``feature``.
+* ``bugfix``.
+* ``docker`` (for updates to the Docker image).
+* ``doc`` (for updates to the documentation).
+* ``removal`` (also used for deprecations).
+* ``misc`` (for internal-only changes).
+
+The content of the file is your changelog entry, which should be a short
+description of your change in the same style as the rest of our `changelog
+<https://github.com/matrix-org/synapse/blob/master/CHANGES.md>`_. The file can
+contain Markdown formatting, and should end with a full stop ('.') for
+consistency.
 
 Adding credits to the changelog is encouraged, we value your
 contributions and would like to have you shouted out in the release notes!
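Creating a newsfragment as the updated CONTRIBUTING text describes is just writing a one-line file; for example (the PR number here is hypothetical):

```python
from pathlib import Path

# A newsfragment for a hypothetical PR 5999 of type "misc", following the
# changelog.d naming convention described above.
entry = Path("changelog.d") / "5999.misc"
entry.write_text("Tidy up the changelog instructions in CONTRIBUTING.rst.\n")
```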
INSTALL.md (11 changed lines)

@@ -36,7 +36,7 @@ that your email address is probably `user@example.com` rather than
 System requirements:
 
 - POSIX-compliant system (tested on Linux & OS X)
-- Python 3.5, 3.6, 3.7, or 2.7
+- Python 3.5, 3.6, or 3.7
 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
 
 Synapse is written in Python but some of the libraries it uses are written in
@@ -419,12 +419,11 @@ If Synapse is not configured with an SMTP server, password reset via email will
 
 ## Registering a user
 
-You will need at least one user on your server in order to use a Matrix
-client. Users can be registered either via a Matrix client, or via a
-commandline script.
+The easiest way to create a new user is to do so from a client like [Riot](https://riot.im).
 
-To get started, it is easiest to use the command line to register new
-users. This can be done as follows:
+Alternatively you can do so from the command line if you have installed via pip.
+
+This can be done as follows:
 
 ```
 $ source ~/synapse/env/bin/activate
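The snippet above is truncated after activating the virtualenv. Registration itself is done with the `register_new_matrix_user` script that pip installs alongside Synapse; driving it from Python is a one-liner (the flags shown are assumptions based on that script's usual interface; run it with `--help` to confirm):

```python
import subprocess

subprocess.run(
    [
        "register_new_matrix_user",
        "-c", "homeserver.yaml",   # server config, for the registration shared secret
        "http://localhost:8008",   # the local client-server API
    ],
    check=True,
)
```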
@@ -33,6 +33,7 @@ exclude Dockerfile
 exclude .dockerignore
 exclude test_postgresql.sh
 exclude .editorconfig
+exclude sytest-blacklist
 
 include pyproject.toml
 recursive-include changelog.d *
@@ -272,7 +272,7 @@ to install using pip and a virtualenv::
 
    virtualenv -p python3 env
    source env/bin/activate
-   python -m pip install --no-pep-517 -e .[all]
+   python -m pip install --no-use-pep517 -e .[all]
 
 This will run a process of downloading and installing all the needed
 dependencies into a virtual env.
@@ -49,6 +49,13 @@ returned by the Client-Server API:
    # configured on port 443.
    curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
 
+Upgrading to v1.2.0
+===================
+
+Some counter metrics have been renamed, with the old names deprecated. See
+`the metrics documentation <docs/metrics-howto.rst#renaming-of-metrics--deprecation-of-old-names-in-12>`_
+for details.
+
 Upgrading to v1.1.0
 ===================
 
(deleted changelog.d entry)
@@ -1 +0,0 @@
-Update github templates.
New changelog.d files (one line each):

- changelog.d/5633.bugfix: Don't create broken room when power_level_content_override.users does not contain creator_id.
- changelog.d/5680.misc: Lay the groundwork for structured logging output.
- changelog.d/5771.feature: Make Opentracing work in worker mode.
- changelog.d/5776.misc: Update opentracing docs to use the unified `trace` method.
- changelog.d/5844.misc: Retry well-known lookup before the cache expires, giving a grace period where the remote well-known can be down but we still use the old result.
- changelog.d/5845.feature: Add an admin API to purge old rooms from the database.
- changelog.d/5850.feature: Add retry to well-known lookups if we have recently seen a valid well-known record for the server.
- changelog.d/5852.feature: Pass opentracing contexts between servers when transmitting EDUs.
- changelog.d/5855.misc: Opentracing for room and e2e keys.
- changelog.d/5856.feature: Add a tag recording a request's authenticated entity and corresponding servlet in opentracing.
- changelog.d/5857.bugfix: Fix database index so that different backup versions can have the same sessions.
- changelog.d/5859.feature: Add unstable support for MSC2197 (filtered search requests over federation), in order to allow upcoming room directory query performance improvements.
- changelog.d/5860.misc: Remove log line for debugging issue #5407.
- changelog.d/5863.bugfix: Fix Synapse looking for config options `password_reset_failure_template` and `password_reset_success_template`, when they are actually `password_reset_template_failure_html`, `password_reset_template_success_html`.
- changelog.d/5864.feature: Correctly retry all hosts returned from SRV when we fail to connect.
- changelog.d/5877.removal: Remove shared secret registration from client/r0/register endpoint. Contributed by Awesome Technologies Innovationslabor GmbH.
- changelog.d/5878.feature: Add admin API endpoint for setting whether or not a user is a server administrator.
- changelog.d/5885.bugfix: Fix stack overflow when recovering an appservice which had an outage.
- changelog.d/5886.misc: Refactor the Appservice scheduler code.
- changelog.d/5893.misc: Drop some unused tables.
- changelog.d/5894.misc: Add missing index on users_in_public_rooms to improve the performance of directory queries.
- changelog.d/5895.feature: Add config option to sign remote key query responses with a separate key.
- changelog.d/5896.misc: Improve the logging when we have an error when fetching signing keys.
- changelog.d/5897.feature: Switch to the v2 lookup API for 3PID invites.
- changelog.d/5900.feature: Add support for config templating.
- changelog.d/5902.feature: Users with the type of "support" or "bot" are no longer required to consent.
- changelog.d/5904.feature: Let synctl accept a directory of config files.
- changelog.d/5906.feature: Increase max display name size to 256.
- changelog.d/5909.misc: Fix error message which referred to public_base_url instead of public_baseurl. Thanks to @aaronraimist for the fix!
- changelog.d/5911.misc: Add support for database engine-specific schema deltas, based on file extension.
- changelog.d/5914.feature: Add admin API endpoint for getting whether or not a user is a server administrator.
- changelog.d/5920.bugfix: Fix a cache-invalidation bug for worker-based deployments.
- changelog.d/5922.misc: Update Buildkite pipeline to use plugins instead of buildkite-agent commands.
- changelog.d/5926.misc: Add link in sample config to the logging config schema.
- changelog.d/5930.misc: Add temporary flag to /versions in unstable_features to indicate this Synapse supports receiving id_access_token parameters on calls to identity server-proxying endpoints.
- changelog.d/5931.misc: Remove unnecessary parentheses in return statements.
- changelog.d/5938.misc: Remove unused jenkins/prepare_sytest.sh file.
- changelog.d/5943.misc: Move Buildkite pipeline config to the pipelines repo.
- changelog.d/5953.misc: Update INSTALL.md to say that Python 2 is no longer supported.
@@ -1,7 +1,7 @@
# Example log_config file for synapse. To enable, point `log_config` to it in
# `homeserver.yaml`, and restart synapse.
#
# This configuration will produce similar results to the defaults within
# synapse, but can be edited to give more flexibility.

version: 1

@@ -12,7 +12,7 @@ formatters:

filters:
    context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
        request: ""

handlers:

@@ -35,7 +35,7 @@ handlers:

root:
    level: INFO
    handlers: [console] # to use file handler instead, switch to [file]

loggers:
    synapse:
        level: INFO

@@ -36,7 +36,7 @@ from synapse.util import origin_from_ucid

from synapse.app.homeserver import SynapseHomeServer

-# from synapse.util.logutils import log_function
+# from synapse.logging.utils import log_function

from twisted.internet import reactor, defer
from twisted.python import log

@@ -51,4 +51,4 @@ TOKEN=$(sql "SELECT token FROM access_tokens WHERE user_id='$ADMIN' ORDER BY id

# finally start pruning media:
###############################################################################
set -x # for debugging the generated string
-curl --header "Authorization: Bearer $TOKEN" -v POST "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"
+curl --header "Authorization: Bearer $TOKEN" -X POST "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"

@@ -4,7 +4,8 @@ After=matrix-synapse.service
BindsTo=matrix-synapse.service

[Service]
-Type=simple
+Type=notify
+NotifyAccess=main
User=matrix-synapse
WorkingDirectory=/var/lib/matrix-synapse
EnvironmentFile=/etc/default/matrix-synapse

@@ -2,7 +2,8 @@
Description=Synapse Matrix Homeserver

[Service]
-Type=simple
+Type=notify
+NotifyAccess=main
User=matrix-synapse
WorkingDirectory=/var/lib/matrix-synapse
EnvironmentFile=/etc/default/matrix-synapse

@@ -8,7 +8,7 @@ formatters:

filters:
    context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
        request: ""

handlers:

@@ -14,7 +14,9 @@
Description=Synapse Matrix homeserver

[Service]
-Type=simple
+Type=notify
+NotifyAccess=main
ExecReload=/bin/kill -HUP $MAINPID
Restart=on-abort

User=synapse

debian/changelog (vendored)
@@ -1,9 +1,41 @@
-matrix-synapse-py3 (1.0.0+nmu1) UNRELEASED; urgency=medium
+matrix-synapse-py3 (1.3.1) stable; urgency=medium
+
+  * New synapse release 1.3.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Sat, 17 Aug 2019 09:15:49 +0100
+
+matrix-synapse-py3 (1.3.0) stable; urgency=medium
+
+  [ Andrew Morgan ]
+  * Remove libsqlite3-dev from required build dependencies.
+
+  [ Synapse Packaging team ]
+  * New synapse release 1.3.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 15 Aug 2019 12:04:23 +0100
+
+matrix-synapse-py3 (1.2.0) stable; urgency=medium
+
+  [ Amber Brown ]
+  * Update logging config defaults to match API changes in Synapse.
+
+  [ Richard van der Hoff ]
+  * Add Recommends and Depends for some libraries which you probably want.
+
+  [ Synapse Packaging team ]
+  * New synapse release 1.2.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Jul 2019 14:10:07 +0100
+
+matrix-synapse-py3 (1.1.0) stable; urgency=medium

  [ Silke Hofstra ]
  * Include systemd-python to allow logging to the systemd journal.

- -- Silke Hofstra <silke@slxh.eu>  Wed, 29 May 2019 09:45:29 +0200
+  [ Synapse Packaging team ]
+  * New synapse release 1.1.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 04 Jul 2019 11:43:41 +0100

matrix-synapse-py3 (1.0.0) stable; urgency=medium

debian/control (vendored)
@@ -2,10 +2,13 @@ Source: matrix-synapse-py3
Section: contrib/python
Priority: extra
Maintainer: Synapse Packaging team <packages@matrix.org>
# keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
Build-Depends:
 debhelper (>= 9),
 dh-systemd,
 dh-virtualenv (>= 1.1),
 libsystemd-dev,
 libpq-dev,
 lsb-release,
 python3-dev,
 python3,

@@ -28,9 +31,12 @@ Depends:
 debconf,
 python3-distutils|libpython3-stdlib (<< 3.6),
 ${misc:Depends},
 ${shlibs:Depends},
 ${synapse:pydepends},
# some of our scripts use perl, but none of them are important,
# so we put perl:Depends in Suggests rather than Depends.
Recommends:
 ${shlibs1:Recommends},
Suggests:
 sqlite3,
 ${perl:Depends},

debian/log.yaml (vendored)
@@ -7,7 +7,7 @@ formatters:

filters:
    context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
        request: ""

handlers:

debian/rules (vendored)
@@ -3,15 +3,29 @@
# Build Debian package using https://github.com/spotify/dh-virtualenv
#

# assume we only have one package
PACKAGE_NAME:=`dh_listpackages`

override_dh_systemd_enable:
        dh_systemd_enable --name=matrix-synapse

override_dh_installinit:
        dh_installinit --name=matrix-synapse

# we don't really want to strip the symbols from our object files.
override_dh_strip:

override_dh_shlibdeps:
        # make the postgres package's dependencies a recommendation
        # rather than a hard dependency.
        find debian/$(PACKAGE_NAME)/ -path '*/site-packages/psycopg2/*.so' | \
            xargs dpkg-shlibdeps -Tdebian/$(PACKAGE_NAME).substvars \
                -pshlibs1 -dRecommends

        # all the other dependencies can be normal 'Depends' requirements,
        # except for PIL's, which is self-contained and which confuses
        # dpkg-shlibdeps.
        dh_shlibdeps -X site-packages/PIL/.libs -X site-packages/psycopg2

override_dh_virtualenv:
        ./debian/build_virtualenv

@@ -29,7 +29,7 @@ for port in 8080 8081 8082; do

    if ! grep -F "Customisation made by demo/start.sh" -q $DIR/etc/$port.config; then
        printf '\n\n# Customisation made by demo/start.sh\n' >> $DIR/etc/$port.config

        echo 'enable_registration: true' >> $DIR/etc/$port.config

        # Warning, this heredoc depends on the interaction of tabs and spaces. Please don't

@@ -43,7 +43,7 @@ for port in 8080 8081 8082; do
          tls: true
          resources:
            - names: [client, federation]

        - port: $port
          tls: false
          bind_addresses: ['::1', '127.0.0.1']

@@ -68,7 +68,7 @@ for port in 8080 8081 8082; do

        # Generate tls keys
        openssl req -x509 -newkey rsa:4096 -keyout $DIR/etc/localhost\:$https_port.tls.key -out $DIR/etc/localhost\:$https_port.tls.crt -days 365 -nodes -subj "/O=matrix"

        # Ignore keys from the trusted keys server
        echo '# Ignore keys from the trusted keys server' >> $DIR/etc/$port.config
        echo 'trusted_key_servers:' >> $DIR/etc/$port.config

@@ -120,7 +120,6 @@ for port in 8080 8081 8082; do
        python3 -m synapse.app.homeserver \
            --config-path "$DIR/etc/$port.config" \
            -D \
-           -vv \

    popd
done

@@ -16,7 +16,7 @@ ARG PYTHON_VERSION=3.7
###
### Stage 0: builder
###
-FROM docker.io/python:${PYTHON_VERSION}-alpine3.8 as builder
+FROM docker.io/python:${PYTHON_VERSION}-alpine3.10 as builder

# install the OS build deps

@@ -55,7 +55,7 @@ RUN pip install --prefix="/install" --no-warn-script-location \
### Stage 1: runtime
###

-FROM docker.io/python:${PYTHON_VERSION}-alpine3.8
+FROM docker.io/python:${PYTHON_VERSION}-alpine3.10

# xmlsec is required for saml support
RUN apk add --no-cache --virtual .runtime_deps \

@@ -42,7 +42,15 @@ RUN cd dh-virtualenv-1.1 && dpkg-buildpackage -us -uc -b
###
FROM ${distro}

# Get the distro we want to pull from as a dynamic build variable
# (We need to define it in each build stage)
ARG distro=""
ENV distro ${distro}

# Install the build dependencies
#
# NB: keep this list in sync with the list of build-deps in debian/control
# TODO: it would be nice to do that automatically.
RUN apt-get update -qq -o Acquire::Languages=none \
    && env DEBIAN_FRONTEND=noninteractive apt-get install \
    -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \

@@ -17,7 +17,7 @@ By default, the image expects a single volume, located at ``/data``, that will h
* the appservices configuration.

You are free to use separate volumes depending on storage endpoints at your
-disposal. For instance, ``/data/media`` coud be stored on a large but low
+disposal. For instance, ``/data/media`` could be stored on a large but low
performance hdd storage while other files could be stored on high performance
endpoints.

@@ -27,8 +27,8 @@ configuration file there. Multiple application services are supported.

## Generating a configuration file

-The first step is to genearte a valid config file. To do this, you can run the
-image with the `generate` commandline option.
+The first step is to generate a valid config file. To do this, you can run the
+image with the `generate` command line option.

You will need to specify values for the `SYNAPSE_SERVER_NAME` and
`SYNAPSE_REPORT_STATS` environment variable, and mount a docker volume to store

@@ -59,7 +59,7 @@ The following environment variables are supported in `generate` mode:
* `SYNAPSE_CONFIG_PATH`: path to the file to be generated. Defaults to
  `<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
* `SYNAPSE_DATA_DIR`: where the generated config will put persistent data
-  such as the datatase and media store. Defaults to `/data`.
+  such as the database and media store. Defaults to `/data`.
* `UID`, `GID`: the user id and group id to use for creating the data
  directories. Defaults to `991`, `991`.

@@ -115,7 +115,7 @@ not given).

To migrate from a dynamic configuration file to a static one, run the docker
container once with the environment variables set, and `migrate_config`
-commandline option. For example:
+command line option. For example:

```
docker run -it --rm \

@@ -4,7 +4,8 @@

set -ex

-DIST=`lsb_release -c -s`
+# Get the codename from distro env
+DIST=`cut -d ':' -f2 <<< $distro`

# we get a read-only copy of the source: make a writeable copy
cp -aT /synapse/source /synapse/build

@@ -207,22 +207,3 @@ perspectives:

password_config:
   enabled: true

-{% if SYNAPSE_SMTP_HOST %}
-email:
-   enable_notifs: false
-   smtp_host: "{{ SYNAPSE_SMTP_HOST }}"
-   smtp_port: {{ SYNAPSE_SMTP_PORT or "25" }}
-   smtp_user: "{{ SYNAPSE_SMTP_USER }}"
-   smtp_pass: "{{ SYNAPSE_SMTP_PASSWORD }}"
-   require_transport_security: False
-   notif_from: "{{ SYNAPSE_SMTP_FROM or "hostmaster@" + SYNAPSE_SERVER_NAME }}"
-   app_name: Matrix
-   # if template_dir is unset, uses the example templates that are part of
-   # the Synapse distribution.
-   #template_dir: res/templates
-   notif_template_html: notif_mail.html
-   notif_template_text: notif_mail.txt
-   notif_for_new_users: True
-   riot_base_url: "https://{{ SYNAPSE_SERVER_NAME }}"
-{% endif %}

@@ -2,11 +2,11 @@ version: 1

formatters:
  precise:
-   format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s- %(message)s'
+   format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'

filters:
  context:
-   (): synapse.util.logcontext.LoggingContextFilter
+   (): synapse.logging.context.LoggingContextFilter
    request: ""

handlers:

docs/admin_api/purge_room.md (new file, 18 lines)
@@ -0,0 +1,18 @@
Purge room API
==============

This API will remove all trace of a room from your database.

All local users must have left the room before it can be removed.

The API is:

```
POST /_synapse/admin/v1/purge_room

{
    "room_id": "!room:id"
}
```

You must authenticate using the access token of an admin user.
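
As an aside, a minimal Python sketch of driving this endpoint with the `requests` library; the homeserver URL, access token and room ID below are placeholders, not values from this changeset:

```python
import requests

BASE_URL = "https://matrix.example.com"  # placeholder homeserver
HEADERS = {"Authorization": "Bearer <admin access token>"}  # server admin token

resp = requests.post(
    f"{BASE_URL}/_synapse/admin/v1/purge_room",
    headers=HEADERS,
    json={"room_id": "!room:id"},
)
resp.raise_for_status()  # raises if the server rejected the purge request
```
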
@@ -84,3 +84,42 @@ with a body of:

    }

including an ``access_token`` of a server admin.


Get whether a user is a server administrator or not
===================================================

The api is::

    GET /_synapse/admin/v1/users/<user_id>/admin

including an ``access_token`` of a server admin.

A response body like the following is returned:

.. code:: json

    {
        "admin": true
    }


Change whether a user is a server administrator or not
======================================================

Note that you cannot demote yourself.

The api is::

    PUT /_synapse/admin/v1/users/<user_id>/admin

with a body of:

.. code:: json

    {
        "admin": true
    }

including an ``access_token`` of a server admin.
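
For illustration, a short Python sketch exercising both endpoints; the homeserver URL, token and user ID are placeholders rather than values from this changeset:

.. code:: python

    import requests

    BASE_URL = "https://matrix.example.com"   # placeholder homeserver
    HEADERS = {"Authorization": "Bearer <admin access token>"}
    USER_ID = "@alice:example.com"            # placeholder user

    # Query the user's admin status
    r = requests.get(
        f"{BASE_URL}/_synapse/admin/v1/users/{USER_ID}/admin", headers=HEADERS
    )
    print(r.json())  # e.g. {"admin": false}

    # Promote the user to server administrator
    r = requests.put(
        f"{BASE_URL}/_synapse/admin/v1/users/{USER_ID}/admin",
        headers=HEADERS,
        json={"admin": True},
    )
    r.raise_for_status()
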
@@ -1,4 +1,8 @@
-# Code Style
+Code Style
+==========
+
+Formatting tools
+----------------

The Synapse codebase uses a number of code formatting tools in order to
quickly and automatically check for formatting (and sometimes logical) errors

@@ -6,20 +10,20 @@ in code.

The necessary tools are detailed below.

-## Formatting tools
+- **black**

-  The Synapse codebase uses [black](https://pypi.org/project/black/) as an
-  opinionated code formatter, ensuring all comitted code is properly
-  formatted.
+  The Synapse codebase uses `black <https://pypi.org/project/black/>`_ as an
+  opinionated code formatter, ensuring all committed code is properly
+  formatted.

  First install ``black`` with::

    pip install --upgrade black

-  Have ``black`` auto-format your code (it shouldn't change any
-  functionality) with::
+  Have ``black`` auto-format your code (it shouldn't change any functionality)
+  with::

    black . --exclude="\.tox|build|env"

- **flake8**

@@ -54,17 +58,16 @@ functionality is supported in your editor for a more convenient development
workflow. It is not, however, recommended to run ``flake8`` on save as it
takes a while and is very resource intensive.

-## General rules
+General rules
+-------------

- **Naming**:

  - Use camel case for class and type names
  - Use underscores for functions and variables.

- Use double quotes ``"foo"`` rather than single quotes ``'foo'``.

-- **Comments**: should follow the `google code style
-  <http://google.github.io/styleguide/pyguide.html?showone=Comments#Comments>`_.
+- **Docstrings**: should follow the `google code style
+  <https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings>`_.
  This is so that we can generate documentation with `sphinx
  <http://sphinxcontrib-napoleon.readthedocs.org/en/latest/>`_. See the
  `examples

@@ -73,6 +76,8 @@ takes a while and is very resource intensive.

- **Imports**:

  - Imports should be sorted by ``isort`` as described above.

  - Prefer to import classes and functions rather than packages or modules.

    Example::

@@ -92,25 +97,84 @@ takes a while and is very resource intensive.

    This goes against the advice in the Google style guide, but it means that
    errors in the name are caught early (at import time).

  - Multiple imports from the same package can be combined onto one line::

      from synapse.types import GroupID, RoomID, UserID

    An effort should be made to keep the individual imports in alphabetical
    order.

    If the list becomes long, wrap it with parentheses and split it over
    multiple lines.

  - As per `PEP-8 <https://www.python.org/dev/peps/pep-0008/#imports>`_,
    imports should be grouped in the following order, with a blank line between
    each group:

    1. standard library imports
    2. related third party imports
    3. local application/library specific imports

  - Imports within each group should be sorted alphabetically by module name.

  - Avoid wildcard imports (``from synapse.types import *``) and relative
    imports (``from .types import UserID``).

Configuration file format
-------------------------

The `sample configuration file <./sample_config.yaml>`_ acts as a reference to
Synapse's configuration options for server administrators. Remember that many
readers will be unfamiliar with YAML and server administration in general, so
it is important that the file be as easy to understand as possible, which
includes following a consistent format.

Some guidelines follow:

* Sections should be separated with a heading consisting of a single line
  prefixed and suffixed with ``##``. There should be **two** blank lines
  before the section header, and **one** after.

* Each option should be listed in the file with the following format:

  * A comment describing the setting. Each line of this comment should be
    prefixed with a hash (``#``) and a space.

    The comment should describe the default behaviour (ie, what happens if
    the setting is omitted), as well as what the effect will be if the
    setting is changed.

    Often, the comment ends with something like "uncomment the
    following to \<do action>".

  * A line consisting of only ``#``.

  * A commented-out example setting, prefixed with only ``#``.

    For boolean (on/off) options, convention is that this example should be
    the *opposite* to the default (so the comment will end with "Uncomment
    the following to enable [or disable] \<feature\>."). For other options,
    the example should give some non-default value which is likely to be
    useful to the reader.

* There should be a blank line between each option.

* Where several settings are grouped into a single dict, *avoid* the
  convention where the whole block is commented out, resulting in comment
  lines starting ``# #``, as this is hard to read and confusing to
  edit. Instead, leave the top-level config option uncommented, and follow
  the conventions above for sub-options. Ensure that your code correctly
  handles the top-level option being set to ``None`` (as it will be if no
  sub-options are enabled).

* Lines should be wrapped at 80 characters.

Example::

    ## Frobnication ##

    # The frobnicator will ensure that all requests are fully frobnicated.
    # To enable it, uncomment the following.
    #
    #frobnicator_enabled: true

    # By default, the frobnicator will frobnicate with the default frobber.
    # The following will make it use an alternative frobber.
    #
    #frobnicator_frobber: special_frobber

    # Settings for the frobber
    #
    frobber:
       # frobbing speed. Defaults to 1.
       #
       #speed: 10

       # frobbing distance. Defaults to 1000.
       #
       #distance: 100

Note that the sample configuration is generated from the synapse code and is
maintained by a script, ``scripts-dev/generate_sample_config``. Making sure
that the output from this script matches the desired format is left as an
exercise for the reader!

@@ -1,4 +1,4 @@
-Log contexts
+Log Contexts
============

.. contents::

@@ -12,7 +12,7 @@ record.

Logcontexts are also used for CPU and database accounting, so that we can track
which requests were responsible for high CPU use or database activity.

-The ``synapse.util.logcontext`` module provides a facilities for managing the
+The ``synapse.logging.context`` module provides facilities for managing the
current log context (as well as providing the ``LoggingContextFilter`` class).

Deferreds make the whole thing complicated, so this document describes how it

@@ -27,19 +27,19 @@ found them:

.. code:: python

-    from synapse.util import logcontext          # omitted from future snippets
+    from synapse.logging import context          # omitted from future snippets

    def handle_request(request_id):
-        request_context = logcontext.LoggingContext()
+        request_context = context.LoggingContext()

-        calling_context = logcontext.LoggingContext.current_context()
-        logcontext.LoggingContext.set_current_context(request_context)
+        calling_context = context.LoggingContext.current_context()
+        context.LoggingContext.set_current_context(request_context)
        try:
            request_context.request = request_id
            do_request_handling()
            logger.debug("finished")
        finally:
-            logcontext.LoggingContext.set_current_context(calling_context)
+            context.LoggingContext.set_current_context(calling_context)

    def do_request_handling():
        logger.debug("phew")  # this will be logged against request_id

@@ -51,7 +51,7 @@ written much more succinctly as:

.. code:: python

    def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
            request_context.request = request_id
            do_request_handling()
            logger.debug("finished")

@@ -74,7 +74,7 @@ blocking operation, and returns a deferred:

    @defer.inlineCallbacks
    def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
            request_context.request = request_id
            yield do_request_handling()
            logger.debug("finished")

@@ -148,7 +148,7 @@ call any other functions.

        d = more_stuff()
        result = yield d  # also fine, of course

-        defer.returnValue(result)
+        return result

    def nonInlineCallbacksFun():
        logger.debug("just a wrapper really")

@@ -179,7 +179,7 @@ though, we need to make up a new Deferred, or we get a Deferred back from
external code. We need to make it follow our rules.

The easy way to do it is with a combination of ``defer.inlineCallbacks``, and
-``logcontext.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
+``context.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
which returns a deferred which will run its callbacks after a given number of
seconds. That might look like:

@@ -204,13 +204,13 @@ That doesn't follow the rules, but we can fix it by wrapping it with

This technique works equally for external functions which return deferreds,
or deferreds we have made ourselves.

-You can also use ``logcontext.make_deferred_yieldable``, which just does the
+You can also use ``context.make_deferred_yieldable``, which just does the
boilerplate for you, so the above could be written:

.. code:: python

    def sleep(seconds):
-        return logcontext.make_deferred_yieldable(get_sleep_deferred(seconds))
+        return context.make_deferred_yieldable(get_sleep_deferred(seconds))


Fire-and-forget

@@ -279,7 +279,7 @@ Obviously that option means that the operations done in

that might be fixed by setting a different logcontext via a ``with
LoggingContext(...)`` in ``background_operation``).

-The second option is to use ``logcontext.run_in_background``, which wraps a
+The second option is to use ``context.run_in_background``, which wraps a
function so that it doesn't reset the logcontext even when it returns an
incomplete deferred, and adds a callback to the returned deferred to reset the
logcontext. In other words, it turns a function that follows the Synapse rules

@@ -293,7 +293,7 @@ It can be used like this:

    def do_request_handling():
        yield foreground_operation()

-        logcontext.run_in_background(background_operation)
+        context.run_in_background(background_operation)

        # this will now be logged against the request context
        logger.debug("Request handling complete")

@@ -332,7 +332,7 @@ gathered:

        result = yield defer.gatherResults([d1, d2])

In this case particularly, though, option two, of using
-``logcontext.preserve_fn`` almost certainly makes more sense, so that
+``context.preserve_fn`` almost certainly makes more sense, so that
``operation1`` and ``operation2`` are both logged against the original
logcontext. This looks like:

@@ -340,8 +340,8 @@ logcontext. This looks like:

    @defer.inlineCallbacks
    def do_request_handling():
-        d1 = logcontext.preserve_fn(operation1)()
-        d2 = logcontext.preserve_fn(operation2)()
+        d1 = context.preserve_fn(operation1)()
+        d2 = context.preserve_fn(operation2)()

        with PreserveLoggingContext():
            result = yield defer.gatherResults([d1, d2])

@@ -381,7 +381,7 @@ off the background process, and then leave the ``with`` block to wait for it:

.. code:: python

    def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
            request_context.request = request_id
            d = do_request_handling()

@@ -414,7 +414,7 @@ runs its callbacks in the original logcontext, all is happy.

The business of a Deferred which runs its callbacks in the original logcontext
isn't hard to achieve — we have it today, in the shape of
-``logcontext._PreservingContextDeferred``:
+``context._PreservingContextDeferred``:

.. code:: python

@@ -59,6 +59,108 @@ How to monitor Synapse metrics using Prometheus

Restart Prometheus.


Renaming of metrics & deprecation of old names in 1.2
-----------------------------------------------------

Synapse 1.2 updates the Prometheus metrics to match the naming convention of the
upstream ``prometheus_client``. The old names are considered deprecated and will
be removed in a future version of Synapse.

+-----------------------------------------------------------------------------+-----------------------------------------------------------------------+
| New Name | Old Name |
+=============================================================================+=======================================================================+
| python_gc_objects_collected_total | python_gc_objects_collected |
| python_gc_objects_uncollectable_total | python_gc_objects_uncollectable |
| python_gc_collections_total | python_gc_collections |
| process_cpu_seconds_total | process_cpu_seconds |
| synapse_federation_client_sent_transactions_total | synapse_federation_client_sent_transactions |
| synapse_federation_client_events_processed_total | synapse_federation_client_events_processed |
| synapse_event_processing_loop_count_total | synapse_event_processing_loop_count |
| synapse_event_processing_loop_room_count_total | synapse_event_processing_loop_room_count |
| synapse_util_metrics_block_count_total | synapse_util_metrics_block_count |
| synapse_util_metrics_block_time_seconds_total | synapse_util_metrics_block_time_seconds |
| synapse_util_metrics_block_ru_utime_seconds_total | synapse_util_metrics_block_ru_utime_seconds |
| synapse_util_metrics_block_ru_stime_seconds_total | synapse_util_metrics_block_ru_stime_seconds |
| synapse_util_metrics_block_db_txn_count_total | synapse_util_metrics_block_db_txn_count |
| synapse_util_metrics_block_db_txn_duration_seconds_total | synapse_util_metrics_block_db_txn_duration_seconds |
| synapse_util_metrics_block_db_sched_duration_seconds_total | synapse_util_metrics_block_db_sched_duration_seconds |
| synapse_background_process_start_count_total | synapse_background_process_start_count |
| synapse_background_process_ru_utime_seconds_total | synapse_background_process_ru_utime_seconds |
| synapse_background_process_ru_stime_seconds_total | synapse_background_process_ru_stime_seconds |
| synapse_background_process_db_txn_count_total | synapse_background_process_db_txn_count |
| synapse_background_process_db_txn_duration_seconds_total | synapse_background_process_db_txn_duration_seconds |
| synapse_background_process_db_sched_duration_seconds_total | synapse_background_process_db_sched_duration_seconds |
| synapse_storage_events_persisted_events_total | synapse_storage_events_persisted_events |
| synapse_storage_events_persisted_events_sep_total | synapse_storage_events_persisted_events_sep |
| synapse_storage_events_state_delta_total | synapse_storage_events_state_delta |
| synapse_storage_events_state_delta_single_event_total | synapse_storage_events_state_delta_single_event |
| synapse_storage_events_state_delta_reuse_delta_total | synapse_storage_events_state_delta_reuse_delta |
| synapse_federation_server_received_pdus_total | synapse_federation_server_received_pdus |
| synapse_federation_server_received_edus_total | synapse_federation_server_received_edus |
| synapse_handler_presence_notified_presence_total | synapse_handler_presence_notified_presence |
| synapse_handler_presence_federation_presence_out_total | synapse_handler_presence_federation_presence_out |
| synapse_handler_presence_presence_updates_total | synapse_handler_presence_presence_updates |
| synapse_handler_presence_timers_fired_total | synapse_handler_presence_timers_fired |
| synapse_handler_presence_federation_presence_total | synapse_handler_presence_federation_presence |
| synapse_handler_presence_bump_active_time_total | synapse_handler_presence_bump_active_time |
| synapse_federation_client_sent_edus_total | synapse_federation_client_sent_edus |
| synapse_federation_client_sent_pdu_destinations_count_total | synapse_federation_client_sent_pdu_destinations:count |
| synapse_federation_client_sent_pdu_destinations_total | synapse_federation_client_sent_pdu_destinations:total |
| synapse_handlers_appservice_events_processed_total | synapse_handlers_appservice_events_processed |
| synapse_notifier_notified_events_total | synapse_notifier_notified_events |
| synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total | synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter |
| synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter_total | synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter |
| synapse_http_httppusher_http_pushes_processed_total | synapse_http_httppusher_http_pushes_processed |
| synapse_http_httppusher_http_pushes_failed_total | synapse_http_httppusher_http_pushes_failed |
| synapse_http_httppusher_badge_updates_processed_total | synapse_http_httppusher_badge_updates_processed |
| synapse_http_httppusher_badge_updates_failed_total | synapse_http_httppusher_badge_updates_failed |
+-----------------------------------------------------------------------------+-----------------------------------------------------------------------+
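
As a purely illustrative aside (not part of this changeset), deprecated names in an exported dashboard or alert file could be migrated with a small Python script; the file name is a placeholder and only a subset of the mapping is shown:

.. code:: python

    import re

    OLD_TO_NEW = {
        "python_gc_objects_collected": "python_gc_objects_collected_total",
        "process_cpu_seconds": "process_cpu_seconds_total",
        # ... extend with the remaining rows from the table above
    }

    def migrate(text: str) -> str:
        for old, new in OLD_TO_NEW.items():
            # Skip occurrences already followed by more metric-name
            # characters (e.g. names that already carry the _total suffix).
            text = re.sub(re.escape(old) + r"(?![A-Za-z0-9_:])", new, text)
        return text

    with open("dashboard.json") as f:  # placeholder dashboard export
        print(migrate(f.read()))
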

Removal of deprecated metrics & time based counters becoming histograms in 0.31.0
---------------------------------------------------------------------------------

docs/opentracing.rst (new file, 123 lines)
@@ -0,0 +1,123 @@
===========
OpenTracing
===========

Background
----------

OpenTracing is a semi-standard being adopted by a number of distributed tracing
platforms. It is a common api for facilitating vendor-agnostic tracing
instrumentation. That is, we can use the OpenTracing api and select one of a
number of tracer implementations to do the heavy lifting in the background.
Our current selected implementation is Jaeger.

OpenTracing is a tool which gives an insight into the causal relationship of
work done in and between servers. The servers each track events and report them
to a centralised server - in Synapse's case: Jaeger. The basic unit used to
represent events is the span. The span roughly represents a single piece of work
that was done and the time at which it occurred. A span can have child spans,
meaning that the work of the child had to be completed for the parent span to
complete, or it can have follow-on spans which represent work that is undertaken
as a result of the parent but is not depended on by the parent in order to
finish.

Since this is undertaken in a distributed environment a request to another
server, such as an RPC or a simple GET, can be considered a span (a unit of
work) for the local server. This causal link is what OpenTracing aims to
capture and visualise. In order to do this, metadata about the local server's
span, i.e. the 'span context', needs to be included with the request to the
remote.

It is up to the remote server to decide what it does with the spans
it creates. This is called the sampling policy and it can be configured
through Jaeger's settings.

For OpenTracing concepts see
https://opentracing.io/docs/overview/what-is-tracing/.

For more information about Jaeger's implementation see
https://www.jaegertracing.io/docs/

======================
Setting up OpenTracing
======================

To receive OpenTracing spans, start up a Jaeger server. This can be done
using docker like so:

.. code-block:: bash

   docker run -d --name jaeger \
     -p 6831:6831/udp \
     -p 6832:6832/udp \
     -p 5778:5778 \
     -p 16686:16686 \
     -p 14268:14268 \
     jaegertracing/all-in-one:1.13

Latest documentation is probably at
https://www.jaegertracing.io/docs/1.13/getting-started/


Enable OpenTracing in Synapse
-----------------------------

OpenTracing is not enabled by default. It must be enabled in the homeserver
config by uncommenting the config options under ``opentracing`` as shown in
the `sample config <./sample_config.yaml>`_. For example:

.. code-block:: yaml

   opentracing:
     tracer_enabled: true
     homeserver_whitelist:
       - "mytrustedhomeserver.org"
       - "*.myotherhomeservers.com"

Homeserver whitelisting
-----------------------

The homeserver whitelist is configured using regular expressions. A list of regular
expressions can be given and their union will be compared when propagating any
span contexts to another homeserver.

Though it's mostly safe to send and receive span contexts to and from
untrusted users, since span contexts are usually opaque ids, it can lead to
two problems, namely:

- If the span context is marked as sampled by the sending homeserver the receiver will
  sample it. Therefore two homeservers with wildly different sampling policies
  could incur higher sampling counts than intended.
- Sending servers can attach arbitrary data to spans, known as 'baggage'. For safety this has been disabled in Synapse
  but that doesn't prevent another server sending you baggage which will be logged
  to OpenTracing's logs.

==========
EDU FORMAT
==========

EDUs can contain tracing data in their content. This is not specced but
it could be of interest for other homeservers.

EDU format (if you're using jaeger):

.. code-block:: json

   {
     "edu_type": "type",
     "content": {
       "org.matrix.opentracing_context": {
         "uber-trace-id": "fe57cf3e65083289"
       }
     }
   }

Though you don't have to use Jaeger, you must inject the span context into
`org.matrix.opentracing_context` using the opentracing `Format.TEXT_MAP` inject method.
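
For illustration only (this snippet is not part of the changeset), the carrier
could be produced with the opentracing Python API roughly as follows; the span
name and the surrounding sending machinery are placeholders:

.. code-block:: python

    import opentracing
    from opentracing.propagation import Format

    tracer = opentracing.global_tracer()

    with tracer.start_active_span("send_edu") as scope:
        carrier = {}  # with Jaeger this ends up as {"uber-trace-id": "..."}
        tracer.inject(scope.span.context, Format.TEXT_MAP, carrier)
        edu = {
            "edu_type": "type",
            "content": {"org.matrix.opentracing_context": carrier},
        }
        # hand `edu` over to the federation sending code here (placeholder)
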
==================
Configuring Jaeger
==================

Sampling strategies can be set as in this document:
https://www.jaegertracing.io/docs/1.13/sampling/

@@ -11,7 +11,9 @@ a postgres database.

* If you are using the `matrix.org debian/ubuntu
  packages <../INSTALL.md#matrixorg-packages>`_,
-  the necessary libraries will already be installed.
+  the necessary python library will already be installed, but you will need to
+  ensure the low-level postgres library is installed, which you can do with
+  ``apt install libpq5``.

* For other pre-built packages, please consult the documentation from the
  relevant package.

@@ -34,9 +36,14 @@ Assuming your PostgreSQL database user is called ``postgres``, create a user

    su - postgres
    createuser --pwprompt synapse_user

-The PostgreSQL database used *must* have the correct encoding set, otherwise it
-would not be able to store UTF8 strings. To create a database with the correct
-encoding use, e.g.::
+Before you can authenticate with the ``synapse_user``, you must create a
+database that it can access. To create a database, first connect to the database
+with your database user::
+
+    su - postgres
+    psql
+
+and then run::

    CREATE DATABASE synapse
     ENCODING 'UTF8'

@@ -46,7 +53,13 @@ encoding use, e.g.::
     OWNER synapse_user;

This would create an appropriate database named ``synapse`` owned by the
-``synapse_user`` user (which must already exist).
+``synapse_user`` user (which must already have been created as above).
+
+Note that the PostgreSQL database *must* have the correct encoding set (as
+shown above), otherwise it will not be able to store UTF8 strings.
+
+You may need to enable password authentication so ``synapse_user`` can connect
+to the database. See https://www.postgresql.org/docs/11/auth-pg-hba-conf.html.

Tuning Postgres
===============
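
As a quick, purely illustrative sanity check (connection details below are placeholders), you can connect with psycopg2 and confirm the encoding before pointing Synapse at the database:

.. code:: python

    import psycopg2

    conn = psycopg2.connect(
        dbname="synapse",
        user="synapse_user",
        password="<the password you chose>",  # placeholder
        host="localhost",
    )
    with conn.cursor() as cur:
        cur.execute("SHOW SERVER_ENCODING")
        print(cur.fetchone())  # should print ('UTF8',)
    conn.close()
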
@@ -48,6 +48,8 @@ Let's assume that we expect clients to connect to our server at

          proxy_set_header X-Forwarded-For $remote_addr;
      }
  }

Do not add a `/` after the port in `proxy_pass`, otherwise nginx will canonicalise/normalise the URI.

* Caddy::

@@ -205,9 +205,9 @@ listeners:
  #
  - port: 8008
    tls: false
-   bind_addresses: ['::1', '127.0.0.1']
    type: http
    x_forwarded: true
+   bind_addresses: ['::1', '127.0.0.1']

    resources:
      - names: [client, federation]

@@ -278,6 +278,23 @@ listeners:

# Used by phonehome stats to group together related servers.
#server_context: context

# Resource-constrained Homeserver Settings
#
# If limit_remote_rooms.enabled is True, the room complexity will be
# checked before a user joins a new remote room. If it is above
# limit_remote_rooms.complexity, it will disallow joining or
# instantly leave.
#
# limit_remote_rooms.complexity_error can be set to customise the text
# displayed to the user when a room above the complexity threshold has
# its join cancelled.
#
# Uncomment the below lines to enable:
#limit_remote_rooms:
#  enabled: True
#  complexity: 1.0
#  complexity_error: "This room is too complex."

# Whether to require a user to be in the room to add an alias to it.
# Defaults to 'true'.
#

@@ -375,10 +392,10 @@ listeners:
# permission to listen on port 80.
#
acme:
-    # ACME support is disabled by default. Uncomment the following line
-    # (and tls_certificate_path and tls_private_key_path above) to enable it.
+    # ACME support is disabled by default. Set this to `true` and uncomment
+    # tls_certificate_path and tls_private_key_path above to enable it.
    #
-    #enabled: true
+    enabled: False

    # Endpoint to use to request certificates. If you only want to test,
    # use Let's Encrypt's staging url:

@@ -389,17 +406,17 @@ acme:
    # Port number to listen on for the HTTP-01 challenge. Change this if
    # you are forwarding connections through Apache/Nginx/etc.
    #
-    #port: 80
+    port: 80

    # Local addresses to listen on for incoming connections.
    # Again, you may want to change this if you are forwarding connections
    # through Apache/Nginx/etc.
    #
-    #bind_addresses: ['::', '0.0.0.0']
+    bind_addresses: ['::', '0.0.0.0']

    # How many days remaining on a certificate before it is renewed.
    #
-    #reprovision_threshold: 30
+    reprovision_threshold: 30

    # The domain that the certificate should be for. Normally this
    # should be the same as your Matrix domain (i.e., 'server_name'), but,

@@ -413,7 +430,7 @@ acme:
    #
    # If not set, defaults to your 'server_name'.
    #
-    #domain: matrix.example.com
+    domain: matrix.example.com

    # file to use for the account key. This will be generated if it doesn't
    # exist.

@@ -468,7 +485,8 @@ database:

## Logging ##

-# A yaml python logging config file
+# A yaml python logging config file as described by
+# https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
#
log_config: "CONFDIR/SERVERNAME.log.config"

@@ -548,6 +566,13 @@ log_config: "CONFDIR/SERVERNAME.log.config"


## Media Store ##

# Enable the media store service in the Synapse master. Uncomment the
# following if you are using a separate media store worker.
#
#enable_media_repo: false

# Directory where uploaded images and attachments are stored.
#
media_store_path: "DATADIR/media_store"

@@ -785,6 +810,27 @@ uploads_path: "DATADIR/uploads"
#   period: 6w
#   renew_at: 1w
#   renew_email_subject: "Renew your %(app)s account"
#   # Directory in which Synapse will try to find the HTML files to serve to the
#   # user when trying to renew an account. Optional, defaults to
#   # synapse/res/templates.
#   template_dir: "res/templates"
#   # HTML to be displayed to the user after they successfully renewed their
#   # account. Optional.
#   account_renewed_html_path: "account_renewed.html"
#   # HTML to be displayed when the user tries to renew an account with an invalid
#   # renewal token. Optional.
#   invalid_token_html_path: "invalid_token.html"

# Time that a user's session remains valid for, after they log in.
#
# Note that this is not currently compatible with guest logins.
#
# Note also that this is calculated at login time: changes are not applied
# retrospectively to users who have already logged in.
#
# By default, this is infinite.
#
#session_lifetime: 24h

# The user must provide all of the below types of 3PID when registering.
#

@@ -914,10 +960,6 @@ uploads_path: "DATADIR/uploads"
#
# macaroon_secret_key: <PRIVATE STRING>

-# Used to enable access token expiration.
-#
-#expire_access_token: False
-
# a secret which is used to calculate HMACs for form values, to stop
# falsification of values. Must be specified for the User Consent
# forms to work.

@@ -986,6 +1028,14 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key"
#
#trusted_key_servers:
#  - server_name: "matrix.org"
#

# The signing keys to use when acting as a trusted key server. If not specified
# defaults to the server signing key.
#
# Can contain multiple keys, one per line.
#
#key_server_signing_keys_path: "key_server_signing_keys.key"


# Enable SAML2 for registration and login. Uses pysaml2.

@@ -1395,3 +1445,43 @@ password_config:
#    module: "my_custom_project.SuperRulesSet"
#    config:
#      example_option: 'things'


## Opentracing ##

# These settings enable opentracing, which implements distributed tracing.
# This allows you to observe the causal chains of events across servers
# including requests, key lookups etc., across any server running
# synapse or any other services which support opentracing
# (specifically those implemented with Jaeger).
#
opentracing:
    # tracing is disabled by default. Uncomment the following line to enable it.
    #
    #enabled: true

    # The list of homeservers we wish to send and receive span contexts and span baggage.
    # See docs/opentracing.rst
    # This is a list of regexes which are matched against the server_name of the
    # homeserver.
    #
    # By default, it is empty, so no servers are matched.
    #
    #homeserver_whitelist:
    #  - ".*"

    # Jaeger can be configured to sample traces at different rates.
    # All configuration options provided by Jaeger can be set here.
    # Jaeger's configuration is mostly related to trace sampling, which
    # is documented here:
    # https://www.jaegertracing.io/docs/1.13/sampling/.
    #
    #jaeger_config:
    #  sampler:
    #    type: const
    #    param: 1

    # Logging whether spans were started and reported
    #
    # logging:
    #   false

83
docs/structured_logging.md
Normal file
83
docs/structured_logging.md
Normal file
@@ -0,0 +1,83 @@
# Structured Logging

A structured logging system can be useful when your logs are destined for a machine to parse and process. By maintaining its machine-readable characteristics, it enables more efficient searching and aggregations when consumed by software such as the "ELK stack".

Synapse's structured logging system is configured via the file that Synapse's `log_config` config option points to. The file must be YAML and contain `structured: true`. It must contain a list of "drains" (places where logs go to).

A structured logging configuration looks similar to the following:

```yaml
structured: true

loggers:
    synapse:
        level: INFO
    synapse.storage.SQL:
        level: WARNING

drains:
    console:
        type: console
        location: stdout
    file:
        type: file_json
        location: homeserver.log
```

The above logging config will set Synapse as 'INFO' logging level by default, with the SQL layer at 'WARNING', and will have two logging drains (to the console and to a file, stored as JSON).

## Drain Types

Drain types can be specified by the `type` key.

### `console`

Outputs human-readable logs to the console.

Arguments:

- `location`: Either `stdout` or `stderr`.

### `console_json`

Outputs machine-readable JSON logs to the console.

Arguments:

- `location`: Either `stdout` or `stderr`.

### `console_json_terse`

Outputs machine-readable JSON logs to the console, separated by newlines. This
format is not designed to be read and re-formatted into human-readable text, but
is optimal for a logging aggregation system.

Arguments:

- `location`: Either `stdout` or `stderr`.

### `file`

Outputs human-readable logs to a file.

Arguments:

- `location`: An absolute path to the file to log to.

### `file_json`

Outputs machine-readable logs to a file.

Arguments:

- `location`: An absolute path to the file to log to.

### `network_json_terse`

Delivers machine-readable JSON logs to a log aggregator over TCP. This is
compatible with LogStash's TCP input with the codec set to `json_lines`.

Arguments:

- `host`: Hostname or IP address of the log aggregator.
- `port`: Numerical port to contact on the host.
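Since `console_json_terse` and `network_json_terse` both emit newline-delimited JSON, a rough sketch of a compatible producer may help; the field names and the aggregator address below are illustrative assumptions, not Synapse's exact log schema:

```python
import json
import socket
import time

# Minimal sketch of what a terse JSON drain emits: one JSON object per
# line ("json_lines"), delivered over a plain TCP connection. The field
# names here are illustrative only, and the aggregator address is an
# assumption for the example.
def send_log_line(host, port, level, message):
    event = {
        "log_level": level,
        "log": message,
        "time": time.time(),
    }
    line = json.dumps(event) + "\n"  # newline-delimited JSON
    with socket.create_connection((host, port)) as sock:
        sock.sendall(line.encode("utf-8"))

# Usage (assumes a LogStash TCP input with codec "json_lines" on :5000):
# send_log_line("127.0.0.1", 5000, "INFO", "hello from synapse")
```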
@@ -206,6 +206,13 @@ Handles the media repository. It can handle all endpoints starting with::

    /_matrix/media/

And the following regular expressions matching media-specific administration
APIs::

    ^/_synapse/admin/v1/purge_media_cache$
    ^/_synapse/admin/v1/room/.*/media$
    ^/_synapse/admin/v1/quarantine_media/.*$

You should also set ``enable_media_repo: False`` in the shared configuration
file to stop the main synapse running background jobs related to managing the
media repository.
@@ -1,16 +0,0 @@
#! /bin/bash

set -eux

cd "`dirname $0`/.."

TOX_DIR=$WORKSPACE/.tox

mkdir -p $TOX_DIR

if ! [ $TOX_DIR -ef .tox ]; then
    ln -s "$TOX_DIR" .tox
fi

# set up the virtualenv
tox -e py27 --notest -v
@@ -14,6 +14,11 @@
name = "Bugfixes"
showcontent = true

[[tool.towncrier.type]]
directory = "docker"
name = "Updates to the Docker image"
showcontent = true

[[tool.towncrier.type]]
directory = "doc"
name = "Improved Documentation"

@@ -39,6 +44,8 @@ exclude = '''
| \.git  # root of the project
| \.tox
| \.venv
| \.env
| env
| _build
| _trial_temp.*
| build
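With the new `docker` type registered, a changelog entry for a Docker change would presumably live in a newsfragment named after the pull request, e.g. `changelog.d/1234.docker` (the number here is purely illustrative); towncrier then collects it under "Updates to the Docker image" at release time.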
scripts-dev/lint.sh (new executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/bin/sh
#
# Runs linting scripts over the local Synapse checkout
# isort - sorts import statements
# flake8 - lints and finds mistakes
# black - opinionated code formatter

set -e

isort -y -rc synapse tests scripts-dev scripts
flake8 synapse tests
python3 -m black synapse tests scripts-dev scripts
@@ -35,4 +35,4 @@ try:
except ImportError:
    pass

__version__ = "1.1.0rc1"
__version__ = "1.3.1"
@@ -22,10 +22,17 @@ from netaddr import IPAddress
|
||||
|
||||
from twisted.internet import defer
|
||||
|
||||
import synapse.logging.opentracing as opentracing
|
||||
import synapse.types
|
||||
from synapse import event_auth
|
||||
from synapse.api.constants import EventTypes, JoinRules, Membership
|
||||
from synapse.api.errors import AuthError, Codes, ResourceLimitError
|
||||
from synapse.api.errors import (
|
||||
AuthError,
|
||||
Codes,
|
||||
InvalidClientTokenError,
|
||||
MissingClientTokenError,
|
||||
ResourceLimitError,
|
||||
)
|
||||
from synapse.config.server import is_threepid_reserved
|
||||
from synapse.types import UserID
|
||||
from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache
|
||||
@@ -63,7 +70,6 @@ class Auth(object):
|
||||
self.clock = hs.get_clock()
|
||||
self.store = hs.get_datastore()
|
||||
self.state = hs.get_state_handler()
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS = 401
|
||||
|
||||
self.token_cache = LruCache(CACHE_SIZE_FACTOR * 10000)
|
||||
register_cache("cache", "token_cache", self.token_cache)
|
||||
@@ -123,7 +129,7 @@ class Auth(object):
|
||||
)
|
||||
|
||||
self._check_joined_room(member, user_id, room_id)
|
||||
defer.returnValue(member)
|
||||
return member
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_user_was_in_room(self, room_id, user_id):
|
||||
@@ -151,13 +157,13 @@ class Auth(object):
|
||||
if forgot:
|
||||
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
|
||||
|
||||
defer.returnValue(member)
|
||||
return member
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_host_in_room(self, room_id, host):
|
||||
with Measure(self.clock, "check_host_in_room"):
|
||||
latest_event_ids = yield self.store.is_host_joined(room_id, host)
|
||||
defer.returnValue(latest_event_ids)
|
||||
return latest_event_ids
|
||||
|
||||
def _check_joined_room(self, member, user_id, room_id):
|
||||
if not member or member.membership != Membership.JOIN:
|
||||
@@ -173,6 +179,7 @@ class Auth(object):
|
||||
def get_public_keys(self, invite_event):
|
||||
return event_auth.get_public_keys(invite_event)
|
||||
|
||||
@opentracing.trace
|
||||
@defer.inlineCallbacks
|
||||
def get_user_by_req(
|
||||
self, request, allow_guest=False, rights="access", allow_expired=False
|
||||
@@ -189,22 +196,22 @@ class Auth(object):
|
||||
Returns:
|
||||
defer.Deferred: resolves to a ``synapse.types.Requester`` object
|
||||
Raises:
|
||||
AuthError if no user by that token exists or the token is invalid.
|
||||
InvalidClientCredentialsError if no user by that token exists or the token
|
||||
is invalid.
|
||||
AuthError if access is denied for the user in the access token
|
||||
"""
|
||||
# Can optionally look elsewhere in the request (e.g. headers)
|
||||
try:
|
||||
ip_addr = self.hs.get_ip_from_request(request)
|
||||
user_agent = request.requestHeaders.getRawHeaders(
|
||||
b"User-Agent", default=[b""]
|
||||
)[0].decode("ascii", "surrogateescape")
|
||||
|
||||
access_token = self.get_access_token_from_request(
|
||||
request, self.TOKEN_NOT_FOUND_HTTP_STATUS
|
||||
)
|
||||
access_token = self.get_access_token_from_request(request)
|
||||
|
||||
user_id, app_service = yield self._get_appservice_user_id(request)
|
||||
if user_id:
|
||||
request.authenticated_entity = user_id
|
||||
opentracing.set_tag("authenticated_entity", user_id)
|
||||
|
||||
if ip_addr and self.hs.config.track_appservice_user_ips:
|
||||
yield self.store.insert_client_ip(
|
||||
@@ -215,9 +222,7 @@ class Auth(object):
|
||||
device_id="dummy-device", # stubbed
|
||||
)
|
||||
|
||||
defer.returnValue(
|
||||
synapse.types.create_requester(user_id, app_service=app_service)
|
||||
)
|
||||
return synapse.types.create_requester(user_id, app_service=app_service)
|
||||
|
||||
user_info = yield self.get_user_by_access_token(access_token, rights)
|
||||
user = user_info["user"]
|
||||
@@ -257,46 +262,39 @@ class Auth(object):
|
||||
)
|
||||
|
||||
request.authenticated_entity = user.to_string()
|
||||
opentracing.set_tag("authenticated_entity", user.to_string())
|
||||
|
||||
defer.returnValue(
|
||||
synapse.types.create_requester(
|
||||
user, token_id, is_guest, device_id, app_service=app_service
|
||||
)
|
||||
return synapse.types.create_requester(
|
||||
user, token_id, is_guest, device_id, app_service=app_service
|
||||
)
|
||||
except KeyError:
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Missing access token.",
|
||||
errcode=Codes.MISSING_TOKEN,
|
||||
)
|
||||
raise MissingClientTokenError()
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_appservice_user_id(self, request):
|
||||
app_service = self.store.get_app_service_by_token(
|
||||
self.get_access_token_from_request(
|
||||
request, self.TOKEN_NOT_FOUND_HTTP_STATUS
|
||||
)
|
||||
self.get_access_token_from_request(request)
|
||||
)
|
||||
if app_service is None:
|
||||
defer.returnValue((None, None))
|
||||
return None, None
|
||||
|
||||
if app_service.ip_range_whitelist:
|
||||
ip_address = IPAddress(self.hs.get_ip_from_request(request))
|
||||
if ip_address not in app_service.ip_range_whitelist:
|
||||
defer.returnValue((None, None))
|
||||
return None, None
|
||||
|
||||
if b"user_id" not in request.args:
|
||||
defer.returnValue((app_service.sender, app_service))
|
||||
return app_service.sender, app_service
|
||||
|
||||
user_id = request.args[b"user_id"][0].decode("utf8")
|
||||
if app_service.sender == user_id:
|
||||
defer.returnValue((app_service.sender, app_service))
|
||||
return app_service.sender, app_service
|
||||
|
||||
if not app_service.is_interested_in_user(user_id):
|
||||
raise AuthError(403, "Application service cannot masquerade as this user.")
|
||||
if not (yield self.store.get_user_by_id(user_id)):
|
||||
raise AuthError(403, "Application service has not registered this user")
|
||||
defer.returnValue((user_id, app_service))
|
||||
return user_id, app_service
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def get_user_by_access_token(self, token, rights="access"):
|
||||
@@ -313,14 +311,26 @@ class Auth(object):
|
||||
`token_id` (int|None): access token id. May be None if guest
|
||||
`device_id` (str|None): device corresponding to access token
|
||||
Raises:
|
||||
AuthError if no user by that token exists or the token is invalid.
|
||||
InvalidClientCredentialsError if no user by that token exists or the token
|
||||
is invalid.
|
||||
"""
|
||||
|
||||
if rights == "access":
|
||||
# first look in the database
|
||||
r = yield self._look_up_user_by_access_token(token)
|
||||
if r:
|
||||
defer.returnValue(r)
|
||||
valid_until_ms = r["valid_until_ms"]
|
||||
if (
|
||||
valid_until_ms is not None
|
||||
and valid_until_ms < self.clock.time_msec()
|
||||
):
|
||||
# there was a valid access token, but it has expired.
|
||||
# soft-logout the user.
|
||||
raise InvalidClientTokenError(
|
||||
msg="Access token has expired", soft_logout=True
|
||||
)
|
||||
|
||||
return r
|
||||
|
||||
# otherwise it needs to be a valid macaroon
|
||||
try:
|
||||
@@ -331,11 +341,7 @@ class Auth(object):
|
||||
if not guest:
|
||||
# non-guest access tokens must be in the database
|
||||
logger.warning("Unrecognised access token - not in store.")
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Unrecognised access token.",
|
||||
errcode=Codes.UNKNOWN_TOKEN,
|
||||
)
|
||||
raise InvalidClientTokenError()
|
||||
|
||||
# Guest access tokens are not stored in the database (there can
|
||||
# only be one access token per guest, anyway).
|
||||
@@ -350,16 +356,10 @@ class Auth(object):
|
||||
# guest tokens.
|
||||
stored_user = yield self.store.get_user_by_id(user_id)
|
||||
if not stored_user:
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Unknown user_id %s" % user_id,
|
||||
errcode=Codes.UNKNOWN_TOKEN,
|
||||
)
|
||||
raise InvalidClientTokenError("Unknown user_id %s" % user_id)
|
||||
if not stored_user["is_guest"]:
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Guest access token used for regular user",
|
||||
errcode=Codes.UNKNOWN_TOKEN,
|
||||
raise InvalidClientTokenError(
|
||||
"Guest access token used for regular user"
|
||||
)
|
||||
ret = {
|
||||
"user": user,
|
||||
@@ -378,7 +378,7 @@ class Auth(object):
|
||||
}
|
||||
else:
|
||||
raise RuntimeError("Unknown rights setting %s", rights)
|
||||
defer.returnValue(ret)
|
||||
return ret
|
||||
except (
|
||||
_InvalidMacaroonException,
|
||||
pymacaroons.exceptions.MacaroonException,
|
||||
@@ -386,11 +386,7 @@ class Auth(object):
|
||||
ValueError,
|
||||
) as e:
|
||||
logger.warning("Invalid macaroon in auth: %s %s", type(e), e)
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Invalid macaroon passed.",
|
||||
errcode=Codes.UNKNOWN_TOKEN,
|
||||
)
|
||||
raise InvalidClientTokenError("Invalid macaroon passed.")
|
||||
|
||||
def _parse_and_validate_macaroon(self, token, rights="access"):
|
||||
"""Takes a macaroon and tries to parse and validate it. This is cached
|
||||
@@ -418,25 +414,16 @@ class Auth(object):
|
||||
try:
|
||||
user_id = self.get_user_id_from_macaroon(macaroon)
|
||||
|
||||
has_expiry = False
|
||||
guest = False
|
||||
for caveat in macaroon.caveats:
|
||||
if caveat.caveat_id.startswith("time "):
|
||||
has_expiry = True
|
||||
elif caveat.caveat_id == "guest = true":
|
||||
if caveat.caveat_id == "guest = true":
|
||||
guest = True
|
||||
|
||||
self.validate_macaroon(
|
||||
macaroon, rights, self.hs.config.expire_access_token, user_id=user_id
|
||||
)
|
||||
self.validate_macaroon(macaroon, rights, user_id=user_id)
|
||||
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Invalid macaroon passed.",
|
||||
errcode=Codes.UNKNOWN_TOKEN,
|
||||
)
|
||||
raise InvalidClientTokenError("Invalid macaroon passed.")
|
||||
|
||||
if not has_expiry and rights == "access":
|
||||
if rights == "access":
|
||||
self.token_cache[token] = (user_id, guest)
|
||||
|
||||
return user_id, guest
|
||||
@@ -453,19 +440,16 @@ class Auth(object):
|
||||
(str) user id
|
||||
|
||||
Raises:
|
||||
AuthError if there is no user_id caveat in the macaroon
|
||||
InvalidClientCredentialsError if there is no user_id caveat in the
|
||||
macaroon
|
||||
"""
|
||||
user_prefix = "user_id = "
|
||||
for caveat in macaroon.caveats:
|
||||
if caveat.caveat_id.startswith(user_prefix):
|
||||
return caveat.caveat_id[len(user_prefix) :]
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"No user caveat in macaroon",
|
||||
errcode=Codes.UNKNOWN_TOKEN,
|
||||
)
|
||||
raise InvalidClientTokenError("No user caveat in macaroon")
|
||||
|
||||
def validate_macaroon(self, macaroon, type_string, verify_expiry, user_id):
|
||||
def validate_macaroon(self, macaroon, type_string, user_id):
|
||||
"""
|
||||
validate that a Macaroon is understood by and was signed by this server.
|
||||
|
||||
@@ -473,7 +457,6 @@ class Auth(object):
|
||||
macaroon(pymacaroons.Macaroon): The macaroon to validate
|
||||
type_string(str): The kind of token required (e.g. "access",
|
||||
"delete_pusher")
|
||||
verify_expiry(bool): Whether to verify whether the macaroon has expired.
|
||||
user_id (str): The user_id required
|
||||
"""
|
||||
v = pymacaroons.Verifier()
|
||||
@@ -486,19 +469,7 @@ class Auth(object):
|
||||
v.satisfy_exact("type = " + type_string)
|
||||
v.satisfy_exact("user_id = %s" % user_id)
|
||||
v.satisfy_exact("guest = true")
|
||||
|
||||
# verify_expiry should really always be True, but there exist access
|
||||
# tokens in the wild which expire when they should not, so we can't
|
||||
# enforce expiry yet (so we have to allow any caveat starting with
|
||||
# 'time < ' in access tokens).
|
||||
#
|
||||
# On the other hand, short-term login tokens (as used by CAS login, for
|
||||
# example) have an expiry time which we do want to enforce.
|
||||
|
||||
if verify_expiry:
|
||||
v.satisfy_general(self._verify_expiry)
|
||||
else:
|
||||
v.satisfy_general(lambda c: c.startswith("time < "))
|
||||
v.satisfy_general(self._verify_expiry)
|
||||
|
||||
# access_tokens include a nonce for uniqueness: any value is acceptable
|
||||
v.satisfy_general(lambda c: c.startswith("nonce = "))
|
||||
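For readers unfamiliar with the verification pattern above, here is a hedged standalone sketch using pymacaroons directly; the key, location, and caveat values are invented for illustration, though the caveat shapes mirror those named in the diff:

```python
import pymacaroons

# Build a macaroon with the kinds of first-party caveats Synapse's access
# tokens carry, then verify it. satisfy_exact pins fixed caveats, while
# satisfy_general accepts any caveat matching a predicate (as used above
# for the per-token "nonce = ..." value).
key = b"macaroon-secret-key"  # illustrative; normally macaroon_secret_key
m = pymacaroons.Macaroon(location="example.com", identifier="key1", key=key)
m.add_first_party_caveat("gen = 1")
m.add_first_party_caveat("type = access")
m.add_first_party_caveat("user_id = @alice:example.com")
m.add_first_party_caveat("nonce = abcdef")

v = pymacaroons.Verifier()
v.satisfy_exact("gen = 1")
v.satisfy_exact("type = access")
v.satisfy_exact("user_id = @alice:example.com")
v.satisfy_general(lambda c: c.startswith("nonce = "))
print(v.verify(m, key))  # True if every caveat is satisfied
```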
@@ -517,7 +488,7 @@ class Auth(object):
|
||||
def _look_up_user_by_access_token(self, token):
|
||||
ret = yield self.store.get_user_by_access_token(token)
|
||||
if not ret:
|
||||
defer.returnValue(None)
|
||||
return None
|
||||
|
||||
# we use ret.get() below because *lots* of unit tests stub out
|
||||
# get_user_by_access_token in a way where it only returns a couple of
|
||||
@@ -527,26 +498,18 @@ class Auth(object):
|
||||
"token_id": ret.get("token_id", None),
|
||||
"is_guest": False,
|
||||
"device_id": ret.get("device_id"),
|
||||
"valid_until_ms": ret.get("valid_until_ms"),
|
||||
}
|
||||
defer.returnValue(user_info)
|
||||
return user_info
|
||||
|
||||
def get_appservice_by_req(self, request):
|
||||
try:
|
||||
token = self.get_access_token_from_request(
|
||||
request, self.TOKEN_NOT_FOUND_HTTP_STATUS
|
||||
)
|
||||
service = self.store.get_app_service_by_token(token)
|
||||
if not service:
|
||||
logger.warn("Unrecognised appservice access token.")
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Unrecognised access token.",
|
||||
errcode=Codes.UNKNOWN_TOKEN,
|
||||
)
|
||||
request.authenticated_entity = service.sender
|
||||
return defer.succeed(service)
|
||||
except KeyError:
|
||||
raise AuthError(self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.")
|
||||
token = self.get_access_token_from_request(request)
|
||||
service = self.store.get_app_service_by_token(token)
|
||||
if not service:
|
||||
logger.warn("Unrecognised appservice access token.")
|
||||
raise InvalidClientTokenError()
|
||||
request.authenticated_entity = service.sender
|
||||
return defer.succeed(service)
|
||||
|
||||
def is_server_admin(self, user):
|
||||
""" Check if the given user is a local server admin.
|
||||
@@ -562,7 +525,7 @@ class Auth(object):
|
||||
@defer.inlineCallbacks
|
||||
def compute_auth_events(self, event, current_state_ids, for_verification=False):
|
||||
if event.type == EventTypes.Create:
|
||||
defer.returnValue([])
|
||||
return []
|
||||
|
||||
auth_ids = []
|
||||
|
||||
@@ -623,22 +586,7 @@ class Auth(object):
|
||||
if member_event.content["membership"] == Membership.JOIN:
|
||||
auth_ids.append(member_event.event_id)
|
||||
|
||||
defer.returnValue(auth_ids)
|
||||
|
||||
def check_redaction(self, room_version, event, auth_events):
|
||||
"""Check whether the event sender is allowed to redact the target event.
|
||||
|
||||
Returns:
|
||||
True if the sender is allowed to redact the target event if the
|
||||
target event was created by them.
|
||||
False if the sender is allowed to redact the target event with no
|
||||
further checks.
|
||||
|
||||
Raises:
|
||||
AuthError if the event sender is definitely not allowed to redact
|
||||
the target event.
|
||||
"""
|
||||
return event_auth.check_redaction(room_version, event, auth_events)
|
||||
return auth_ids
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_can_change_room_list(self, room_id, user):
|
||||
@@ -652,7 +600,7 @@ class Auth(object):
|
||||
|
||||
is_admin = yield self.is_server_admin(user)
|
||||
if is_admin:
|
||||
defer.returnValue(True)
|
||||
return True
|
||||
|
||||
user_id = user.to_string()
|
||||
yield self.check_joined_room(room_id, user_id)
|
||||
@@ -692,20 +640,16 @@ class Auth(object):
|
||||
return bool(query_params) or bool(auth_headers)
|
||||
|
||||
@staticmethod
|
||||
def get_access_token_from_request(request, token_not_found_http_status=401):
|
||||
def get_access_token_from_request(request):
|
||||
"""Extracts the access_token from the request.
|
||||
|
||||
Args:
|
||||
request: The http request.
|
||||
token_not_found_http_status(int): The HTTP status code to set in the
|
||||
AuthError if the token isn't found. This is used in some of the
|
||||
legacy APIs to change the status code to 403 from the default of
|
||||
401 since some of the old clients depended on auth errors returning
|
||||
403.
|
||||
Returns:
|
||||
unicode: The access_token
|
||||
Raises:
|
||||
AuthError: If there isn't an access_token in the request.
|
||||
MissingClientTokenError: If there isn't a single access_token in the
|
||||
request
|
||||
"""
|
||||
|
||||
auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
|
||||
@@ -714,34 +658,20 @@ class Auth(object):
|
||||
# Try to get the access_token from an "Authorization: Bearer"
|
||||
# header
|
||||
if query_params is not None:
|
||||
raise AuthError(
|
||||
token_not_found_http_status,
|
||||
"Mixing Authorization headers and access_token query parameters.",
|
||||
errcode=Codes.MISSING_TOKEN,
|
||||
raise MissingClientTokenError(
|
||||
"Mixing Authorization headers and access_token query parameters."
|
||||
)
|
||||
if len(auth_headers) > 1:
|
||||
raise AuthError(
|
||||
token_not_found_http_status,
|
||||
"Too many Authorization headers.",
|
||||
errcode=Codes.MISSING_TOKEN,
|
||||
)
|
||||
raise MissingClientTokenError("Too many Authorization headers.")
|
||||
parts = auth_headers[0].split(b" ")
|
||||
if parts[0] == b"Bearer" and len(parts) == 2:
|
||||
return parts[1].decode("ascii")
|
||||
else:
|
||||
raise AuthError(
|
||||
token_not_found_http_status,
|
||||
"Invalid Authorization header.",
|
||||
errcode=Codes.MISSING_TOKEN,
|
||||
)
|
||||
raise MissingClientTokenError("Invalid Authorization header.")
|
||||
else:
|
||||
# Try to get the access_token from the query params.
|
||||
if not query_params:
|
||||
raise AuthError(
|
||||
token_not_found_http_status,
|
||||
"Missing access token.",
|
||||
errcode=Codes.MISSING_TOKEN,
|
||||
)
|
||||
raise MissingClientTokenError()
|
||||
|
||||
return query_params[0].decode("ascii")
|
||||
|
||||
@@ -764,7 +694,7 @@ class Auth(object):
|
||||
# * The user is a guest user, and has joined the room
|
||||
# else it will throw.
|
||||
member_event = yield self.check_user_was_in_room(room_id, user_id)
|
||||
defer.returnValue((member_event.membership, member_event.event_id))
|
||||
return member_event.membership, member_event.event_id
|
||||
except AuthError:
|
||||
visibility = yield self.state.get_current_state(
|
||||
room_id, EventTypes.RoomHistoryVisibility, ""
|
||||
@@ -773,7 +703,7 @@ class Auth(object):
|
||||
visibility
|
||||
and visibility.content["history_visibility"] == "world_readable"
|
||||
):
|
||||
defer.returnValue((Membership.JOIN, None))
|
||||
return Membership.JOIN, None
|
||||
return
|
||||
raise AuthError(
|
||||
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
|
||||
|
||||
@@ -122,7 +122,8 @@ class UserTypes(object):
|
||||
"""
|
||||
|
||||
SUPPORT = "support"
|
||||
ALL_USER_TYPES = (SUPPORT,)
|
||||
BOT = "bot"
|
||||
ALL_USER_TYPES = (SUPPORT, BOT)
|
||||
|
||||
|
||||
class RelationTypes(object):
|
||||
|
||||
@@ -61,6 +61,7 @@ class Codes(object):
|
||||
INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION"
|
||||
WRONG_ROOM_KEYS_VERSION = "M_WRONG_ROOM_KEYS_VERSION"
|
||||
EXPIRED_ACCOUNT = "ORG_MATRIX_EXPIRED_ACCOUNT"
|
||||
USER_DEACTIVATED = "M_USER_DEACTIVATED"
|
||||
|
||||
|
||||
class CodeMessageException(RuntimeError):
|
||||
@@ -139,6 +140,22 @@ class ConsentNotGivenError(SynapseError):
|
||||
return cs_error(self.msg, self.errcode, consent_uri=self._consent_uri)
|
||||
|
||||
|
||||
class UserDeactivatedError(SynapseError):
|
||||
"""The error returned to the client when the user attempted to access an
|
||||
authenticated endpoint, but the account has been deactivated.
|
||||
"""
|
||||
|
||||
def __init__(self, msg):
|
||||
"""Constructs a UserDeactivatedError
|
||||
|
||||
Args:
|
||||
msg (str): The human-readable error message
|
||||
"""
|
||||
super(UserDeactivatedError, self).__init__(
|
||||
code=http_client.FORBIDDEN, msg=msg, errcode=Codes.USER_DEACTIVATED
|
||||
)
|
||||
|
||||
|
||||
class RegistrationError(SynapseError):
|
||||
"""An error raised when a registration event fails."""
|
||||
|
||||
@@ -210,7 +227,9 @@ class NotFoundError(SynapseError):
|
||||
|
||||
|
||||
class AuthError(SynapseError):
|
||||
"""An error raised when there was a problem authorising an event."""
|
||||
"""An error raised when there was a problem authorising an event, and at various
|
||||
other poorly-defined times.
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "errcode" not in kwargs:
|
||||
@@ -218,6 +237,41 @@ class AuthError(SynapseError):
|
||||
super(AuthError, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class InvalidClientCredentialsError(SynapseError):
|
||||
"""An error raised when there was a problem with the authorisation credentials
|
||||
in a client request.
|
||||
|
||||
https://matrix.org/docs/spec/client_server/r0.5.0#using-access-tokens:
|
||||
|
||||
When credentials are required but missing or invalid, the HTTP call will
|
||||
return with a status of 401 and the error code, M_MISSING_TOKEN or
|
||||
M_UNKNOWN_TOKEN respectively.
|
||||
"""
|
||||
|
||||
def __init__(self, msg, errcode):
|
||||
super().__init__(code=401, msg=msg, errcode=errcode)
|
||||
|
||||
|
||||
class MissingClientTokenError(InvalidClientCredentialsError):
|
||||
"""Raised when we couldn't find the access token in a request"""
|
||||
|
||||
def __init__(self, msg="Missing access token"):
|
||||
super().__init__(msg=msg, errcode="M_MISSING_TOKEN")
|
||||
|
||||
|
||||
class InvalidClientTokenError(InvalidClientCredentialsError):
|
||||
"""Raised when we didn't understand the access token in a request"""
|
||||
|
||||
def __init__(self, msg="Unrecognised access token", soft_logout=False):
|
||||
super().__init__(msg=msg, errcode="M_UNKNOWN_TOKEN")
|
||||
self._soft_logout = soft_logout
|
||||
|
||||
def error_dict(self):
|
||||
d = super().error_dict()
|
||||
d["soft_logout"] = self._soft_logout
|
||||
return d
|
||||
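To make the `soft_logout` flag concrete, here is a minimal sketch of the error body a client would receive for an expired token; the `errcode`/`error` layout follows the Matrix error convention, but the helper below is a simplification, not Synapse's actual `cs_error`:

```python
import json

# Sketch of the body produced by
# InvalidClientTokenError(msg="Access token has expired", soft_logout=True):
# the base error dict plus the soft_logout field added in error_dict() above.
def error_dict(msg, errcode, soft_logout):
    d = {"errcode": errcode, "error": msg}
    d["soft_logout"] = soft_logout
    return d

body = error_dict("Access token has expired", "M_UNKNOWN_TOKEN", True)
print(json.dumps(body))
# {"errcode": "M_UNKNOWN_TOKEN", "error": "Access token has expired", "soft_logout": true}
```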
|
||||
|
||||
class ResourceLimitError(SynapseError):
|
||||
"""
|
||||
Any error raised when there is a problem with resource usage.
|
||||
|
||||
@@ -132,7 +132,7 @@ class Filtering(object):
|
||||
@defer.inlineCallbacks
|
||||
def get_user_filter(self, user_localpart, filter_id):
|
||||
result = yield self.store.get_user_filter(user_localpart, filter_id)
|
||||
defer.returnValue(FilterCollection(result))
|
||||
return FilterCollection(result)
|
||||
|
||||
def add_user_filter(self, user_localpart, user_filter):
|
||||
self.check_valid_filter(user_filter)
|
||||
|
||||
@@ -15,7 +15,9 @@
|
||||
|
||||
import gc
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
import socket
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
@@ -27,28 +29,30 @@ from twisted.protocols.tls import TLSMemoryBIOFactory
|
||||
import synapse
|
||||
from synapse.app import check_bind_error
|
||||
from synapse.crypto import context_factory
|
||||
from synapse.util import PreserveLoggingContext
|
||||
from synapse.logging.context import PreserveLoggingContext
|
||||
from synapse.util.async_helpers import Linearizer
|
||||
from synapse.util.rlimit import change_resource_limit
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# list of tuples of function, args list, kwargs dict
|
||||
_sighup_callbacks = []
|
||||
|
||||
|
||||
def register_sighup(func):
|
||||
def register_sighup(func, *args, **kwargs):
|
||||
"""
|
||||
Register a function to be called when a SIGHUP occurs.
|
||||
|
||||
Args:
|
||||
func (function): Function to be called when sent a SIGHUP signal.
|
||||
Will be called with a single argument, the homeserver.
|
||||
Will be called with a single default argument, the homeserver.
|
||||
*args, **kwargs: args and kwargs to be passed to the target function.
|
||||
"""
|
||||
_sighup_callbacks.append(func)
|
||||
_sighup_callbacks.append((func, args, kwargs))
|
||||
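To make the new `(func, args, kwargs)` shape concrete, here is a hedged usage sketch; the callback and its argument are invented for illustration:

```python
# Illustrative only: register_sighup now stores the extra args/kwargs
# alongside the function, and the SIGHUP handler later calls
# func(hs, *args, **kwargs) for each registered tuple.
_sighup_callbacks = []

def register_sighup(func, *args, **kwargs):
    _sighup_callbacks.append((func, args, kwargs))

def _handle_sighup(hs):
    for func, args, kwargs in _sighup_callbacks:
        func(hs, *args, **kwargs)

def reload_certificate(hs, cert_path):  # hypothetical callback
    print("reloading %s for %s" % (cert_path, hs))

register_sighup(reload_certificate, "/etc/synapse/tls.pem")
_handle_sighup("homeserver")  # prints: reloading /etc/synapse/tls.pem for homeserver
```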
|
||||
|
||||
def start_worker_reactor(appname, config):
|
||||
def start_worker_reactor(appname, config, run_command=reactor.run):
|
||||
""" Run the reactor in the main process
|
||||
|
||||
Daemonizes if necessary, and then configures some resources, before starting
|
||||
@@ -57,6 +61,7 @@ def start_worker_reactor(appname, config):
|
||||
Args:
|
||||
appname (str): application name which will be sent to syslog
|
||||
config (synapse.config.Config): config object
|
||||
run_command (Callable[]): callable that actually runs the reactor
|
||||
"""
|
||||
|
||||
logger = logging.getLogger(config.worker_app)
|
||||
@@ -69,11 +74,19 @@ def start_worker_reactor(appname, config):
|
||||
daemonize=config.worker_daemonize,
|
||||
print_pidfile=config.print_pidfile,
|
||||
logger=logger,
|
||||
run_command=run_command,
|
||||
)
|
||||
|
||||
|
||||
def start_reactor(
|
||||
appname, soft_file_limit, gc_thresholds, pid_file, daemonize, print_pidfile, logger
|
||||
appname,
|
||||
soft_file_limit,
|
||||
gc_thresholds,
|
||||
pid_file,
|
||||
daemonize,
|
||||
print_pidfile,
|
||||
logger,
|
||||
run_command=reactor.run,
|
||||
):
|
||||
""" Run the reactor in the main process
|
||||
|
||||
@@ -88,38 +101,42 @@ def start_reactor(
|
||||
daemonize (bool): true to run the reactor in a background process
|
||||
print_pidfile (bool): whether to print the pid file, if daemonize is True
|
||||
logger (logging.Logger): logger instance to pass to Daemonize
|
||||
run_command (Callable[]): callable that actually runs the reactor
|
||||
"""
|
||||
|
||||
install_dns_limiter(reactor)
|
||||
|
||||
def run():
|
||||
# make sure that we run the reactor with the sentinel log context,
|
||||
# otherwise other PreserveLoggingContext instances will get confused
|
||||
# and complain when they see the logcontext arbitrarily swapping
|
||||
# between the sentinel and `run` logcontexts.
|
||||
with PreserveLoggingContext():
|
||||
logger.info("Running")
|
||||
logger.info("Running")
|
||||
change_resource_limit(soft_file_limit)
|
||||
if gc_thresholds:
|
||||
gc.set_threshold(*gc_thresholds)
|
||||
run_command()
|
||||
|
||||
change_resource_limit(soft_file_limit)
|
||||
if gc_thresholds:
|
||||
gc.set_threshold(*gc_thresholds)
|
||||
reactor.run()
|
||||
# make sure that we run the reactor with the sentinel log context,
|
||||
# otherwise other PreserveLoggingContext instances will get confused
|
||||
# and complain when they see the logcontext arbitrarily swapping
|
||||
# between the sentinel and `run` logcontexts.
|
||||
#
|
||||
# We also need to drop the logcontext before forking if we're daemonizing,
|
||||
# otherwise the cputime metrics get confused about the per-thread resource usage
|
||||
# appearing to go backwards.
|
||||
with PreserveLoggingContext():
|
||||
if daemonize:
|
||||
if print_pidfile:
|
||||
print(pid_file)
|
||||
|
||||
if daemonize:
|
||||
if print_pidfile:
|
||||
print(pid_file)
|
||||
|
||||
daemon = Daemonize(
|
||||
app=appname,
|
||||
pid=pid_file,
|
||||
action=run,
|
||||
auto_close_fds=False,
|
||||
verbose=True,
|
||||
logger=logger,
|
||||
)
|
||||
daemon.start()
|
||||
else:
|
||||
run()
|
||||
daemon = Daemonize(
|
||||
app=appname,
|
||||
pid=pid_file,
|
||||
action=run,
|
||||
auto_close_fds=False,
|
||||
verbose=True,
|
||||
logger=logger,
|
||||
)
|
||||
daemon.start()
|
||||
else:
|
||||
run()
|
||||
|
||||
|
||||
def quit_with_error(error_string):
|
||||
@@ -136,8 +153,7 @@ def listen_metrics(bind_addresses, port):
|
||||
"""
|
||||
Start Prometheus metrics server.
|
||||
"""
|
||||
from synapse.metrics import RegistryProxy
|
||||
from prometheus_client import start_http_server
|
||||
from synapse.metrics import RegistryProxy, start_http_server
|
||||
|
||||
for host in bind_addresses:
|
||||
logger.info("Starting metrics listener on %s:%d", host, port)
|
||||
@@ -230,8 +246,14 @@ def start(hs, listeners=None):
|
||||
if hasattr(signal, "SIGHUP"):
|
||||
|
||||
def handle_sighup(*args, **kwargs):
|
||||
for i in _sighup_callbacks:
|
||||
i(hs)
|
||||
# Tell systemd our state, if we're using it. This will silently fail if
|
||||
# we're not using systemd.
|
||||
sdnotify(b"RELOADING=1")
|
||||
|
||||
for i, args, kwargs in _sighup_callbacks:
|
||||
i(hs, *args, **kwargs)
|
||||
|
||||
sdnotify(b"READY=1")
|
||||
|
||||
signal.signal(signal.SIGHUP, handle_sighup)
|
||||
|
||||
@@ -240,11 +262,15 @@ def start(hs, listeners=None):
|
||||
# Load the certificate from disk.
|
||||
refresh_certificate(hs)
|
||||
|
||||
# Start the tracer
|
||||
synapse.logging.opentracing.init_tracer(hs.config)
|
||||
|
||||
# It is now safe to start your Synapse.
|
||||
hs.start_listening(listeners)
|
||||
hs.get_datastore().start_profiling()
|
||||
|
||||
setup_sentry(hs)
|
||||
setup_sdnotify(hs)
|
||||
except Exception:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
reactor = hs.get_reactor()
|
||||
@@ -277,6 +303,21 @@ def setup_sentry(hs):
|
||||
scope.set_tag("worker_name", name)
|
||||
|
||||
|
||||
def setup_sdnotify(hs):
|
||||
"""Adds process state hooks to tell systemd what we are up to.
|
||||
"""
|
||||
|
||||
# Tell systemd our state, if we're using it. This will silently fail if
|
||||
# we're not using systemd.
|
||||
hs.get_reactor().addSystemEventTrigger(
|
||||
"after", "startup", sdnotify, b"READY=1\nMAINPID=%i" % (os.getpid(),)
|
||||
)
|
||||
|
||||
hs.get_reactor().addSystemEventTrigger(
|
||||
"before", "shutdown", sdnotify, b"STOPPING=1"
|
||||
)
|
||||
|
||||
|
||||
def install_dns_limiter(reactor, max_dns_requests_in_flight=100):
|
||||
"""Replaces the resolver with one that limits the number of in flight DNS
|
||||
requests.
|
||||
@@ -370,3 +411,35 @@ class _DeferredResolutionReceiver(object):
|
||||
def resolutionComplete(self):
|
||||
self._deferred.callback(())
|
||||
self._receiver.resolutionComplete()
|
||||
|
||||
|
||||
sdnotify_sockaddr = os.getenv("NOTIFY_SOCKET")
|
||||
|
||||
|
||||
def sdnotify(state):
|
||||
"""
|
||||
Send a notification to systemd, if the NOTIFY_SOCKET env var is set.
|
||||
|
||||
This function is based on the sdnotify python package, but since it's only a few
|
||||
lines of code, it's easier to duplicate it here than to add a dependency on a
|
||||
package which many OSes don't include as a matter of principle.
|
||||
|
||||
Args:
|
||||
state (bytes): notification to send
|
||||
"""
|
||||
if not isinstance(state, bytes):
|
||||
raise TypeError("sdnotify should be called with a bytes")
|
||||
if not sdnotify_sockaddr:
|
||||
return
|
||||
addr = sdnotify_sockaddr
|
||||
if addr[0] == "@":
|
||||
addr = "\0" + addr[1:]
|
||||
|
||||
try:
|
||||
with socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) as sock:
|
||||
sock.connect(addr)
|
||||
sock.sendall(state)
|
||||
except Exception as e:
|
||||
# this is a bit surprising, since we don't expect to have a NOTIFY_SOCKET
|
||||
# unless systemd is expecting us to notify it.
|
||||
logger.warning("Unable to send notification to systemd: %s", e)
|
||||
|
||||
synapse/app/admin_cmd.py (new file, 264 lines)
@@ -0,0 +1,264 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2019 Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from canonicaljson import json
|
||||
|
||||
from twisted.internet import defer, task
|
||||
|
||||
import synapse
|
||||
from synapse.app import _base
|
||||
from synapse.config._base import ConfigError
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
from synapse.handlers.admin import ExfiltrationWriter
|
||||
from synapse.replication.slave.storage._base import BaseSlavedStore
|
||||
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
|
||||
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
|
||||
from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
|
||||
from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore
|
||||
from synapse.replication.slave.storage.devices import SlavedDeviceStore
|
||||
from synapse.replication.slave.storage.events import SlavedEventStore
|
||||
from synapse.replication.slave.storage.filtering import SlavedFilteringStore
|
||||
from synapse.replication.slave.storage.groups import SlavedGroupServerStore
|
||||
from synapse.replication.slave.storage.presence import SlavedPresenceStore
|
||||
from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
|
||||
from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
|
||||
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
|
||||
from synapse.replication.slave.storage.room import RoomStore
|
||||
from synapse.replication.tcp.client import ReplicationClientHandler
|
||||
from synapse.server import HomeServer
|
||||
from synapse.storage.engines import create_engine
|
||||
from synapse.util.logcontext import LoggingContext
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger("synapse.app.admin_cmd")
|
||||
|
||||
|
||||
class AdminCmdSlavedStore(
|
||||
SlavedReceiptsStore,
|
||||
SlavedAccountDataStore,
|
||||
SlavedApplicationServiceStore,
|
||||
SlavedRegistrationStore,
|
||||
SlavedFilteringStore,
|
||||
SlavedPresenceStore,
|
||||
SlavedGroupServerStore,
|
||||
SlavedDeviceInboxStore,
|
||||
SlavedDeviceStore,
|
||||
SlavedPushRuleStore,
|
||||
SlavedEventStore,
|
||||
SlavedClientIpStore,
|
||||
RoomStore,
|
||||
BaseSlavedStore,
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
class AdminCmdServer(HomeServer):
|
||||
DATASTORE_CLASS = AdminCmdSlavedStore
|
||||
|
||||
def _listen_http(self, listener_config):
|
||||
pass
|
||||
|
||||
def start_listening(self, listeners):
|
||||
pass
|
||||
|
||||
def build_tcp_replication(self):
|
||||
return AdminCmdReplicationHandler(self)
|
||||
|
||||
|
||||
class AdminCmdReplicationHandler(ReplicationClientHandler):
|
||||
@defer.inlineCallbacks
|
||||
def on_rdata(self, stream_name, token, rows):
|
||||
pass
|
||||
|
||||
def get_streams_to_replicate(self):
|
||||
return {}
|
||||
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def export_data_command(hs, args):
|
||||
"""Export data for a user.
|
||||
|
||||
Args:
|
||||
hs (HomeServer)
|
||||
args (argparse.Namespace)
|
||||
"""
|
||||
|
||||
user_id = args.user_id
|
||||
directory = args.output_directory
|
||||
|
||||
res = yield hs.get_handlers().admin_handler.export_user_data(
|
||||
user_id, FileExfiltrationWriter(user_id, directory=directory)
|
||||
)
|
||||
print(res)
|
||||
|
||||
|
||||
class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
"""An ExfiltrationWriter that writes the users data to a directory.
|
||||
Returns the directory location on completion.
|
||||
|
||||
Note: This writes to disk on the main reactor thread.
|
||||
|
||||
Args:
|
||||
user_id (str): The user whose data is being exfiltrated.
|
||||
directory (str|None): The directory to write the data to, if None then
|
||||
will write to a temporary directory.
|
||||
"""
|
||||
|
||||
def __init__(self, user_id, directory=None):
|
||||
self.user_id = user_id
|
||||
|
||||
if directory:
|
||||
self.base_directory = directory
|
||||
else:
|
||||
self.base_directory = tempfile.mkdtemp(
|
||||
prefix="synapse-exfiltrate__%s__" % (user_id,)
|
||||
)
|
||||
|
||||
os.makedirs(self.base_directory, exist_ok=True)
|
||||
if list(os.listdir(self.base_directory)):
|
||||
raise Exception("Directory must be empty")
|
||||
|
||||
def write_events(self, room_id, events):
|
||||
room_directory = os.path.join(self.base_directory, "rooms", room_id)
|
||||
os.makedirs(room_directory, exist_ok=True)
|
||||
events_file = os.path.join(room_directory, "events")
|
||||
|
||||
with open(events_file, "a") as f:
|
||||
for event in events:
|
||||
print(json.dumps(event.get_pdu_json()), file=f)
|
||||
|
||||
def write_state(self, room_id, event_id, state):
|
||||
room_directory = os.path.join(self.base_directory, "rooms", room_id)
|
||||
state_directory = os.path.join(room_directory, "state")
|
||||
os.makedirs(state_directory, exist_ok=True)
|
||||
|
||||
event_file = os.path.join(state_directory, event_id)
|
||||
|
||||
with open(event_file, "a") as f:
|
||||
for event in state.values():
|
||||
print(json.dumps(event.get_pdu_json()), file=f)
|
||||
|
||||
def write_invite(self, room_id, event, state):
|
||||
self.write_events(room_id, [event])
|
||||
|
||||
# We write the invite state somewhere else as they aren't full events
|
||||
# and are only a subset of the state at the event.
|
||||
room_directory = os.path.join(self.base_directory, "rooms", room_id)
|
||||
os.makedirs(room_directory, exist_ok=True)
|
||||
|
||||
invite_state = os.path.join(room_directory, "invite_state")
|
||||
|
||||
with open(invite_state, "a") as f:
|
||||
for event in state.values():
|
||||
print(json.dumps(event), file=f)
|
||||
|
||||
def finished(self):
|
||||
return self.base_directory
|
||||
|
||||
|
||||
def start(config_options):
|
||||
parser = argparse.ArgumentParser(description="Synapse Admin Command")
|
||||
HomeServerConfig.add_arguments_to_parser(parser)
|
||||
|
||||
subparser = parser.add_subparsers(
|
||||
title="Admin Commands",
|
||||
required=True,
|
||||
dest="command",
|
||||
metavar="<admin_command>",
|
||||
help="The admin command to perform.",
|
||||
)
|
||||
export_data_parser = subparser.add_parser(
|
||||
"export-data", help="Export all data for a user"
|
||||
)
|
||||
export_data_parser.add_argument("user_id", help="User to extra data from")
|
||||
export_data_parser.add_argument(
|
||||
"--output-directory",
|
||||
action="store",
|
||||
metavar="DIRECTORY",
|
||||
required=False,
|
||||
help="The directory to store the exported data in. Must be empty. Defaults"
|
||||
" to creating a temp directory.",
|
||||
)
|
||||
export_data_parser.set_defaults(func=export_data_command)
|
||||
|
||||
try:
|
||||
config, args = HomeServerConfig.load_config_with_parser(parser, config_options)
|
||||
except ConfigError as e:
|
||||
sys.stderr.write("\n" + str(e) + "\n")
|
||||
sys.exit(1)
|
||||
|
||||
if config.worker_app is not None:
|
||||
assert config.worker_app == "synapse.app.admin_cmd"
|
||||
|
||||
# Update the config with some basic overrides so that we don't have to specify
|
||||
# a full worker config.
|
||||
config.worker_app = "synapse.app.admin_cmd"
|
||||
|
||||
if (
|
||||
not config.worker_daemonize
|
||||
and not config.worker_log_file
|
||||
and not config.worker_log_config
|
||||
):
|
||||
# Since we're meant to be run as a "command" let's not redirect stdio
|
||||
# unless we've actually set log config.
|
||||
config.no_redirect_stdio = True
|
||||
|
||||
# Explicitly disable background processes
|
||||
config.update_user_directory = False
|
||||
config.start_pushers = False
|
||||
config.send_federation = False
|
||||
|
||||
synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||
|
||||
database_engine = create_engine(config.database_config)
|
||||
|
||||
ss = AdminCmdServer(
|
||||
config.server_name,
|
||||
db_config=config.database_config,
|
||||
config=config,
|
||||
version_string="Synapse/" + get_version_string(synapse),
|
||||
database_engine=database_engine,
|
||||
)
|
||||
|
||||
setup_logging(ss, config, use_worker_options=True)
|
||||
|
||||
ss.setup()
|
||||
|
||||
# We use task.react as the basic run command as it correctly handles tearing
|
||||
# down the reactor when the deferreds resolve and setting the return value.
|
||||
# We also make sure that `_base.start` gets run before we actually run the
|
||||
# command.
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def run(_reactor):
|
||||
with LoggingContext("command"):
|
||||
yield _base.start(ss, [])
|
||||
yield args.func(ss, args)
|
||||
|
||||
_base.start_worker_reactor(
|
||||
"synapse-admin-cmd", config, run_command=lambda: task.react(run)
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
with LoggingContext("main"):
|
||||
start(sys.argv[1:])
|
||||
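For context, this new app runs as a one-shot command rather than a daemon: an invocation would presumably look something like `python -m synapse.app.admin_cmd -c homeserver.yaml export-data @user:example.com --output-directory /tmp/export` (the user ID and paths here are illustrative), after which `FileExfiltrationWriter` leaves the exported events as newline-delimited JSON under `rooms/<room_id>/events` inside the output directory.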
@@ -26,8 +26,8 @@ from synapse.config._base import ConfigError
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
from synapse.http.site import SynapseSite
|
||||
from synapse.metrics import RegistryProxy
|
||||
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
|
||||
from synapse.logging.context import LoggingContext, run_in_background
|
||||
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
|
||||
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
|
||||
from synapse.replication.slave.storage.directory import DirectoryStore
|
||||
from synapse.replication.slave.storage.events import SlavedEventStore
|
||||
@@ -36,7 +36,6 @@ from synapse.replication.tcp.client import ReplicationClientHandler
|
||||
from synapse.server import HomeServer
|
||||
from synapse.storage.engines import create_engine
|
||||
from synapse.util.httpresourcetree import create_resource_tree
|
||||
from synapse.util.logcontext import LoggingContext, run_in_background
|
||||
from synapse.util.manhole import manhole
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
@@ -142,8 +141,6 @@ def start(config_options):
|
||||
|
||||
assert config.worker_app == "synapse.app.appservice"
|
||||
|
||||
setup_logging(config, use_worker_options=True)
|
||||
|
||||
events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||
|
||||
database_engine = create_engine(config.database_config)
|
||||
@@ -168,8 +165,12 @@ def start(config_options):
|
||||
database_engine=database_engine,
|
||||
)
|
||||
|
||||
setup_logging(ps, config, use_worker_options=True)
|
||||
|
||||
ps.setup()
|
||||
reactor.callWhenRunning(_base.start, ps, config.worker_listeners)
|
||||
reactor.addSystemEventTrigger(
|
||||
"before", "startup", _base.start, ps, config.worker_listeners
|
||||
)
|
||||
|
||||
_base.start_worker_reactor("synapse-appservice", config)
|
||||
|
||||
|
||||
@@ -27,8 +27,8 @@ from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
from synapse.http.server import JsonResource
|
||||
from synapse.http.site import SynapseSite
|
||||
from synapse.metrics import RegistryProxy
|
||||
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
|
||||
from synapse.logging.context import LoggingContext
|
||||
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
|
||||
from synapse.replication.slave.storage._base import BaseSlavedStore
|
||||
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
|
||||
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
|
||||
@@ -64,7 +64,6 @@ from synapse.rest.client.versions import VersionsRestServlet
|
||||
from synapse.server import HomeServer
|
||||
from synapse.storage.engines import create_engine
|
||||
from synapse.util.httpresourcetree import create_resource_tree
|
||||
from synapse.util.logcontext import LoggingContext
|
||||
from synapse.util.manhole import manhole
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
@@ -180,8 +179,6 @@ def start(config_options):
|
||||
|
||||
assert config.worker_app == "synapse.app.client_reader"
|
||||
|
||||
setup_logging(config, use_worker_options=True)
|
||||
|
||||
events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||
|
||||
database_engine = create_engine(config.database_config)
|
||||
@@ -194,8 +191,12 @@ def start(config_options):
|
||||
database_engine=database_engine,
|
||||
)
|
||||
|
||||
setup_logging(ss, config, use_worker_options=True)
|
||||
|
||||
ss.setup()
|
||||
reactor.callWhenRunning(_base.start, ss, config.worker_listeners)
|
||||
reactor.addSystemEventTrigger(
|
||||
"before", "startup", _base.start, ss, config.worker_listeners
|
||||
)
|
||||
|
||||
_base.start_worker_reactor("synapse-client-reader", config)
|
||||
|
||||
|
||||
@@ -27,8 +27,8 @@ from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
from synapse.http.server import JsonResource
|
||||
from synapse.http.site import SynapseSite
|
||||
from synapse.metrics import RegistryProxy
|
||||
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
|
||||
from synapse.logging.context import LoggingContext
|
||||
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
|
||||
from synapse.replication.slave.storage._base import BaseSlavedStore
|
||||
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
|
||||
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
|
||||
@@ -59,7 +59,6 @@ from synapse.server import HomeServer
|
||||
from synapse.storage.engines import create_engine
|
||||
from synapse.storage.user_directory import UserDirectoryStore
|
||||
from synapse.util.httpresourcetree import create_resource_tree
|
||||
from synapse.util.logcontext import LoggingContext
|
||||
from synapse.util.manhole import manhole
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
@@ -176,8 +175,6 @@ def start(config_options):
|
||||
|
||||
assert config.worker_replication_http_port is not None
|
||||
|
||||
setup_logging(config, use_worker_options=True)
|
||||
|
||||
# This should only be done on the user directory worker or the master
|
||||
config.update_user_directory = False
|
||||
|
||||
@@ -193,8 +190,12 @@ def start(config_options):
|
||||
database_engine=database_engine,
|
||||
)
|
||||
|
||||
setup_logging(ss, config, use_worker_options=True)
|
||||
|
||||
ss.setup()
|
||||
reactor.callWhenRunning(_base.start, ss, config.worker_listeners)
|
||||
reactor.addSystemEventTrigger(
|
||||
"before", "startup", _base.start, ss, config.worker_listeners
|
||||
)
|
||||
|
||||
_base.start_worker_reactor("synapse-event-creator", config)
|
||||
|
||||
|
||||
@@ -28,8 +28,8 @@ from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
from synapse.federation.transport.server import TransportLayerServer
|
||||
from synapse.http.site import SynapseSite
|
||||
from synapse.metrics import RegistryProxy
|
||||
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
|
||||
from synapse.logging.context import LoggingContext
|
||||
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
|
||||
from synapse.replication.slave.storage._base import BaseSlavedStore
|
||||
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
|
||||
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
|
||||
@@ -48,7 +48,6 @@ from synapse.rest.key.v2 import KeyApiV2Resource
|
||||
from synapse.server import HomeServer
|
||||
from synapse.storage.engines import create_engine
|
||||
from synapse.util.httpresourcetree import create_resource_tree
|
||||
from synapse.util.logcontext import LoggingContext
|
||||
from synapse.util.manhole import manhole
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
@@ -161,8 +160,6 @@ def start(config_options):
|
||||
|
||||
assert config.worker_app == "synapse.app.federation_reader"
|
||||
|
||||
setup_logging(config, use_worker_options=True)
|
||||
|
||||
events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||
|
||||
database_engine = create_engine(config.database_config)
|
||||
@@ -175,8 +172,12 @@ def start(config_options):
|
||||
database_engine=database_engine,
|
||||
)
|
||||
|
||||
setup_logging(ss, config, use_worker_options=True)
|
||||
|
||||
ss.setup()
|
||||
reactor.callWhenRunning(_base.start, ss, config.worker_listeners)
|
||||
reactor.addSystemEventTrigger(
|
||||
"before", "startup", _base.start, ss, config.worker_listeners
|
||||
)
|
||||
|
||||
_base.start_worker_reactor("synapse-federation-reader", config)
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.