Compare commits
728 commits: neilj/cont...v1.1.0
.buildkite/docker-compose.py27.pg94.yaml (deleted)
@@ -1,21 +0,0 @@
version: '3.1'

services:

  postgres:
    image: postgres:9.4
    environment:
      POSTGRES_PASSWORD: postgres

  testenv:
    image: python:2.7
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /app
    volumes:
      - ..:/app
.buildkite/docker-compose.py27.pg95.yaml (deleted)
@@ -1,21 +0,0 @@
version: '3.1'

services:

  postgres:
    image: postgres:9.5
    environment:
      POSTGRES_PASSWORD: postgres

  testenv:
    image: python:2.7
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /app
    volumes:
      - ..:/app
.buildkite/docker-compose.py35.pg94.yaml (deleted)
@@ -1,21 +0,0 @@
version: '3.1'

services:

  postgres:
    image: postgres:9.4
    environment:
      POSTGRES_PASSWORD: postgres

  testenv:
    image: python:3.5
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /app
    volumes:
      - ..:/app
.buildkite/format_tap.py (new file, 33 lines)
@@ -0,0 +1,33 @@
import sys
from tap.parser import Parser
from tap.line import Result, Unknown, Diagnostic

out = ["### TAP Output for " + sys.argv[2]]

p = Parser()

in_error = False

for line in p.parse_file(sys.argv[1]):
    if isinstance(line, Result):
        if in_error:
            out.append("")
            out.append("</pre></code></details>")
            out.append("")
            out.append("----")
            out.append("")
        in_error = False

        if not line.ok and not line.todo:
            in_error = True

            out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description))
            out.append("")
            out.append("<details><summary>Show log</summary><code><pre>")

    elif isinstance(line, Diagnostic) and in_error:
        out.append(line.text)

if out:
    for line in out[:-3]:
        print(line)
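Usage note (sketch, not part of the diff): the script takes the TAP results file as its first argument and a label for the "### TAP Output for ..." heading as its second. A minimal invocation, mirroring the call in .buildkite/synapse_sytest.sh further down:

```sh
# argv[1] = TAP results file, argv[2] = label used in the output heading.
python .buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" \
    | buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL"
```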
.buildkite/merge_base_branch.sh
@@ -1,22 +1,21 @@
#!/usr/bin/env bash

set -e
set -ex

# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful.
# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL.
echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV
source $BASH_ENV
if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
echo "Not merging forward, as this is a release branch"
exit 0
fi

if [[ -z "${CIRCLE_PR_NUMBER}" ]]
then
echo "Can't figure out what the PR number is! Assuming merge target is develop."
if [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]]; then
echo "Not a pull request, or hasn't had a PR opened yet..."

# It probably hasn't had a PR opened yet. Since all PRs land on develop, we
# can probably assume it's based on it and will be merged into it.
GITBASE="develop"
else
# Get the reference, using the GitHub API
GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
fi

# Show what we are before
.buildkite/pipeline.yml
@@ -2,10 +2,11 @@ env:
CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"

steps:

- command:
- "python -m pip install tox"
- "tox -e pep8"
label: "\U0001F9F9 PEP-8"
- "tox -e check_codestyle"
label: "\U0001F9F9 Check Style"
plugins:
- docker#v3.0.1:
image: "python:3.6"
@@ -36,8 +37,6 @@ steps:
image: "python:3.6"
propagate-environment: true

- wait

- command:
- "python -m pip install tox"
- "tox -e check-sampleconfig"
@@ -46,16 +45,25 @@ steps:
- docker#v3.0.1:
image: "python:3.6"

- wait

- command:
- "python -m pip install tox"
- "tox -e py27,codecov"
label: ":python: 2.7 / SQLite"
- "tox -e py35-old,codecov"
label: ":python: 3.5 / SQLite / Old Deps"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:2.7"
image: "python:3.5"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- command:
- "python -m pip install tox"
@@ -67,6 +75,12 @@ steps:
- docker#v3.0.1:
image: "python:3.5"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- command:
- "python -m pip install tox"
@@ -78,6 +92,12 @@ steps:
- docker#v3.0.1:
image: "python:3.6"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- command:
- "python -m pip install tox"
@@ -89,50 +109,12 @@ steps:
- docker#v3.0.1:
image: "python:3.7"
propagate-environment: true

- command:
- "python -m pip install tox"
- "tox -e py27-old,codecov"
label: ":python: 2.7 / SQLite / Old Deps"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:2.7"
propagate-environment: true

- label: ":python: 2.7 / :postgres: 9.4"
env:
TRIAL_FLAGS: "-j 4"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py27-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py27.pg94.yaml

- label: ":python: 2.7 / :postgres: 9.5"
env:
TRIAL_FLAGS: "-j 4"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py27-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py27.pg95.yaml

- label: ":python: 3.5 / :postgres: 9.4"
env:
TRIAL_FLAGS: "-j 4"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py35.pg94.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: ":python: 3.5 / :postgres: 9.5"
env:
@@ -144,6 +126,12 @@ steps:
run: testenv
config:
- .buildkite/docker-compose.py35.pg95.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: ":python: 3.7 / :postgres: 9.5"
env:
@@ -155,6 +143,12 @@ steps:
run: testenv
config:
- .buildkite/docker-compose.py37.pg95.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: ":python: 3.7 / :postgres: 11"
env:
@@ -166,3 +160,67 @@ steps:
run: testenv
config:
- .buildkite/docker-compose.py37.pg11.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: "SyTest - :python: 3.5 / SQLite / Monolith"
agents:
queue: "medium"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash .buildkite/synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
agents:
queue: "medium"
env:
POSTGRES: "1"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash .buildkite/synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
agents:
queue: "medium"
env:
POSTGRES: "1"
WORKERS: "1"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash .buildkite/synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
soft_fail: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
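Aside (sketch, not part of the diff): a rough local equivalent of one of the Postgres tox steps above, assuming docker-compose is installed and that the retained .buildkite/docker-compose.py35.pg95.yaml has the same shape as the deleted compose files shown earlier (in particular an `env_file: .env` entry, hence the `touch`):

```sh
# Approximate local reproduction of the ":python: 3.5 / :postgres: 9.5" step.
touch .env   # the compose files read an env file; the Buildkite plugin normally supplies it
docker-compose -f .buildkite/docker-compose.py35.pg95.yaml run \
    -e TRIAL_FLAGS="-j 4" testenv \
    bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'
```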
.buildkite/synapse_sytest.sh (new file, 145 lines)
@@ -0,0 +1,145 @@
#!/bin/bash
#
# Fetch sytest, and then run the tests for synapse. The entrypoint for the
# sytest-synapse docker images.

set -ex

if [ -n "$BUILDKITE" ]
then
    SYNAPSE_DIR=`pwd`
else
    SYNAPSE_DIR="/src"
fi

# Attempt to find a sytest to use.
# If /sytest exists, it means that a SyTest checkout has been mounted into the Docker image.
if [ -d "/sytest" ]; then
    # If the user has mounted in a SyTest checkout, use that.
    echo "Using local sytests..."

    # create ourselves a working directory and dos2unix some scripts therein
    mkdir -p /work/jenkins
    for i in install-deps.pl run-tests.pl tap-to-junit-xml.pl jenkins/prep_sytest_for_postgres.sh; do
        dos2unix -n "/sytest/$i" "/work/$i"
    done
    ln -sf /sytest/tests /work
    ln -sf /sytest/keys /work
    SYTEST_LIB="/sytest/lib"
else
    if [ -n "BUILDKITE_BRANCH" ]
    then
        branch_name=$BUILDKITE_BRANCH
    else
        # Otherwise, try and find out what the branch that the Synapse checkout is using. Fall back to develop if it's not a branch.
        branch_name="$(git --git-dir=/src/.git symbolic-ref HEAD 2>/dev/null)" || branch_name="develop"
    fi

    # Try and fetch the branch
    echo "Trying to get same-named sytest branch..."
    wget -q https://github.com/matrix-org/sytest/archive/$branch_name.tar.gz -O sytest.tar.gz || {
        # Probably a 404, fall back to develop
        echo "Using develop instead..."
        wget -q https://github.com/matrix-org/sytest/archive/develop.tar.gz -O sytest.tar.gz
    }

    mkdir -p /work
    tar -C /work --strip-components=1 -xf sytest.tar.gz
    SYTEST_LIB="/work/lib"
fi

cd /work

# PostgreSQL setup
if [ -n "$POSTGRES" ]
then
    export PGUSER=postgres
    export POSTGRES_DB_1=pg1
    export POSTGRES_DB_2=pg2

    # Start the database
    su -c 'eatmydata /usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres

    # Use the Jenkins script to write out the configuration for a PostgreSQL using Synapse
    jenkins/prep_sytest_for_postgres.sh

    # Make the test databases for the two Synapse servers that will be spun up
    su -c 'psql -c "CREATE DATABASE pg1;"' postgres
    su -c 'psql -c "CREATE DATABASE pg2;"' postgres

fi

if [ -n "$OFFLINE" ]; then
    # if we're in offline mode, just put synapse into the virtualenv, and
    # hope that the deps are up-to-date.
    #
    # (`pip install -e` likes to reinstall setuptools even if it's already installed,
    # so we just run setup.py explicitly.)
    #
    (cd $SYNAPSE_DIR && /venv/bin/python setup.py -q develop)
else
    # We've already created the virtualenv, but lets double check we have all
    # deps.
    /venv/bin/pip install -q --upgrade --no-cache-dir -e $SYNAPSE_DIR
    /venv/bin/pip install -q --upgrade --no-cache-dir \
        lxml psycopg2 coverage codecov tap.py

    # Make sure all Perl deps are installed -- this is done in the docker build
    # so will only install packages added since the last Docker build
    ./install-deps.pl
fi


# Run the tests
>&2 echo "+++ Running tests"

RUN_TESTS=(
    perl -I "$SYTEST_LIB" ./run-tests.pl --python=/venv/bin/python --synapse-directory=$SYNAPSE_DIR --coverage -O tap --all
)

TEST_STATUS=0

if [ -n "$WORKERS" ]; then
    RUN_TESTS+=(-I Synapse::ViaHaproxy --dendron-binary=/pydron.py)
else
    RUN_TESTS+=(-I Synapse)
fi

"${RUN_TESTS[@]}" "$@" > results.tap || TEST_STATUS=$?

if [ $TEST_STATUS -ne 0 ]; then
    >&2 echo -e "run-tests \e[31mFAILED\e[0m: exit code $TEST_STATUS"
else
    >&2 echo -e "run-tests \e[32mPASSED\e[0m"
fi

>&2 echo "--- Copying assets"

# Copy out the logs
mkdir -p /logs
cp results.tap /logs/results.tap
rsync --ignore-missing-args --min-size=1B -av server-0 server-1 /logs --include "*/" --include="*.log.*" --include="*.log" --exclude="*"

# Upload coverage to codecov and upload files, if running on Buildkite
if [ -n "$BUILDKITE" ]
then
    /venv/bin/coverage combine || true
    /venv/bin/coverage xml || true
    /venv/bin/codecov -X gcov -f coverage.xml

    wget -O buildkite.tar.gz https://github.com/buildkite/agent/releases/download/v3.13.0/buildkite-agent-linux-amd64-3.13.0.tar.gz
    tar xvf buildkite.tar.gz
    chmod +x ./buildkite-agent

    # Upload the files
    ./buildkite-agent artifact upload "/logs/**/*.log*"
    ./buildkite-agent artifact upload "/logs/results.tap"

    if [ $TEST_STATUS -ne 0 ]; then
        # Annotate, if failure
        /venv/bin/python $SYNAPSE_DIR/.buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" | ./buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL"
    fi
fi


exit $TEST_STATUS
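Usage note (sketch, not part of the diff): the header comment above describes this script as the entrypoint of the sytest-synapse images used in the pipeline. A hypothetical local run using the mount points the script itself looks for (/src for the Synapse checkout, /sytest for an optional SyTest checkout, /logs for output); the host paths and image tag are illustrative:

```sh
# Run the SyTest suite against a local Synapse checkout (paths are examples).
docker run --rm \
    -e POSTGRES=1 \
    -v /path/to/synapse:/src \
    -v /path/to/sytest:/sytest \
    -v /tmp/sytest-logs:/logs \
    matrixdotorg/sytest-synapse:py35
```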
.circleci/config.yml
@@ -4,160 +4,23 @@ jobs:
machine: true
steps:
- checkout
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG}-py2 .
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG} -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 .
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG} -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 .
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py2
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
dockerhubuploadlatest:
machine: true
steps:
- checkout
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest-py2 .
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest -t matrixdotorg/synapse:latest-py3 --build-arg PYTHON_VERSION=3.6 .
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest -t matrixdotorg/synapse:latest-py3 .
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
- run: docker push matrixdotorg/synapse:latest
- run: docker push matrixdotorg/synapse:latest-py2
- run: docker push matrixdotorg/synapse:latest-py3
sytestpy2:
docker:
- image: matrixdotorg/sytest-synapsepy2
working_directory: /src
steps:
- checkout
- run: /synapse_sytest.sh
- store_artifacts:
path: /logs
destination: logs
- store_test_results:
path: /logs
sytestpy2postgres:
docker:
- image: matrixdotorg/sytest-synapsepy2
working_directory: /src
steps:
- checkout
- run: POSTGRES=1 /synapse_sytest.sh
- store_artifacts:
path: /logs
destination: logs
- store_test_results:
path: /logs
sytestpy2merged:
docker:
- image: matrixdotorg/sytest-synapsepy2
working_directory: /src
steps:
- checkout
- run: bash .circleci/merge_base_branch.sh
- run: /synapse_sytest.sh
- store_artifacts:
path: /logs
destination: logs
- store_test_results:
path: /logs
sytestpy2postgresmerged:
docker:
- image: matrixdotorg/sytest-synapsepy2
working_directory: /src
steps:
- checkout
- run: bash .circleci/merge_base_branch.sh
- run: POSTGRES=1 /synapse_sytest.sh
- store_artifacts:
path: /logs
destination: logs
- store_test_results:
path: /logs

sytestpy3:
docker:
- image: matrixdotorg/sytest-synapsepy3
working_directory: /src
steps:
- checkout
- run: /synapse_sytest.sh
- store_artifacts:
path: /logs
destination: logs
- store_test_results:
path: /logs
sytestpy3postgres:
docker:
- image: matrixdotorg/sytest-synapsepy3
working_directory: /src
steps:
- checkout
- run: POSTGRES=1 /synapse_sytest.sh
- store_artifacts:
path: /logs
destination: logs
- store_test_results:
path: /logs
sytestpy3merged:
docker:
- image: matrixdotorg/sytest-synapsepy3
working_directory: /src
steps:
- checkout
- run: bash .circleci/merge_base_branch.sh
- run: /synapse_sytest.sh
- store_artifacts:
path: /logs
destination: logs
- store_test_results:
path: /logs
sytestpy3postgresmerged:
docker:
- image: matrixdotorg/sytest-synapsepy3
working_directory: /src
steps:
- checkout
- run: bash .circleci/merge_base_branch.sh
- run: POSTGRES=1 /synapse_sytest.sh
- store_artifacts:
path: /logs
destination: logs
- store_test_results:
path: /logs

workflows:
version: 2
build:
jobs:
- sytestpy2:
filters:
branches:
only: /develop|master|release-.*/
- sytestpy2postgres:
filters:
branches:
only: /develop|master|release-.*/
- sytestpy3:
filters:
branches:
only: /develop|master|release-.*/
- sytestpy3postgres:
filters:
branches:
only: /develop|master|release-.*/
- sytestpy2merged:
filters:
branches:
ignore: /develop|master|release-.*/
- sytestpy2postgresmerged:
filters:
branches:
ignore: /develop|master|release-.*/
- sytestpy3merged:
filters:
branches:
ignore: /develop|master|release-.*/
- sytestpy3postgresmerged:
filters:
branches:
ignore: /develop|master|release-.*/
- dockerhubuploadrelease:
filters:
tags:
.dockerignore
@@ -1,9 +1,13 @@
Dockerfile
.travis.yml
.gitignore
demo/etc
tox.ini
.git/*
.tox/*
debian/matrix-synapse/
debian/matrix-synapse-*/
# ignore everything by default
*

# things to include
!docker
!scripts
!synapse
!MANIFEST.in
!README.rst
!setup.py
!synctl

**/__pycache__
.github/FUNDING.yml (new file, 4 lines)
@@ -0,0 +1,4 @@
# One username per supported platform and one custom link
patreon: matrixdotorg
liberapay: matrixdotorg
custom: https://paypal.me/matrixdotorg
.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md
@@ -4,6 +4,7 @@ about: I need support for Synapse

---

# Please ask for support in [**#matrix:matrix.org**](https://matrix.to/#/#matrix:matrix.org)
Please don't file github issues asking for support.

## Don't file an issue as a support request.
Instead, please join [`#synapse:matrix.org`](https://matrix.to/#/#synapse:matrix.org)
(from a matrix.org account if necessary), and ask there.
.github/SUPPORT.md
@@ -1,3 +1,3 @@
[**#matrix:matrix.org**](https://matrix.to/#/#matrix:matrix.org) is the official support room for Matrix, and can be accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html

It can also be access via IRC bridge at irc://irc.freenode.net/matrix or on the web here: https://webchat.freenode.net/?channels=matrix
[**#synapse:matrix.org**](https://matrix.to/#/#synapse:matrix.org) is the official support room for
Synapse, and can be accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html.
Please ask for support there, rather than filing github issues.
AUTHORS.rst
@@ -72,3 +72,6 @@ Jason Robinson <jasonr at matrix.org>

Joseph Weston <joseph at weston.cloud>
 + Add admin API for querying HS version

Benjamin Saunders <ben.e.saunders at gmail dot com>
 * Documentation improvements
CHANGES.md (425 lines added)
@@ -1,3 +1,428 @@
|
||||
Synapse 1.1.0 (2019-07-04)
|
||||
==========================
|
||||
|
||||
As of v1.1.0, Synapse no longer supports Python 2, nor Postgres version 9.4.
|
||||
See the [upgrade notes](UPGRADE.rst#upgrading-to-v110) for more details.
|
||||
|
||||
This release also deprecates the use of environment variables to configure the
|
||||
docker image. See the [docker README](https://github.com/matrix-org/synapse/blob/release-v1.1.0/docker/README.md#legacy-dynamic-configuration-file-support)
|
||||
for more details.
|
||||
|
||||
No changes since 1.1.0rc2.
|
||||
|
||||
|
||||
Synapse 1.1.0rc2 (2019-07-03)
|
||||
=============================
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Fix regression in 1.1rc1 where OPTIONS requests to the media repo would fail. ([\#5593](https://github.com/matrix-org/synapse/issues/5593))
|
||||
- Removed the `SYNAPSE_SMTP_*` docker container environment variables. Using these environment variables prevented the docker container from starting in Synapse v1.0, even though they didn't actually allow any functionality anyway. ([\#5596](https://github.com/matrix-org/synapse/issues/5596))
|
||||
- Fix a number of "Starting txn from sentinel context" warnings. ([\#5605](https://github.com/matrix-org/synapse/issues/5605))
|
||||
|
||||
|
||||
Internal Changes
|
||||
----------------
|
||||
|
||||
- Update github templates. ([\#5552](https://github.com/matrix-org/synapse/issues/5552))
|
||||
|
||||
|
||||
Synapse 1.1.0rc1 (2019-07-02)
|
||||
=============================
|
||||
|
||||
As of v1.1.0, Synapse no longer supports Python 2, nor Postgres version 9.4.
|
||||
See the [upgrade notes](UPGRADE.rst#upgrading-to-v110) for more details.
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Added possibilty to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092))
|
||||
- Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252))
|
||||
- Allow expired user to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363))
|
||||
- Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461))
|
||||
- Add --no-daemonize option to run synapse in the foreground, per issue #4130. Contributed by Soham Gumaste. ([\#5412](https://github.com/matrix-org/synapse/issues/5412), [\#5587](https://github.com/matrix-org/synapse/issues/5587))
|
||||
- Fully support SAML2 authentication. Contributed by [Alexander Trost](https://github.com/galexrt) - thank you! ([\#5422](https://github.com/matrix-org/synapse/issues/5422))
|
||||
- Allow server admins to define implementations of extra rules for allowing or denying incoming events. ([\#5440](https://github.com/matrix-org/synapse/issues/5440), [\#5474](https://github.com/matrix-org/synapse/issues/5474), [\#5477](https://github.com/matrix-org/synapse/issues/5477))
|
||||
- Add support for handling pagination APIs on client reader worker. ([\#5505](https://github.com/matrix-org/synapse/issues/5505), [\#5513](https://github.com/matrix-org/synapse/issues/5513), [\#5531](https://github.com/matrix-org/synapse/issues/5531))
|
||||
- Improve help and cmdline option names for --generate-config options. ([\#5512](https://github.com/matrix-org/synapse/issues/5512))
|
||||
- Allow configuration of the path used for ACME account keys. ([\#5516](https://github.com/matrix-org/synapse/issues/5516), [\#5521](https://github.com/matrix-org/synapse/issues/5521), [\#5522](https://github.com/matrix-org/synapse/issues/5522))
|
||||
- Add --data-dir and --open-private-ports options. ([\#5524](https://github.com/matrix-org/synapse/issues/5524))
|
||||
- Split public rooms directory auth config in two settings, in order to manage client auth independently from the federation part of it. Obsoletes the "restrict_public_rooms_to_local_users" configuration setting. If "restrict_public_rooms_to_local_users" is set in the config, Synapse will act as if both new options are enabled, i.e. require authentication through the client API and deny federation requests. ([\#5534](https://github.com/matrix-org/synapse/issues/5534))
|
||||
- The minimum TLS version used for outgoing federation requests can now be set with `federation_client_minimum_tls_version`. ([\#5550](https://github.com/matrix-org/synapse/issues/5550))
|
||||
- Optimise devices changed query to not pull unnecessary rows from the database, reducing database load. ([\#5559](https://github.com/matrix-org/synapse/issues/5559))
|
||||
- Add new metrics for number of forward extremities being persisted and number of state groups involved in resolution. ([\#5476](https://github.com/matrix-org/synapse/issues/5476))
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Fix bug processing incoming events over federation if call to `/get_missing_events` fails. ([\#5042](https://github.com/matrix-org/synapse/issues/5042))
|
||||
- Prevent more than one room upgrade happening simultaneously on the same room. ([\#5051](https://github.com/matrix-org/synapse/issues/5051))
|
||||
- Fix a bug where running synapse_port_db would cause the account validity feature to fail because it didn't set the type of the email_sent column to boolean. ([\#5325](https://github.com/matrix-org/synapse/issues/5325))
|
||||
- Warn about disabling email-based password resets when a reset occurs, and remove warning when someone attempts a phone-based reset. ([\#5387](https://github.com/matrix-org/synapse/issues/5387))
|
||||
- Fix email notifications for unnamed rooms with multiple people. ([\#5388](https://github.com/matrix-org/synapse/issues/5388))
|
||||
- Fix exceptions in federation reader worker caused by attempting to renew attestations, which should only happen on master worker. ([\#5389](https://github.com/matrix-org/synapse/issues/5389))
|
||||
- Fix handling of failures fetching remote content to not log failures as exceptions. ([\#5390](https://github.com/matrix-org/synapse/issues/5390))
|
||||
- Fix a bug where deactivated users could receive renewal emails if the account validity feature is on. ([\#5394](https://github.com/matrix-org/synapse/issues/5394))
|
||||
- Fix missing invite state after exchanging 3PID invites over federaton. ([\#5464](https://github.com/matrix-org/synapse/issues/5464))
|
||||
- Fix intermittent exceptions on Apple hardware. Also fix bug that caused database activity times to be under-reported in log lines. ([\#5498](https://github.com/matrix-org/synapse/issues/5498))
|
||||
- Fix logging error when a tampered event is detected. ([\#5500](https://github.com/matrix-org/synapse/issues/5500))
|
||||
- Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507))
|
||||
- Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. ([\#5514](https://github.com/matrix-org/synapse/issues/5514))
|
||||
- Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523))
|
||||
- Fixed m.login.jwt using unregistred user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586))
|
||||
- Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576))
|
||||
|
||||
|
||||
Updates to the Docker image
|
||||
---------------------------
|
||||
- Add ability to change Docker containers [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) with the `TZ` variable. ([\#5383](https://github.com/matrix-org/synapse/issues/5383))
|
||||
- Update docker image to use Python 3.7. ([\#5546](https://github.com/matrix-org/synapse/issues/5546))
|
||||
- Deprecate the use of environment variables for configuration, and make the use of a static configuration the default. ([\#5561](https://github.com/matrix-org/synapse/issues/5561), [\#5562](https://github.com/matrix-org/synapse/issues/5562), [\#5566](https://github.com/matrix-org/synapse/issues/5566), [\#5567](https://github.com/matrix-org/synapse/issues/5567))
|
||||
- Increase default log level for docker image to INFO. It can still be changed by editing the generated log.config file. ([\#5547](https://github.com/matrix-org/synapse/issues/5547))
|
||||
- Send synapse logs to the docker logging system, by default. ([\#5565](https://github.com/matrix-org/synapse/issues/5565))
|
||||
- Open the non-TLS port by default. ([\#5568](https://github.com/matrix-org/synapse/issues/5568))
|
||||
- Fix failure to start under docker with SAML support enabled. ([\#5490](https://github.com/matrix-org/synapse/issues/5490))
|
||||
- Use a sensible location for data files when generating a config file. ([\#5563](https://github.com/matrix-org/synapse/issues/5563))
|
||||
|
||||
|
||||
Deprecations and Removals
|
||||
-------------------------
|
||||
|
||||
- Python 2.7 is no longer a supported platform. Synapse now requires Python 3.5+ to run. ([\#5425](https://github.com/matrix-org/synapse/issues/5425))
|
||||
- PostgreSQL 9.4 is no longer supported. Synapse requires Postgres 9.5+ or above for Postgres support. ([\#5448](https://github.com/matrix-org/synapse/issues/5448))
|
||||
- Remove support for cpu_affinity setting. ([\#5525](https://github.com/matrix-org/synapse/issues/5525))
|
||||
|
||||
|
||||
Improved Documentation
|
||||
----------------------
|
||||
- Improve README section on performance troubleshooting. ([\#4276](https://github.com/matrix-org/synapse/issues/4276))
|
||||
- Add information about how to install and run `black` on the codebase to code_style.rst. ([\#5537](https://github.com/matrix-org/synapse/issues/5537))
|
||||
- Improve install docs on choosing server_name. ([\#5558](https://github.com/matrix-org/synapse/issues/5558))
|
||||
|
||||
|
||||
Internal Changes
|
||||
----------------
|
||||
|
||||
- Add logging to 3pid invite signature verification. ([\#5015](https://github.com/matrix-org/synapse/issues/5015))
|
||||
- Update example haproxy config to a more compatible setup. ([\#5313](https://github.com/matrix-org/synapse/issues/5313))
|
||||
- Track deactivated accounts in the database. ([\#5378](https://github.com/matrix-org/synapse/issues/5378), [\#5465](https://github.com/matrix-org/synapse/issues/5465), [\#5493](https://github.com/matrix-org/synapse/issues/5493))
|
||||
- Clean up code for sending federation EDUs. ([\#5381](https://github.com/matrix-org/synapse/issues/5381))
|
||||
- Add a sponsor button to the repo. ([\#5382](https://github.com/matrix-org/synapse/issues/5382), [\#5386](https://github.com/matrix-org/synapse/issues/5386))
|
||||
- Don't log non-200 responses from federation queries as exceptions. ([\#5383](https://github.com/matrix-org/synapse/issues/5383))
|
||||
- Update Python syntax in contrib/ to Python 3. ([\#5446](https://github.com/matrix-org/synapse/issues/5446))
|
||||
- Update federation_client dev script to support `.well-known` and work with python3. ([\#5447](https://github.com/matrix-org/synapse/issues/5447))
|
||||
- SyTest has been moved to Buildkite. ([\#5459](https://github.com/matrix-org/synapse/issues/5459))
|
||||
- Demo script now uses python3. ([\#5460](https://github.com/matrix-org/synapse/issues/5460))
|
||||
- Synapse can now handle RestServlets that return coroutines. ([\#5475](https://github.com/matrix-org/synapse/issues/5475), [\#5585](https://github.com/matrix-org/synapse/issues/5585))
|
||||
- The demo servers talk to each other again. ([\#5478](https://github.com/matrix-org/synapse/issues/5478))
|
||||
- Add an EXPERIMENTAL config option to try and periodically clean up extremities by sending dummy events. ([\#5480](https://github.com/matrix-org/synapse/issues/5480))
|
||||
- Synapse's codebase is now formatted by `black`. ([\#5482](https://github.com/matrix-org/synapse/issues/5482))
|
||||
- Some cleanups and sanity-checking in the CPU and database metrics. ([\#5499](https://github.com/matrix-org/synapse/issues/5499))
|
||||
- Improve email notification logging. ([\#5502](https://github.com/matrix-org/synapse/issues/5502))
|
||||
- Fix "Unexpected entry in 'full_schemas'" log warning. ([\#5509](https://github.com/matrix-org/synapse/issues/5509))
|
||||
- Improve logging when generating config files. ([\#5510](https://github.com/matrix-org/synapse/issues/5510))
|
||||
- Refactor and clean up Config parser for maintainability. ([\#5511](https://github.com/matrix-org/synapse/issues/5511))
|
||||
- Make the config clearer in that email.template_dir is relative to the Synapse's root directory, not the `synapse/` folder within it. ([\#5543](https://github.com/matrix-org/synapse/issues/5543))
|
||||
- Update v1.0.0 release changelog to include more information about changes to password resets. ([\#5545](https://github.com/matrix-org/synapse/issues/5545))
|
||||
- Remove non-functioning check_event_hash.py dev script. ([\#5548](https://github.com/matrix-org/synapse/issues/5548))
|
||||
- Synapse will now only allow TLS v1.2 connections when serving federation, if it terminates TLS. As Synapse's allowed ciphers were only able to be used in TLSv1.2 before, this does not change behaviour. ([\#5550](https://github.com/matrix-org/synapse/issues/5550))
|
||||
- Logging when running GC collection on generation 0 is now at the DEBUG level, not INFO. ([\#5557](https://github.com/matrix-org/synapse/issues/5557))
|
||||
- Reduce the amount of stuff we send in the docker context. ([\#5564](https://github.com/matrix-org/synapse/issues/5564))
|
||||
- Point the reverse links in the Purge History contrib scripts at the intended location. ([\#5570](https://github.com/matrix-org/synapse/issues/5570))
|
||||
|
||||
|
||||
Synapse 1.0.0 (2019-06-11)
|
||||
==========================
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Fix bug where attempting to send transactions with large number of EDUs can fail. ([\#5418](https://github.com/matrix-org/synapse/issues/5418))
|
||||
|
||||
|
||||
Improved Documentation
|
||||
----------------------
|
||||
|
||||
- Expand the federation guide to include relevant content from the MSC1711 FAQ ([\#5419](https://github.com/matrix-org/synapse/issues/5419))
|
||||
|
||||
|
||||
Internal Changes
|
||||
----------------
|
||||
|
||||
- Move password reset links to /_matrix/client/unstable namespace. ([\#5424](https://github.com/matrix-org/synapse/issues/5424))
|
||||
|
||||
|
||||
Synapse 1.0.0rc3 (2019-06-10)
|
||||
=============================
|
||||
|
||||
Security: Fix authentication bug introduced in 1.0.0rc1. Please upgrade to rc3 immediately
|
||||
|
||||
|
||||
Synapse 1.0.0rc2 (2019-06-10)
|
||||
=============================
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Remove redundant warning about key server response validation. ([\#5392](https://github.com/matrix-org/synapse/issues/5392))
|
||||
- Fix bug where old keys stored in the database with a null valid until timestamp caused all verification requests for that key to fail. ([\#5415](https://github.com/matrix-org/synapse/issues/5415))
|
||||
- Fix excessive memory using with default `federation_verify_certificates: true` configuration. ([\#5417](https://github.com/matrix-org/synapse/issues/5417))
|
||||
|
||||
|
||||
Synapse 1.0.0rc1 (2019-06-07)
|
||||
=============================
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Synapse now more efficiently collates room statistics. ([\#4338](https://github.com/matrix-org/synapse/issues/4338), [\#5260](https://github.com/matrix-org/synapse/issues/5260), [\#5324](https://github.com/matrix-org/synapse/issues/5324))
|
||||
- Add experimental support for relations (aka reactions and edits). ([\#5220](https://github.com/matrix-org/synapse/issues/5220))
|
||||
- Ability to configure default room version. ([\#5223](https://github.com/matrix-org/synapse/issues/5223), [\#5249](https://github.com/matrix-org/synapse/issues/5249))
|
||||
- Allow configuring a range for the account validity startup job. ([\#5276](https://github.com/matrix-org/synapse/issues/5276))
|
||||
- CAS login will now hit the r0 API, not the deprecated v1 one. ([\#5286](https://github.com/matrix-org/synapse/issues/5286))
|
||||
- Validate federation server TLS certificates by default (implements [MSC1711](https://github.com/matrix-org/matrix-doc/blob/master/proposals/1711-x509-for-federation.md)). ([\#5359](https://github.com/matrix-org/synapse/issues/5359))
|
||||
- Update /_matrix/client/versions to reference support for r0.5.0. ([\#5360](https://github.com/matrix-org/synapse/issues/5360))
|
||||
- Add a script to generate new signing-key files. ([\#5361](https://github.com/matrix-org/synapse/issues/5361))
|
||||
- Update upgrade and installation guides ahead of 1.0. ([\#5371](https://github.com/matrix-org/synapse/issues/5371))
|
||||
- Replace the `perspectives` configuration section with `trusted_key_servers`, and make validating the signatures on responses optional (since TLS will do this job for us). ([\#5374](https://github.com/matrix-org/synapse/issues/5374))
|
||||
- Add ability to perform password reset via email without trusting the identity server. **As a result of this PR, password resets will now be disabled on the default configuration.**
|
||||
|
||||
Password reset emails are now sent from the homeserver by default, instead of the identity server. To enable this functionality, ensure `email` and `public_baseurl` config options are filled out.
|
||||
|
||||
If you would like to re-enable password resets being sent from the identity server (warning: this is dangerous! See [#5345](https://github.com/matrix-org/synapse/pull/5345)), set `email.trust_identity_server_for_password_resets` to true. ([\#5377](https://github.com/matrix-org/synapse/issues/5377))
|
||||
- Set default room version to v4. ([\#5379](https://github.com/matrix-org/synapse/issues/5379))
|
||||
|
||||
|
||||
Bugfixes
--------

- Fix the client-server API not sending "m.heroes" to lazy-load /sync requests when a room's name or its canonical alias is empty. Thanks to @dnaf for this work! ([\#5089](https://github.com/matrix-org/synapse/issues/5089))
- Prevent federation device list updates breaking when processing multiple updates at once. ([\#5156](https://github.com/matrix-org/synapse/issues/5156))
- Fix worker registration bug caused by ClientReaderSlavedStore being unable to see get_profileinfo. ([\#5200](https://github.com/matrix-org/synapse/issues/5200))
- Fix race when backfilling in rooms with worker mode. ([\#5221](https://github.com/matrix-org/synapse/issues/5221))
- Fix appservice timestamp massaging. ([\#5233](https://github.com/matrix-org/synapse/issues/5233))
- Ensure that server_keys fetched via a notary server are correctly signed. ([\#5251](https://github.com/matrix-org/synapse/issues/5251))
- Show the correct error when logging out and the access token is missing. ([\#5256](https://github.com/matrix-org/synapse/issues/5256))
- Fix the error code returned when there is an invalid parameter on /_matrix/client/r0/publicRooms. ([\#5257](https://github.com/matrix-org/synapse/issues/5257))
- Fix error when downloading a thumbnail with a missing width/height parameter. ([\#5258](https://github.com/matrix-org/synapse/issues/5258))
- Fix schema update for account validity. ([\#5268](https://github.com/matrix-org/synapse/issues/5268))
- Fix bug where we leaked extremities when we soft failed events, leading to performance degradation. ([\#5274](https://github.com/matrix-org/synapse/issues/5274), [\#5278](https://github.com/matrix-org/synapse/issues/5278), [\#5291](https://github.com/matrix-org/synapse/issues/5291))
- Fix "db txn 'update_presence' from sentinel context" log messages. ([\#5275](https://github.com/matrix-org/synapse/issues/5275))
- Fix dropped logcontexts during high outbound traffic. ([\#5277](https://github.com/matrix-org/synapse/issues/5277))
- Fix a bug where it was not possible to get events in the federation format with the request `GET /_matrix/client/r0/rooms/{roomId}/messages`. ([\#5293](https://github.com/matrix-org/synapse/issues/5293))
- Fix performance problems with the room stats background update. ([\#5294](https://github.com/matrix-org/synapse/issues/5294))
- Fix noisy 'no key for server' logs. ([\#5300](https://github.com/matrix-org/synapse/issues/5300))
- Fix bug where a notary server would sometimes forget old keys. ([\#5307](https://github.com/matrix-org/synapse/issues/5307))
- Prevent users from setting huge displaynames and avatar URLs. ([\#5309](https://github.com/matrix-org/synapse/issues/5309))
- Fix handling of failures when processing incoming events where calling `/event_auth` on the remote server fails. ([\#5317](https://github.com/matrix-org/synapse/issues/5317))
- Ensure that we have an up-to-date copy of the signing key when validating incoming federation requests. ([\#5321](https://github.com/matrix-org/synapse/issues/5321))
- Fix various problems which made the signing-key notary server time out for some requests. ([\#5333](https://github.com/matrix-org/synapse/issues/5333))
- Fix bug which would make certain operations (such as room joins) block for 20 minutes while attempting to fetch verification keys. ([\#5334](https://github.com/matrix-org/synapse/issues/5334))
- Fix a bug where we could rapidly mark a server as unreachable even though it was only down for a few minutes. ([\#5335](https://github.com/matrix-org/synapse/issues/5335), [\#5340](https://github.com/matrix-org/synapse/issues/5340))
- Fix a bug where account validity renewal emails could only be sent when email notifications were enabled. ([\#5341](https://github.com/matrix-org/synapse/issues/5341))
- Fix failure when fetching batches of events during backfill, etc. ([\#5342](https://github.com/matrix-org/synapse/issues/5342))
- Add a new room version where the timestamps on events are checked against the validity periods on signing keys. ([\#5348](https://github.com/matrix-org/synapse/issues/5348), [\#5354](https://github.com/matrix-org/synapse/issues/5354))
- Fix room stats and presence background updates to correctly handle missing events. ([\#5352](https://github.com/matrix-org/synapse/issues/5352))
- Include left members in room summaries' heroes. ([\#5355](https://github.com/matrix-org/synapse/issues/5355))
- Fix the `federation_custom_ca_list` configuration option (see the sketch after this list). ([\#5362](https://github.com/matrix-org/synapse/issues/5362))
- Fix missing logcontext warnings on shutdown. ([\#5369](https://github.com/matrix-org/synapse/issues/5369))

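The MSC1711 entry in the Features list and the `federation_custom_ca_list` fix above both revolve around federation TLS validation; the upgrade notes further down this page name the switches involved. A minimal, illustrative sketch follows; the values are examples only, the shape of each list is assumed from the sample config, and turning verification off is only reasonable inside a closed federation:

```yaml
# On by default from 1.0 onwards; leave it on unless you have a good reason.
federation_verify_certificates: true

# Or skip verification for a handful of known legacy servers only:
federation_certificate_verification_whitelist:
  - "legacy.example.com"

# Extra CA certificates to trust for federation traffic, e.g. a private CA.
federation_custom_ca_list:
  - "/etc/ssl/certs/private-ca.pem"
```
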
Improved Documentation
----------------------

- Fix docs on resetting the user directory. ([\#5282](https://github.com/matrix-org/synapse/issues/5282))
- Fix notes about ACME in the MSC1711 FAQ. ([\#5357](https://github.com/matrix-org/synapse/issues/5357))

Internal Changes
----------------

- Synapse will now serve the experimental "room complexity" API endpoint. ([\#5216](https://github.com/matrix-org/synapse/issues/5216))
- The base classes for the v1 and v2_alpha REST APIs have been unified. ([\#5226](https://github.com/matrix-org/synapse/issues/5226), [\#5328](https://github.com/matrix-org/synapse/issues/5328))
- Simplify and add comments to do_auth. ([\#5227](https://github.com/matrix-org/synapse/issues/5227))
- Remove the urllib3 pin, as requests 2.22.0 has been released supporting urllib3 1.25.2. ([\#5230](https://github.com/matrix-org/synapse/issues/5230))
- Preparatory work for key-validity features. ([\#5232](https://github.com/matrix-org/synapse/issues/5232), [\#5234](https://github.com/matrix-org/synapse/issues/5234), [\#5235](https://github.com/matrix-org/synapse/issues/5235), [\#5236](https://github.com/matrix-org/synapse/issues/5236), [\#5237](https://github.com/matrix-org/synapse/issues/5237), [\#5244](https://github.com/matrix-org/synapse/issues/5244), [\#5250](https://github.com/matrix-org/synapse/issues/5250), [\#5296](https://github.com/matrix-org/synapse/issues/5296), [\#5299](https://github.com/matrix-org/synapse/issues/5299), [\#5343](https://github.com/matrix-org/synapse/issues/5343), [\#5347](https://github.com/matrix-org/synapse/issues/5347), [\#5356](https://github.com/matrix-org/synapse/issues/5356))
- Specify the type of reCAPTCHA key to use. ([\#5283](https://github.com/matrix-org/synapse/issues/5283))
- Improve the sample config for monthly active user blocking. ([\#5284](https://github.com/matrix-org/synapse/issues/5284))
- Remove spurious debug from MatrixFederationHttpClient.get_json. ([\#5287](https://github.com/matrix-org/synapse/issues/5287))
- Improve logging for logcontext leaks. ([\#5288](https://github.com/matrix-org/synapse/issues/5288))
- Clarify that the admin change password API logs the user out. ([\#5303](https://github.com/matrix-org/synapse/issues/5303))
- New installs will now use the full v54 schema, rather than the full v14 schema plus incremental updates to v54. ([\#5320](https://github.com/matrix-org/synapse/issues/5320))
- Improve docstrings on MatrixFederationClient. ([\#5332](https://github.com/matrix-org/synapse/issues/5332))
- Clean up FederationClient.get_events for clarity. ([\#5344](https://github.com/matrix-org/synapse/issues/5344))
- Various improvements to debug logging. ([\#5353](https://github.com/matrix-org/synapse/issues/5353))
- Don't run CI build checks until the sample config check has passed. ([\#5370](https://github.com/matrix-org/synapse/issues/5370))
- Automatically retry Buildkite builds (max twice) when an agent is lost. ([\#5380](https://github.com/matrix-org/synapse/issues/5380))

Synapse 0.99.5.2 (2019-05-30)
=============================

Bugfixes
--------

- Fix bug where we leaked extremities when we soft failed events, leading to performance degradation. ([\#5274](https://github.com/matrix-org/synapse/issues/5274), [\#5278](https://github.com/matrix-org/synapse/issues/5278), [\#5291](https://github.com/matrix-org/synapse/issues/5291))

Synapse 0.99.5.1 (2019-05-22)
=============================

0.99.5.1 supersedes 0.99.5 due to a malformed Debian changelog; there are no functional changes.


Synapse 0.99.5 (2019-05-22)
===========================

No significant changes.


Synapse 0.99.5rc1 (2019-05-21)
==============================

Features
--------

- Add the ability to blacklist IP ranges for the federation client (an illustrative config sketch follows this list). ([\#5043](https://github.com/matrix-org/synapse/issues/5043))
- Ratelimiting configuration for clients sending messages and for the federation server has been altered to match login ratelimiting. The old configuration names will continue to work. Check the sample config for details of the new names. ([\#5181](https://github.com/matrix-org/synapse/issues/5181))
- Drop support for the undocumented /_matrix/client/v2_alpha API prefix. ([\#5190](https://github.com/matrix-org/synapse/issues/5190))
- Add an option to disable per-room profiles. ([\#5196](https://github.com/matrix-org/synapse/issues/5196))
- Attach an expiration date at startup to any registered user missing one, if account validity is enabled. ([\#5204](https://github.com/matrix-org/synapse/issues/5204))
- Add experimental support for relations (aka reactions and edits). ([\#5209](https://github.com/matrix-org/synapse/issues/5209), [\#5211](https://github.com/matrix-org/synapse/issues/5211), [\#5203](https://github.com/matrix-org/synapse/issues/5203), [\#5212](https://github.com/matrix-org/synapse/issues/5212))
- Add a room version 4 which uses a new event ID format, as per [MSC2002](https://github.com/matrix-org/matrix-doc/pull/2002). ([\#5210](https://github.com/matrix-org/synapse/issues/5210), [\#5217](https://github.com/matrix-org/synapse/issues/5217))

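The IP-blacklisting and per-room-profile entries above add config switches whose names the changelog does not spell out. Going by the sample config of this era, they look roughly like the following; treat the option names as illustrative and confirm them against `docs/sample_config.yaml`:

```yaml
# Ranges the federation client must never connect to (e.g. internal networks).
federation_ip_range_blacklist:
  - "127.0.0.0/8"
  - "10.0.0.0/8"
  - "192.168.0.0/16"

# Disallow per-room display names and avatars, keeping one profile per user.
allow_per_room_profiles: false
```
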
Bugfixes
--------

- Fix image orientation when generating thumbnails (needs pillow>=4.3.0). Contributed by Pau Rodriguez-Estivill. ([\#5039](https://github.com/matrix-org/synapse/issues/5039))
- Exclude soft-failed events from forward-extremity candidates: fixes "No forward extremities left!" error. ([\#5146](https://github.com/matrix-org/synapse/issues/5146))
- Re-order stages in registration flows such that msisdn and email verification are done last. ([\#5174](https://github.com/matrix-org/synapse/issues/5174))
- Fix 3pid guest invites. ([\#5177](https://github.com/matrix-org/synapse/issues/5177))
- Fix a bug where the register endpoint would fail with M_THREEPID_IN_USE instead of returning an account previously registered in the same session. ([\#5187](https://github.com/matrix-org/synapse/issues/5187))
- Prevent registration for user ids that are too long to fit into a state key. Contributed by Reid Anderson. ([\#5198](https://github.com/matrix-org/synapse/issues/5198))
- Fix incompatibility between ACME support and Python 3.5.2. ([\#5218](https://github.com/matrix-org/synapse/issues/5218))
- Fix error handling for rooms whose versions are unknown. ([\#5219](https://github.com/matrix-org/synapse/issues/5219))

Internal Changes
----------------

- Make /sync attempt to return device updates for both joined and invited users. Note that this doesn't currently work correctly due to other bugs. ([\#3484](https://github.com/matrix-org/synapse/issues/3484))
- Update tests to consistently be configured via the same code that is used when loading from configuration files. ([\#5171](https://github.com/matrix-org/synapse/issues/5171), [\#5185](https://github.com/matrix-org/synapse/issues/5185))
- Allow client event serialization to be async. ([\#5183](https://github.com/matrix-org/synapse/issues/5183))
- Expose DataStore._get_events as get_events_as_list. ([\#5184](https://github.com/matrix-org/synapse/issues/5184))
- Make generating SQL bounds for pagination generic. ([\#5191](https://github.com/matrix-org/synapse/issues/5191))
- Stop telling people to install the optional dependencies by default. ([\#5197](https://github.com/matrix-org/synapse/issues/5197))


Synapse 0.99.4 (2019-05-15)
===========================

No significant changes.

Synapse 0.99.4rc1 (2019-05-13)
==============================

Features
--------

- Add systemd-python to the optional dependencies to enable logging to the systemd journal. Install with `pip install matrix-synapse[systemd]`. ([\#4339](https://github.com/matrix-org/synapse/issues/4339))
- Add a default .m.rule.tombstone push rule. ([\#4867](https://github.com/matrix-org/synapse/issues/4867))
- Add ability for password provider modules to bind email addresses to users upon registration. ([\#4947](https://github.com/matrix-org/synapse/issues/4947))
- Implementation of [MSC1711](https://github.com/matrix-org/matrix-doc/pull/1711) including config options for requiring valid TLS certificates for federation traffic, the ability to disable TLS validation for specific domains, and the ability to specify your own list of CA certificates. ([\#4967](https://github.com/matrix-org/synapse/issues/4967))
- Remove presence list support as per MSC 1819. ([\#4989](https://github.com/matrix-org/synapse/issues/4989))
- Reduce CPU usage starting pushers during start up. ([\#4991](https://github.com/matrix-org/synapse/issues/4991))
- Add a delete group admin API. ([\#5002](https://github.com/matrix-org/synapse/issues/5002))
- Add config option to block users from looking up 3PIDs. ([\#5010](https://github.com/matrix-org/synapse/issues/5010))
- Add context to phonehome stats. ([\#5020](https://github.com/matrix-org/synapse/issues/5020))
- Configure the example systemd units to have a log identifier of `matrix-synapse` instead of the executable name, `python`. Contributed by Christoph Müller. ([\#5023](https://github.com/matrix-org/synapse/issues/5023))
- Add time-based account expiration (a rough config sketch follows this list). ([\#5027](https://github.com/matrix-org/synapse/issues/5027), [\#5047](https://github.com/matrix-org/synapse/issues/5047), [\#5073](https://github.com/matrix-org/synapse/issues/5073), [\#5116](https://github.com/matrix-org/synapse/issues/5116))
- Add support for handling `/versions`, `/voip` and `/push_rules` client endpoints to the client_reader worker. ([\#5063](https://github.com/matrix-org/synapse/issues/5063), [\#5065](https://github.com/matrix-org/synapse/issues/5065), [\#5070](https://github.com/matrix-org/synapse/issues/5070))
- Add a configuration option to require authentication on /publicRooms and /profile endpoints. ([\#5083](https://github.com/matrix-org/synapse/issues/5083))
- Move admin APIs to `/_synapse/admin/v1`. (The old paths are retained for backwards-compatibility, for now). ([\#5119](https://github.com/matrix-org/synapse/issues/5119))
- Implement an admin API for sending server notices. Many thanks to @krombel who provided a foundation for this work. ([\#5121](https://github.com/matrix-org/synapse/issues/5121), [\#5142](https://github.com/matrix-org/synapse/issues/5142))

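The time-based account expiration entry above introduces an `account_validity` section; the related entries elsewhere in these notes (renewal emails, the startup job, expiry templates) hint at its shape, but the sketch below is a guess rather than a reference. Every key name and value here is illustrative and should be checked against the sample config:

```yaml
account_validity:
  enabled: true
  # How long an account remains valid after registration or renewal.
  period: 6w
  # How long before expiry the renewal email is sent.
  renew_at: 1w
```
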
Bugfixes
--------

- Avoid redundant URL encoding of the redirect URL for SSO login in the fallback login page. Fixes a regression introduced in [#4220](https://github.com/matrix-org/synapse/pull/4220). Contributed by Marcel Fabian Krüger ("[zaugin](https://github.com/zauguin)"). ([\#4555](https://github.com/matrix-org/synapse/issues/4555))
- Fix bug where presence updates were sent to all servers in a room when a new server joined, rather than to just the new server. ([\#4942](https://github.com/matrix-org/synapse/issues/4942), [\#5103](https://github.com/matrix-org/synapse/issues/5103))
- Fix sync bug which made accepting invites unreliable in worker-mode synapses. ([\#4955](https://github.com/matrix-org/synapse/issues/4955), [\#4956](https://github.com/matrix-org/synapse/issues/4956))
- start.sh: Fix the --no-rate-limit option for messages and make it bypass rate limiting on registration and login too. ([\#4981](https://github.com/matrix-org/synapse/issues/4981))
- Transfer related groups on room upgrade. ([\#4990](https://github.com/matrix-org/synapse/issues/4990))
- Prevent the ability to kick users from a room they aren't in. ([\#4999](https://github.com/matrix-org/synapse/issues/4999))
- Fix issue #4596 so the synapse_port_db script works with the --curses option on Python 3. Contributed by Anders Jensen-Waud <anders@jensenwaud.com>. ([\#5003](https://github.com/matrix-org/synapse/issues/5003))
- Clients timing out/disappearing while downloading from the media repository will no longer log a spurious "Producer was not unregistered" message. ([\#5009](https://github.com/matrix-org/synapse/issues/5009))
- Fix "cannot import name execute_batch" error with postgres. ([\#5032](https://github.com/matrix-org/synapse/issues/5032))
- Fix disappearing exceptions in manhole. ([\#5035](https://github.com/matrix-org/synapse/issues/5035))
- Work around a bug in Twisted where attempting too many concurrent DNS requests could cause it to hang due to running out of file descriptors. ([\#5037](https://github.com/matrix-org/synapse/issues/5037))
- Make sure we're not registering the same 3pid twice on registration. ([\#5071](https://github.com/matrix-org/synapse/issues/5071))
- Don't crash on a lack of expiry templates. ([\#5077](https://github.com/matrix-org/synapse/issues/5077))
- Fix the ratelimiting on third party invites. ([\#5104](https://github.com/matrix-org/synapse/issues/5104))
- Add some missing limitations to room alias creation. ([\#5124](https://github.com/matrix-org/synapse/issues/5124), [\#5128](https://github.com/matrix-org/synapse/issues/5128))
- Limit the number of EDUs in transactions to 100 as expected by Synapse. Thanks to @superboum for this work! ([\#5138](https://github.com/matrix-org/synapse/issues/5138))

Internal Changes
----------------

- Add a test to verify the threepid auth check added in #4435. ([\#4474](https://github.com/matrix-org/synapse/issues/4474))
- Fix/improve some docstrings in the replication code. ([\#4949](https://github.com/matrix-org/synapse/issues/4949))
- Split synapse.replication.tcp.streams into smaller files. ([\#4953](https://github.com/matrix-org/synapse/issues/4953))
- Refactor replication row generation/parsing. ([\#4954](https://github.com/matrix-org/synapse/issues/4954))
- Run `black` to clean up formatting on `synapse/storage/roommember.py` and `synapse/storage/events.py`. ([\#4959](https://github.com/matrix-org/synapse/issues/4959))
- Remove log line for password via the admin API. ([\#4965](https://github.com/matrix-org/synapse/issues/4965))
- Fix typo in TLS filenames in docker/README.md. Also add the '-p' commandline option to the 'docker run' example. Contributed by Jurrie Overgoor. ([\#4968](https://github.com/matrix-org/synapse/issues/4968))
- Refactor room version definitions. ([\#4969](https://github.com/matrix-org/synapse/issues/4969))
- Reduce log level of .well-known/matrix/client responses. ([\#4972](https://github.com/matrix-org/synapse/issues/4972))
- Add `config.signing_key_path` that can be read by `synapse.config` utility. ([\#4974](https://github.com/matrix-org/synapse/issues/4974))
- Track which identity server is used when binding a threepid and use that for unbinding, as per MSC1915. ([\#4982](https://github.com/matrix-org/synapse/issues/4982))
- Rewrite KeyringTestCase as a HomeserverTestCase. ([\#4985](https://github.com/matrix-org/synapse/issues/4985))
- README updates: Corrected the default POSTGRES_USER. Added port forwarding hint in TLS section. ([\#4987](https://github.com/matrix-org/synapse/issues/4987))
- Remove a number of unused tables from the database schema. ([\#4992](https://github.com/matrix-org/synapse/issues/4992), [\#5028](https://github.com/matrix-org/synapse/issues/5028), [\#5033](https://github.com/matrix-org/synapse/issues/5033))
- Run `black` on the remainder of `synapse/storage/`. ([\#4996](https://github.com/matrix-org/synapse/issues/4996))
- Fix grammar in get_current_users_in_room and give it a docstring. ([\#4998](https://github.com/matrix-org/synapse/issues/4998))
- Clean up some code in the server-key Keyring. ([\#5001](https://github.com/matrix-org/synapse/issues/5001))
- Convert SYNAPSE_NO_TLS Docker variable to boolean for user friendliness. Contributed by Gabriel Eckerson. ([\#5005](https://github.com/matrix-org/synapse/issues/5005))
- Refactor synapse.storage._base._simple_select_list_paginate. ([\#5007](https://github.com/matrix-org/synapse/issues/5007))
- Store the notary server name correctly in server_keys_json. ([\#5024](https://github.com/matrix-org/synapse/issues/5024))
- Rewrite Datastore.get_server_verify_keys to reduce the number of database transactions. ([\#5030](https://github.com/matrix-org/synapse/issues/5030))
- Remove extraneous period from copyright headers. ([\#5046](https://github.com/matrix-org/synapse/issues/5046))
- Update documentation for where to get Synapse packages. ([\#5067](https://github.com/matrix-org/synapse/issues/5067))
- Add workarounds for pep-517 install errors. ([\#5098](https://github.com/matrix-org/synapse/issues/5098))
- Improve logging when event-signature checks fail. ([\#5100](https://github.com/matrix-org/synapse/issues/5100))
- Factor out an "assert_requester_is_admin" function. ([\#5120](https://github.com/matrix-org/synapse/issues/5120))
- Remove the requirement to authenticate for /admin/server_version. ([\#5122](https://github.com/matrix-org/synapse/issues/5122))
- Prevent an exception from being raised in an IResolutionReceiver and use a more generic error message for blacklisted URL previews. ([\#5155](https://github.com/matrix-org/synapse/issues/5155))
- Run `black` on the tests directory. ([\#5170](https://github.com/matrix-org/synapse/issues/5170))
- Fix CI after new release of isort. ([\#5179](https://github.com/matrix-org/synapse/issues/5179))
- Fix bogus imports in unit tests. ([\#5154](https://github.com/matrix-org/synapse/issues/5154))

Synapse 0.99.3.2 (2019-05-03)
=============================

Internal Changes
----------------

- Ensure that we have `urllib3` <1.25, to resolve incompatibility with `requests`. ([\#5135](https://github.com/matrix-org/synapse/issues/5135))


Synapse 0.99.3.1 (2019-05-03)
=============================

Security update
---------------

This release includes two security fixes:

- Switch to using a cryptographically-secure random number generator for token strings, ensuring they cannot be predicted by an attacker. Thanks to @opnsec for identifying and responsibly disclosing this issue! ([\#5133](https://github.com/matrix-org/synapse/issues/5133))
- Blacklist 0.0.0.0 and :: by default for URL previews. Thanks to @opnsec for identifying and responsibly disclosing this issue too! ([\#5134](https://github.com/matrix-org/synapse/issues/5134))


Synapse 0.99.3 (2019-04-01)
===========================

@@ -30,21 +30,20 @@ use github's pull request workflow to review the contribution, and either ask
|
||||
you to make any refinements needed or merge it and make them ourselves. The
|
||||
changes will then land on master when we next do a release.
|
||||
|
||||
We use `CircleCI <https://circleci.com/gh/matrix-org>`_ and `Travis CI
|
||||
<https://travis-ci.org/matrix-org/synapse>`_ for continuous integration. All
|
||||
pull requests to synapse get automatically tested by Travis and CircleCI.
|
||||
If your change breaks the build, this will be shown in GitHub, so please
|
||||
keep an eye on the pull request for feedback.
|
||||
We use `CircleCI <https://circleci.com/gh/matrix-org>`_ and `Buildkite
|
||||
<https://buildkite.com/matrix-dot-org/synapse>`_ for continuous integration.
|
||||
Buildkite builds need to be authorised by a maintainer. If your change breaks
|
||||
the build, this will be shown in GitHub, so please keep an eye on the pull
|
||||
request for feedback.
|
||||
|
||||
To run unit tests in a local development environment, you can use:
|
||||
|
||||
- ``tox -e py27`` (requires tox to be installed by ``pip install tox``) for
|
||||
SQLite-backed Synapse on Python 2.7.
|
||||
- ``tox -e py35`` for SQLite-backed Synapse on Python 3.5.
|
||||
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
|
||||
for SQLite-backed Synapse on Python 3.5.
|
||||
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
||||
- ``tox -e py27-postgres`` for PostgreSQL-backed Synapse on Python 2.7
|
||||
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
|
||||
(requires a running local PostgreSQL with access to create databases).
|
||||
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 2.7
|
||||
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
|
||||
(requires Docker). Entirely self-contained, recommended if you don't want to
|
||||
set up PostgreSQL yourself.
|
||||
|
||||
|
||||
95
INSTALL.md
@@ -1,13 +1,31 @@
|
||||
* [Installing Synapse](#installing-synapse)
|
||||
* [Installing from source](#installing-from-source)
|
||||
* [Platform-Specific Instructions](#platform-specific-instructions)
|
||||
* [Troubleshooting Installation](#troubleshooting-installation)
|
||||
* [Prebuilt packages](#prebuilt-packages)
|
||||
* [Setting up Synapse](#setting-up-synapse)
|
||||
* [TLS certificates](#tls-certificates)
|
||||
* [Registering a user](#registering-a-user)
|
||||
* [Setting up a TURN server](#setting-up-a-turn-server)
|
||||
* [URL previews](#url-previews)
|
||||
- [Choosing your server name](#choosing-your-server-name)
|
||||
- [Installing Synapse](#installing-synapse)
|
||||
- [Installing from source](#installing-from-source)
|
||||
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||
- [Prebuilt packages](#prebuilt-packages)
|
||||
- [Setting up Synapse](#setting-up-synapse)
|
||||
- [TLS certificates](#tls-certificates)
|
||||
- [Email](#email)
|
||||
- [Registering a user](#registering-a-user)
|
||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||
- [URL previews](#url-previews)
|
||||
|
||||
# Choosing your server name
|
||||
|
||||
It is important to choose the name for your server before you install Synapse,
|
||||
because it cannot be changed later.
|
||||
|
||||
The server name determines the "domain" part of user-ids for users on your
|
||||
server: these will all be of the format `@user:my.domain.name`. It also
|
||||
determines how other matrix servers will reach yours for federation.
|
||||
|
||||
For a test configuration, set this to the hostname of your server. For a more
|
||||
production-ready setup, you will probably want to specify your domain
|
||||
(`example.com`) rather than a matrix-specific hostname here (in the same way
|
||||
that your email address is probably `user@example.com` rather than
|
||||
`user@email.example.com`) - but doing so may require more advanced setup: see
|
||||
[Setting up Federation](docs/federate.md).
|
||||
|
||||
# Installing Synapse
|
||||
|
||||
@@ -35,7 +53,7 @@ virtualenv -p python3 ~/synapse/env
|
||||
source ~/synapse/env/bin/activate
|
||||
pip install --upgrade pip
|
||||
pip install --upgrade setuptools
|
||||
pip install matrix-synapse[all]
|
||||
pip install matrix-synapse
|
||||
```
|
||||
|
||||
This will download Synapse from [PyPI](https://pypi.org/project/matrix-synapse)
|
||||
@@ -48,7 +66,7 @@ update flag:
|
||||
|
||||
```
|
||||
source ~/synapse/env/bin/activate
|
||||
pip install -U matrix-synapse[all]
|
||||
pip install -U matrix-synapse
|
||||
```
|
||||
|
||||
Before you can start Synapse, you will need to generate a configuration
|
||||
@@ -63,16 +81,7 @@ python -m synapse.app.homeserver \
|
||||
--report-stats=[yes|no]
|
||||
```
|
||||
|
||||
... substituting an appropriate value for `--server-name`. The server name
|
||||
determines the "domain" part of user-ids for users on your server: these will
|
||||
all be of the format `@user:my.domain.name`. It also determines how other
|
||||
matrix servers will reach yours for Federation. For a test configuration,
|
||||
set this to the hostname of your server. For a more production-ready setup, you
|
||||
will probably want to specify your domain (`example.com`) rather than a
|
||||
matrix-specific hostname here (in the same way that your email address is
|
||||
probably `user@example.com` rather than `user@email.example.com`) - but
|
||||
doing so may require more advanced setup: see [Setting up Federation](docs/federate.md).
|
||||
Beware that the server name cannot be changed later.
|
||||
... substituting an appropriate value for `--server-name`.
|
||||
|
||||
This command will generate you a config file that you can then customise, but it will
|
||||
also generate a set of keys for you. These keys will allow your Home Server to
|
||||
@@ -85,9 +94,6 @@ different. See the
|
||||
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
|
||||
for more information on key management.)
|
||||
|
||||
You will need to give Synapse a TLS certficate before it will start - see [TLS
|
||||
certificates](#tls-certificates).
|
||||
|
||||
To actually run your new homeserver, pick a working directory for Synapse to
|
||||
run (e.g. `~/synapse`), and::
|
||||
|
||||
@@ -257,18 +263,29 @@ https://github.com/spantaleev/matrix-docker-ansible-deploy
|
||||
#### Matrix.org packages
|
||||
|
||||
Matrix.org provides Debian/Ubuntu packages of the latest stable version of
|
||||
Synapse via https://matrix.org/packages/debian/. To use them:
|
||||
Synapse via https://packages.matrix.org/debian/. They are available for Debian
|
||||
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
|
||||
|
||||
```
|
||||
sudo apt install -y lsb-release curl apt-transport-https
|
||||
echo "deb https://matrix.org/packages/debian `lsb_release -cs` main" |
|
||||
sudo apt install -y lsb-release wget apt-transport-https
|
||||
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
||||
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
||||
sudo tee /etc/apt/sources.list.d/matrix-org.list
|
||||
curl "https://matrix.org/packages/debian/repo-key.asc" |
|
||||
sudo apt-key add -
|
||||
sudo apt update
|
||||
sudo apt install matrix-synapse-py3
|
||||
```
|
||||
|
||||
**Note**: if you followed a previous version of these instructions which
|
||||
recommended using `apt-key add` to add an old key from
|
||||
`https://matrix.org/packages/debian/`, you should note that this key has been
|
||||
revoked. You should remove the old key with `sudo apt-key remove
|
||||
C35EB17E1EAE708E6603A9B3AD0592FE47F0DF61`, and follow the above instructions to
|
||||
update your configuration.
|
||||
|
||||
The fingerprint of the repository signing key (as shown by `gpg
|
||||
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
|
||||
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
|
||||
|
||||
#### Downstream Debian/Ubuntu packages
|
||||
|
||||
For `buster` and `sid`, Synapse is available in the Debian repositories and
|
||||
@@ -383,8 +400,22 @@ To configure Synapse to expose an HTTPS port, you will need to edit
|
||||
instance, if using certbot, use `fullchain.pem` as your certificate, not
|
||||
`cert.pem`).
|
||||
|
||||
For those of you upgrading your TLS certificate in readiness for Synapse 1.0,
|
||||
please take a look at [our guide](docs/MSC1711_certificates_FAQ.md#configuring-certificates-for-compatibility-with-synapse-100).
|
||||
For a more detailed guide to configuring your server for federation, see
|
||||
[federate.md](docs/federate.md)
|
||||
|
||||
|
||||
## Email
|
||||
|
||||
It is desirable for Synapse to have the capability to send email. For example,
|
||||
this is required to support the 'password reset' feature.
|
||||
|
||||
To configure an SMTP server for Synapse, modify the configuration section
|
||||
headed ``email``, and be sure to have at least the ``smtp_host``, ``smtp_port``
|
||||
and ``notif_from`` fields filled out. You may also need to set ``smtp_user``,
|
||||
``smtp_pass``, and ``require_transport_security``.
|
||||
|
||||
If Synapse is not configured with an SMTP server, password reset via email will
|
||||
be disabled by default.
|
||||
|
||||
## Registering a user
|
||||
|
||||
|
||||
@@ -9,14 +9,19 @@ include demo/*.py
|
||||
include demo/*.sh
|
||||
|
||||
recursive-include synapse/storage/schema *.sql
|
||||
recursive-include synapse/storage/schema *.sql.postgres
|
||||
recursive-include synapse/storage/schema *.sql.sqlite
|
||||
recursive-include synapse/storage/schema *.py
|
||||
recursive-include synapse/storage/schema *.txt
|
||||
|
||||
recursive-include docs *
|
||||
recursive-include scripts *
|
||||
recursive-include scripts-dev *
|
||||
recursive-include synapse *.pyi
|
||||
recursive-include tests *.pem
|
||||
recursive-include tests *.py
|
||||
include tests/http/ca.crt
|
||||
include tests/http/ca.key
|
||||
include tests/http/server.key
|
||||
|
||||
recursive-include synapse/res *
|
||||
recursive-include synapse/static *.css
|
||||
|
||||
36
README.rst
@@ -173,7 +173,7 @@ Synapse offers two database engines:
|
||||
* `PostgreSQL <https://www.postgresql.org>`_
|
||||
|
||||
By default Synapse uses SQLite in and doing so trades performance for convenience.
|
||||
SQLite is only recommended in Synapse for testing purposes or for servers with
|
||||
SQLite is only recommended in Synapse for testing purposes or for servers with
|
||||
light workloads.
|
||||
|
||||
Almost all installations should opt to use PostreSQL. Advantages include:
|
||||
@@ -272,7 +272,7 @@ to install using pip and a virtualenv::
|
||||
|
||||
virtualenv -p python3 env
|
||||
source env/bin/activate
|
||||
python -m pip install -e .[all]
|
||||
python -m pip install --no-pep-517 -e .[all]
|
||||
|
||||
This will run a process of downloading and installing all the needed
|
||||
dependencies into a virtual env.
|
||||
@@ -340,8 +340,11 @@ log lines and looking for any 'Processed request' lines which take more than
|
||||
a few seconds to execute. Please let us know at #synapse:matrix.org if
|
||||
you see this failure mode so we can help debug it, however.
|
||||
|
||||
Help!! Synapse eats all my RAM!
|
||||
-------------------------------
|
||||
Help!! Synapse is slow and eats all my RAM/CPU!
|
||||
-----------------------------------------------
|
||||
|
||||
First, ensure you are running the latest version of Synapse, using Python 3
|
||||
with a PostgreSQL database.
|
||||
|
||||
Synapse's architecture is quite RAM hungry currently - we deliberately
|
||||
cache a lot of recent room data and metadata in RAM in order to speed up
|
||||
@@ -352,14 +355,29 @@ variable. The default is 0.5, which can be decreased to reduce RAM usage
|
||||
in memory constrained enviroments, or increased if performance starts to
|
||||
degrade.
|
||||
|
||||
However, degraded performance due to a low cache factor, common on
|
||||
machines with slow disks, often leads to explosions in memory use due
|
||||
backlogged requests. In this case, reducing the cache factor will make
|
||||
things worse. Instead, try increasing it drastically. 2.0 is a good
|
||||
starting value.
|
||||
|
||||
Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
|
||||
improvement in overall amount, and especially in terms of giving back RAM
|
||||
to the OS. To use it, the library must simply be put in the LD_PRELOAD
|
||||
environment variable when launching Synapse. On Debian, this can be done
|
||||
by installing the ``libjemalloc1`` package and adding this line to
|
||||
``/etc/default/matrix-synapse``::
|
||||
improvement in overall memory use, and especially in terms of giving back
|
||||
RAM to the OS. To use it, the library must simply be put in the
|
||||
LD_PRELOAD environment variable when launching Synapse. On Debian, this
|
||||
can be done by installing the ``libjemalloc1`` package and adding this
|
||||
line to ``/etc/default/matrix-synapse``::
|
||||
|
||||
LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
|
||||
|
||||
This can make a significant difference on Python 2.7 - it's unclear how
|
||||
much of an improvement it provides on Python 3.x.
|
||||
|
||||
If you're encountering high CPU use by the Synapse process itself, you
|
||||
may be affected by a bug with presence tracking that leads to a
|
||||
massive excess of outgoing federation requests (see `discussion
|
||||
<https://github.com/matrix-org/synapse/issues/3971>`_). If metrics
|
||||
indicate that your server is also issuing far more outgoing federation
|
||||
requests than can be accounted for by your users' activity, this is a
|
||||
likely cause. The misbehavior can be worked around by setting
|
||||
``use_presence: false`` in the Synapse config file.
|
||||
|
||||
76
UPGRADE.rst
@@ -49,6 +49,82 @@ returned by the Client-Server API:
|
||||
# configured on port 443.
|
||||
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
|
||||
|
||||
Upgrading to v1.1.0
|
||||
===================
|
||||
|
||||
Synapse v1.1.0 removes support for older Python and PostgreSQL versions, as
|
||||
outlined in `our deprecation notice <https://matrix.org/blog/2019/04/08/synapse-deprecating-postgres-9-4-and-python-2-x>`_.
|
||||
|
||||
Minimum Python Version
|
||||
----------------------
|
||||
|
||||
Synapse v1.1.0 has a minimum Python requirement of Python 3.5. Python 3.6 or
|
||||
Python 3.7 are recommended as they have improved internal string handling,
|
||||
significantly reducing memory usage.
|
||||
|
||||
If you use current versions of the Matrix.org-distributed Debian packages or
|
||||
Docker images, action is not required.
|
||||
|
||||
If you install Synapse in a Python virtual environment, please see "Upgrading to
|
||||
v0.34.0" for notes on setting up a new virtualenv under Python 3.
|
||||
|
||||
Minimum PostgreSQL Version
|
||||
--------------------------
|
||||
|
||||
If using PostgreSQL under Synapse, you will need to use PostgreSQL 9.5 or above.
|
||||
Please see the
|
||||
`PostgreSQL documentation <https://www.postgresql.org/docs/11/upgrading.html>`_
|
||||
for more details on upgrading your database.
|
||||
|
||||
Upgrading to v1.0
|
||||
=================
|
||||
|
||||
Validation of TLS certificates
|
||||
------------------------------
|
||||
|
||||
Synapse v1.0 is the first release to enforce
|
||||
validation of TLS certificates for the federation API. It is therefore
|
||||
essential that your certificates are correctly configured. See the `FAQ
|
||||
<docs/MSC1711_certificates_FAQ.md>`_ for more information.
|
||||
|
||||
Note, v1.0 installations will also no longer be able to federate with servers
|
||||
that have not correctly configured their certificates.
|
||||
|
||||
In rare cases, it may be desirable to disable certificate checking: for
|
||||
example, it might be essential to be able to federate with a given legacy
|
||||
server in a closed federation. This can be done in one of two ways:-
|
||||
|
||||
* Configure the global switch ``federation_verify_certificates`` to ``false``.
|
||||
* Configure a whitelist of server domains to trust via ``federation_certificate_verification_whitelist``.
|
||||
|
||||
See the `sample configuration file <docs/sample_config.yaml>`_
|
||||
for more details on these settings.
|
||||
|
||||
Email
|
||||
-----
|
||||
When a user requests a password reset, Synapse will send an email to the
|
||||
user to confirm the request.
|
||||
|
||||
Previous versions of Synapse delegated the job of sending this email to an
|
||||
identity server. If the identity server was somehow malicious or became
|
||||
compromised, it would be theoretically possible to hijack an account through
|
||||
this means.
|
||||
|
||||
Therefore, by default, Synapse v1.0 will send the confirmation email itself. If
|
||||
Synapse is not configured with an SMTP server, password reset via email will be
|
||||
disabled.
|
||||
|
||||
To configure an SMTP server for Synapse, modify the configuration section
|
||||
headed ``email``, and be sure to have at least the ``smtp_host``, ``smtp_port``
|
||||
and ``notif_from`` fields filled out. You may also need to set ``smtp_user``,
|
||||
``smtp_pass``, and ``require_transport_security``.
|
||||
|
||||
If you are absolutely certain that you wish to continue using an identity
|
||||
server for password resets, set ``trust_identity_server_for_password_resets`` to ``true``.
|
||||
|
||||
See the `sample configuration file <docs/sample_config.yaml>`_
|
||||
for more details on these settings.
|
||||
|
||||
Upgrading to v0.99.0
|
||||
====================
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
Add test to verify threepid auth check added in #4435.
|
||||
@@ -1 +0,0 @@
|
||||
Avoid redundant URL encoding of redirect URL for SSO login in the fallback login page. Fixes a regression introduced in [#4220](https://github.com/matrix-org/synapse/pull/4220). Contributed by Marcel Fabian Krüger ("[zaugin](https://github.com/zauguin)").
|
||||
@@ -1 +0,0 @@
|
||||
Fix bug where presence updates were sent to all servers in a room when a new server joined, rather than to just the new server.
|
||||
@@ -1 +0,0 @@
|
||||
Add ability for password provider modules to bind email addresses to users upon registration.
|
||||
@@ -1 +0,0 @@
|
||||
Fix/improve some docstrings in the replication code.
|
||||
@@ -1,2 +0,0 @@
|
||||
Split synapse.replication.tcp.streams into smaller files.
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
Refactor replication row generation/parsing.
|
||||
@@ -1 +0,0 @@
|
||||
Fix sync bug which made accepting invites unreliable in worker-mode synapses.
|
||||
@@ -1 +0,0 @@
|
||||
Fix sync bug which made accepting invites unreliable in worker-mode synapses.
|
||||
@@ -1 +0,0 @@
|
||||
Run `black` to clean up formatting on `synapse/storage/roommember.py` and `synapse/storage/events.py`.
|
||||
@@ -1 +0,0 @@
|
||||
Remove log line for password via the admin API.
|
||||
@@ -1 +0,0 @@
|
||||
Fix typo in TLS filenames in docker/README.md. Also add the '-p' commandline option to the 'docker run' example. Contributed by Jurrie Overgoor.
|
||||
@@ -1,2 +0,0 @@
|
||||
Refactor room version definitions.
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
Add `config.signing_key_path` that can be read by `synapse.config` utility.
|
||||
@@ -1 +0,0 @@
|
||||
start.sh: Fix the --no-rate-limit option for messages and make it bypass rate limit on registration and login too.
|
||||
@@ -1 +0,0 @@
|
||||
Track which identity server is used when binding a threepid and use that for unbinding, as per MSC1915.
|
||||
@@ -1 +0,0 @@
|
||||
Rewrite KeyringTestCase as a HomeserverTestCase.
|
||||
@@ -1 +0,0 @@
|
||||
README updates: Corrected the default POSTGRES_USER. Added port forwarding hint in TLS section.
|
||||
@@ -1 +0,0 @@
|
||||
Remove presence list support as per MSC 1819.
|
||||
@@ -1 +0,0 @@
|
||||
Transfer related groups on room upgrade.
|
||||
@@ -1 +0,0 @@
|
||||
Reduce CPU usage starting pushers during start up.
|
||||
@@ -1 +0,0 @@
|
||||
Run `black` on the remainder of `synapse/storage/`.
|
||||
@@ -1 +0,0 @@
|
||||
Fix grammar in get_current_users_in_room and give it a docstring.
|
||||
@@ -1 +0,0 @@
|
||||
Prevent the ability to kick users from a room they aren't in.
|
||||
@@ -1 +0,0 @@
|
||||
Add a delete group admin API.
|
||||
@@ -1 +0,0 @@
|
||||
Fix issue #4596 so synapse_port_db script works with --curses option on Python 3. Contributed by Anders Jensen-Waud <anders@jensenwaud.com>.
|
||||
@@ -1 +0,0 @@
|
||||
Refactor synapse.storage._base._simple_select_list_paginate.
|
||||
@@ -1 +0,0 @@
|
||||
Add config option to block users from looking up 3PIDs.
|
||||
@@ -15,6 +15,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
""" Starts a synapse client console. """
|
||||
from __future__ import print_function
|
||||
|
||||
from twisted.internet import reactor, defer, threads
|
||||
from http import TwistedHttpClient
|
||||
@@ -36,9 +37,8 @@ from signedjson.sign import verify_signed_json, SignatureVerifyException
|
||||
|
||||
CONFIG_JSON = "cmdclient_config.json"
|
||||
|
||||
TRUSTED_ID_SERVERS = [
|
||||
'localhost:8001'
|
||||
]
|
||||
TRUSTED_ID_SERVERS = ["localhost:8001"]
|
||||
|
||||
|
||||
class SynapseCmd(cmd.Cmd):
|
||||
|
||||
@@ -58,7 +58,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
"token": token,
|
||||
"verbose": "on",
|
||||
"complete_usernames": "on",
|
||||
"send_delivery_receipts": "on"
|
||||
"send_delivery_receipts": "on",
|
||||
}
|
||||
self.path_prefix = "/_matrix/client/api/v1"
|
||||
self.event_stream_token = "END"
|
||||
@@ -109,7 +109,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
by using $. E.g. 'config roomid room1' then 'raw get /rooms/$roomid'.
|
||||
"""
|
||||
if len(line) == 0:
|
||||
print json.dumps(self.config, indent=4)
|
||||
print(json.dumps(self.config, indent=4))
|
||||
return
|
||||
|
||||
try:
|
||||
@@ -119,12 +119,11 @@ class SynapseCmd(cmd.Cmd):
|
||||
config_rules = [ # key, valid_values
|
||||
("verbose", ["on", "off"]),
|
||||
("complete_usernames", ["on", "off"]),
|
||||
("send_delivery_receipts", ["on", "off"])
|
||||
("send_delivery_receipts", ["on", "off"]),
|
||||
]
|
||||
for key, valid_vals in config_rules:
|
||||
if key == args["key"] and args["val"] not in valid_vals:
|
||||
print "%s value must be one of %s" % (args["key"],
|
||||
valid_vals)
|
||||
print("%s value must be one of %s" % (args["key"], valid_vals))
|
||||
return
|
||||
|
||||
# toggle the http client verbosity
|
||||
@@ -133,11 +132,11 @@ class SynapseCmd(cmd.Cmd):
|
||||
|
||||
# assign the new config
|
||||
self.config[args["key"]] = args["val"]
|
||||
print json.dumps(self.config, indent=4)
|
||||
print(json.dumps(self.config, indent=4))
|
||||
|
||||
save_config(self.config)
|
||||
except Exception as e:
|
||||
print e
|
||||
print(e)
|
||||
|
||||
def do_register(self, line):
|
||||
"""Registers for a new account: "register <userid> <noupdate>"
|
||||
@@ -153,33 +152,32 @@ class SynapseCmd(cmd.Cmd):
|
||||
pwd = getpass.getpass("Type a password for this user: ")
|
||||
pwd2 = getpass.getpass("Retype the password: ")
|
||||
if pwd != pwd2 or len(pwd) == 0:
|
||||
print "Password mismatch."
|
||||
print("Password mismatch.")
|
||||
pwd = None
|
||||
else:
|
||||
password = pwd
|
||||
|
||||
body = {
|
||||
"type": "m.login.password"
|
||||
}
|
||||
body = {"type": "m.login.password"}
|
||||
if "userid" in args:
|
||||
body["user"] = args["userid"]
|
||||
if password:
|
||||
body["password"] = password
|
||||
|
||||
reactor.callFromThread(self._do_register, body,
|
||||
"noupdate" not in args)
|
||||
reactor.callFromThread(self._do_register, body, "noupdate" not in args)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_register(self, data, update_config):
|
||||
# check the registration flows
|
||||
url = self._url() + "/register"
|
||||
json_res = yield self.http_client.do_request("GET", url)
|
||||
print json.dumps(json_res, indent=4)
|
||||
print(json.dumps(json_res, indent=4))
|
||||
|
||||
passwordFlow = None
|
||||
for flow in json_res["flows"]:
|
||||
if flow["type"] == "m.login.recaptcha" or ("stages" in flow and "m.login.recaptcha" in flow["stages"]):
|
||||
print "Unable to register: Home server requires captcha."
|
||||
if flow["type"] == "m.login.recaptcha" or (
|
||||
"stages" in flow and "m.login.recaptcha" in flow["stages"]
|
||||
):
|
||||
print("Unable to register: Home server requires captcha.")
|
||||
return
|
||||
if flow["type"] == "m.login.password" and "stages" not in flow:
|
||||
passwordFlow = flow
|
||||
@@ -189,7 +187,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
return
|
||||
|
||||
json_res = yield self.http_client.do_request("POST", url, data=data)
|
||||
print json.dumps(json_res, indent=4)
|
||||
print(json.dumps(json_res, indent=4))
|
||||
if update_config and "user_id" in json_res:
|
||||
self.config["user"] = json_res["user_id"]
|
||||
self.config["token"] = json_res["access_token"]
|
||||
@@ -201,9 +199,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
"""
|
||||
try:
|
||||
args = self._parse(line, ["user_id"], force_keys=True)
|
||||
can_login = threads.blockingCallFromThread(
|
||||
reactor,
|
||||
self._check_can_login)
|
||||
can_login = threads.blockingCallFromThread(reactor, self._check_can_login)
|
||||
if can_login:
|
||||
p = getpass.getpass("Enter your password: ")
|
||||
user = args["user_id"]
|
||||
@@ -211,29 +207,25 @@ class SynapseCmd(cmd.Cmd):
|
||||
domain = self._domain()
|
||||
if domain:
|
||||
user = "@" + user + ":" + domain
|
||||
|
||||
|
||||
reactor.callFromThread(self._do_login, user, p)
|
||||
#print " got %s " % p
|
||||
# print " got %s " % p
|
||||
except Exception as e:
|
||||
print e
|
||||
print(e)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_login(self, user, password):
|
||||
path = "/login"
|
||||
data = {
|
||||
"user": user,
|
||||
"password": password,
|
||||
"type": "m.login.password"
|
||||
}
|
||||
data = {"user": user, "password": password, "type": "m.login.password"}
|
||||
url = self._url() + path
|
||||
json_res = yield self.http_client.do_request("POST", url, data=data)
|
||||
print json_res
|
||||
print(json_res)
|
||||
|
||||
if "access_token" in json_res:
|
||||
self.config["user"] = user
|
||||
self.config["token"] = json_res["access_token"]
|
||||
save_config(self.config)
|
||||
print "Login successful."
|
||||
print("Login successful.")
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _check_can_login(self):
|
||||
@@ -242,18 +234,19 @@ class SynapseCmd(cmd.Cmd):
|
||||
# submitting!
|
||||
url = self._url() + path
|
||||
json_res = yield self.http_client.do_request("GET", url)
|
||||
print json_res
|
||||
print(json_res)
|
||||
|
||||
if "flows" not in json_res:
|
||||
print "Failed to find any login flows."
|
||||
print("Failed to find any login flows.")
|
||||
defer.returnValue(False)
|
||||
|
||||
flow = json_res["flows"][0] # assume first is the one we want.
|
||||
if ("type" not in flow or "m.login.password" != flow["type"] or
|
||||
"stages" in flow):
|
||||
flow = json_res["flows"][0] # assume first is the one we want.
|
||||
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
|
||||
fallback_url = self._url() + "/login/fallback"
|
||||
print ("Unable to login via the command line client. Please visit "
|
||||
"%s to login." % fallback_url)
|
||||
print(
|
||||
"Unable to login via the command line client. Please visit "
|
||||
"%s to login." % fallback_url
|
||||
)
|
||||
defer.returnValue(False)
|
||||
defer.returnValue(True)
|
||||
|
||||
@@ -263,21 +256,33 @@ class SynapseCmd(cmd.Cmd):
|
||||
<clientSecret> A string of characters generated when requesting an email that you'll supply in subsequent calls to identify yourself
|
||||
<sendAttempt> The number of times the user has requested an email. Leave this the same between requests to retry the request at the transport level. Increment it to request that the email be sent again.
|
||||
"""
|
||||
args = self._parse(line, ['address', 'clientSecret', 'sendAttempt'])
|
||||
args = self._parse(line, ["address", "clientSecret", "sendAttempt"])
|
||||
|
||||
postArgs = {'email': args['address'], 'clientSecret': args['clientSecret'], 'sendAttempt': args['sendAttempt']}
|
||||
postArgs = {
|
||||
"email": args["address"],
|
||||
"clientSecret": args["clientSecret"],
|
||||
"sendAttempt": args["sendAttempt"],
|
||||
}
|
||||
|
||||
reactor.callFromThread(self._do_emailrequest, postArgs)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_emailrequest(self, args):
|
||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/requestToken"
|
||||
url = (
|
||||
self._identityServerUrl()
|
||||
+ "/_matrix/identity/api/v1/validate/email/requestToken"
|
||||
)
|
||||
|
||||
json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
|
||||
headers={'Content-Type': ['application/x-www-form-urlencoded']})
|
||||
print json_res
|
||||
if 'sid' in json_res:
|
||||
print "Token sent. Your session ID is %s" % (json_res['sid'])
|
||||
json_res = yield self.http_client.do_request(
|
||||
"POST",
|
||||
url,
|
||||
data=urllib.urlencode(args),
|
||||
jsonreq=False,
|
||||
headers={"Content-Type": ["application/x-www-form-urlencoded"]},
|
||||
)
|
||||
print(json_res)
|
||||
if "sid" in json_res:
|
||||
print("Token sent. Your session ID is %s" % (json_res["sid"]))
|
||||
|
||||
def do_emailvalidate(self, line):
|
||||
"""Validate and associate a third party ID
|
||||
@@ -285,39 +290,56 @@ class SynapseCmd(cmd.Cmd):
|
||||
<token> The token sent to your third party identifier address
|
||||
<clientSecret> The same clientSecret you supplied in requestToken
|
||||
"""
|
||||
args = self._parse(line, ['sid', 'token', 'clientSecret'])
|
||||
args = self._parse(line, ["sid", "token", "clientSecret"])
|
||||
|
||||
postArgs = { 'sid' : args['sid'], 'token' : args['token'], 'clientSecret': args['clientSecret'] }
|
||||
postArgs = {
|
||||
"sid": args["sid"],
|
||||
"token": args["token"],
|
||||
"clientSecret": args["clientSecret"],
|
||||
}
|
||||
|
||||
reactor.callFromThread(self._do_emailvalidate, postArgs)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_emailvalidate(self, args):
|
||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/submitToken"
|
||||
url = (
|
||||
self._identityServerUrl()
|
||||
+ "/_matrix/identity/api/v1/validate/email/submitToken"
|
||||
)
|
||||
|
||||
json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
|
||||
headers={'Content-Type': ['application/x-www-form-urlencoded']})
|
||||
print json_res
|
||||
json_res = yield self.http_client.do_request(
|
||||
"POST",
|
||||
url,
|
||||
data=urllib.urlencode(args),
|
||||
jsonreq=False,
|
||||
headers={"Content-Type": ["application/x-www-form-urlencoded"]},
|
||||
)
|
||||
print(json_res)
|
||||
|
||||
def do_3pidbind(self, line):
|
||||
"""Validate and associate a third party ID
|
||||
<sid> The session ID (sid) given to you in the response to requestToken
|
||||
<clientSecret> The same clientSecret you supplied in requestToken
|
||||
"""
|
||||
args = self._parse(line, ['sid', 'clientSecret'])
|
||||
args = self._parse(line, ["sid", "clientSecret"])
|
||||
|
||||
postArgs = { 'sid' : args['sid'], 'clientSecret': args['clientSecret'] }
|
||||
postArgs['mxid'] = self.config["user"]
|
||||
postArgs = {"sid": args["sid"], "clientSecret": args["clientSecret"]}
|
||||
postArgs["mxid"] = self.config["user"]
|
||||
|
||||
reactor.callFromThread(self._do_3pidbind, postArgs)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_3pidbind(self, args):
|
||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/3pid/bind"
|
||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
|
||||
|
||||
json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
|
||||
headers={'Content-Type': ['application/x-www-form-urlencoded']})
|
||||
print json_res
|
||||
json_res = yield self.http_client.do_request(
|
||||
"POST",
|
||||
url,
|
||||
data=urllib.urlencode(args),
|
||||
jsonreq=False,
|
||||
headers={"Content-Type": ["application/x-www-form-urlencoded"]},
|
||||
)
|
||||
print(json_res)
|
||||
|
||||
def do_join(self, line):
|
||||
"""Joins a room: "join <roomid>" """
|
||||
@@ -325,7 +347,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
args = self._parse(line, ["roomid"], force_keys=True)
|
||||
self._do_membership_change(args["roomid"], "join", self._usr())
|
||||
except Exception as e:
|
||||
print e
|
||||
print(e)
|
||||
|
||||
def do_joinalias(self, line):
|
||||
try:
|
||||
@@ -333,7 +355,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
path = "/join/%s" % urllib.quote(args["roomname"])
|
||||
reactor.callFromThread(self._run_and_pprint, "POST", path, {})
|
||||
except Exception as e:
|
||||
print e
|
||||
print(e)
|
||||
|
||||
def do_topic(self, line):
|
||||
""""topic [set|get] <roomid> [<newtopic>]"
|
||||
@@ -343,26 +365,24 @@ class SynapseCmd(cmd.Cmd):
|
||||
try:
|
||||
args = self._parse(line, ["action", "roomid", "topic"])
|
||||
if "action" not in args or "roomid" not in args:
|
||||
print "Must specify set|get and a room ID."
|
||||
print("Must specify set|get and a room ID.")
|
||||
return
|
||||
if args["action"].lower() not in ["set", "get"]:
|
||||
print "Must specify set|get, not %s" % args["action"]
|
||||
print("Must specify set|get, not %s" % args["action"])
|
||||
return
|
||||
|
||||
path = "/rooms/%s/topic" % urllib.quote(args["roomid"])
|
||||
|
||||
if args["action"].lower() == "set":
|
||||
if "topic" not in args:
|
||||
print "Must specify a new topic."
|
||||
print("Must specify a new topic.")
|
||||
return
|
||||
body = {
|
||||
"topic": args["topic"]
|
||||
}
|
||||
body = {"topic": args["topic"]}
|
||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, body)
|
||||
elif args["action"].lower() == "get":
|
||||
reactor.callFromThread(self._run_and_pprint, "GET", path)
|
||||
except Exception as e:
|
||||
print e
|
||||
print(e)
|
||||
|
||||
def do_invite(self, line):
|
||||
"""Invite a user to a room: "invite <userid> <roomid>" """
|
||||
@@ -373,49 +393,64 @@ class SynapseCmd(cmd.Cmd):
|
||||
|
||||
reactor.callFromThread(self._do_invite, args["roomid"], user_id)
|
||||
except Exception as e:
|
||||
print e
|
||||
print(e)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_invite(self, roomid, userstring):
|
||||
if (not userstring.startswith('@') and
|
||||
self._is_on("complete_usernames")):
|
||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/lookup"
|
||||
if not userstring.startswith("@") and self._is_on("complete_usernames"):
|
||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
|
||||
|
||||
json_res = yield self.http_client.do_request("GET", url, qparams={'medium':'email','address':userstring})
|
||||
json_res = yield self.http_client.do_request(
|
||||
"GET", url, qparams={"medium": "email", "address": userstring}
|
||||
)
|
||||
|
||||
mxid = None
|
||||
|
||||
if 'mxid' in json_res and 'signatures' in json_res:
|
||||
url = self._identityServerUrl()+"/_matrix/identity/api/v1/pubkey/ed25519"
|
||||
if "mxid" in json_res and "signatures" in json_res:
|
||||
url = (
|
||||
self._identityServerUrl()
|
||||
+ "/_matrix/identity/api/v1/pubkey/ed25519"
|
||||
)
|
||||
|
||||
pubKey = None
|
||||
pubKeyObj = yield self.http_client.do_request("GET", url)
|
||||
if 'public_key' in pubKeyObj:
|
||||
pubKey = nacl.signing.VerifyKey(pubKeyObj['public_key'], encoder=nacl.encoding.HexEncoder)
|
||||
if "public_key" in pubKeyObj:
|
||||
pubKey = nacl.signing.VerifyKey(
|
||||
pubKeyObj["public_key"], encoder=nacl.encoding.HexEncoder
|
||||
)
|
||||
else:
|
||||
print "No public key found in pubkey response!"
|
||||
print("No public key found in pubkey response!")
|
||||
|
||||
sigValid = False
|
||||
|
||||
if pubKey:
|
||||
for signame in json_res['signatures']:
|
||||
for signame in json_res["signatures"]:
|
||||
if signame not in TRUSTED_ID_SERVERS:
|
||||
print "Ignoring signature from untrusted server %s" % (signame)
|
||||
print(
|
||||
"Ignoring signature from untrusted server %s"
|
||||
% (signame)
|
||||
)
|
||||
else:
|
||||
try:
|
||||
verify_signed_json(json_res, signame, pubKey)
|
||||
sigValid = True
|
||||
print "Mapping %s -> %s correctly signed by %s" % (userstring, json_res['mxid'], signame)
|
||||
print(
|
||||
"Mapping %s -> %s correctly signed by %s"
|
||||
% (userstring, json_res["mxid"], signame)
|
||||
)
|
||||
break
|
||||
except SignatureVerifyException as e:
|
||||
print "Invalid signature from %s" % (signame)
|
||||
print e
|
||||
print("Invalid signature from %s" % (signame))
|
||||
print(e)
|
||||
|
||||
if sigValid:
|
||||
print "Resolved 3pid %s to %s" % (userstring, json_res['mxid'])
|
||||
mxid = json_res['mxid']
|
||||
print("Resolved 3pid %s to %s" % (userstring, json_res["mxid"]))
|
||||
mxid = json_res["mxid"]
|
||||
else:
|
||||
print "Got association for %s but couldn't verify signature" % (userstring)
|
||||
print(
|
||||
"Got association for %s but couldn't verify signature"
|
||||
% (userstring)
|
||||
)
|
||||
|
||||
if not mxid:
|
||||
mxid = "@" + userstring + ":" + self._domain()
|
||||
@@ -428,18 +463,17 @@ class SynapseCmd(cmd.Cmd):
|
||||
args = self._parse(line, ["roomid"], force_keys=True)
|
||||
self._do_membership_change(args["roomid"], "leave", self._usr())
|
||||
except Exception as e:
|
||||
print e
|
||||
print(e)
|
||||
|
||||
def do_send(self, line):
|
||||
"""Sends a message. "send <roomid> <body>" """
|
||||
args = self._parse(line, ["roomid", "body"])
|
||||
txn_id = "txn%s" % int(time.time())
|
||||
path = "/rooms/%s/send/m.room.message/%s" % (urllib.quote(args["roomid"]),
|
||||
txn_id)
|
||||
body_json = {
|
||||
"msgtype": "m.text",
|
||||
"body": args["body"]
|
||||
}
|
||||
path = "/rooms/%s/send/m.room.message/%s" % (
|
||||
urllib.quote(args["roomid"]),
|
||||
txn_id,
|
||||
)
|
||||
body_json = {"msgtype": "m.text", "body": args["body"]}
|
||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, body_json)
|
||||
|
||||
def do_list(self, line):
|
||||
@@ -453,10 +487,10 @@ class SynapseCmd(cmd.Cmd):
|
||||
"""
|
||||
args = self._parse(line, ["type", "roomid", "qp"])
|
||||
if not "type" in args or not "roomid" in args:
|
||||
print "Must specify type and room ID."
|
||||
print("Must specify type and room ID.")
|
||||
return
|
||||
if args["type"] not in ["members", "messages"]:
|
||||
print "Unrecognised type: %s" % args["type"]
|
||||
print("Unrecognised type: %s" % args["type"])
|
||||
return
|
||||
room_id = args["roomid"]
|
||||
path = "/rooms/%s/%s" % (urllib.quote(room_id), args["type"])
|
||||
@@ -468,11 +502,10 @@ class SynapseCmd(cmd.Cmd):
|
||||
key_value = key_value_str.split("=")
|
||||
qp[key_value[0]] = key_value[1]
|
||||
except:
|
||||
print "Bad query param: %s" % key_value
|
||||
print("Bad query param: %s" % key_value)
|
||||
return
|
||||
|
||||
reactor.callFromThread(self._run_and_pprint, "GET", path,
|
||||
query_params=qp)
|
||||
reactor.callFromThread(self._run_and_pprint, "GET", path, query_params=qp)
|
||||
|
||||
def do_create(self, line):
|
||||
"""Creates a room.
|
||||
@@ -508,14 +541,22 @@ class SynapseCmd(cmd.Cmd):
|
||||
args = self._parse(line, ["method", "path", "data"])
|
||||
# sanity check
|
||||
if "method" not in args or "path" not in args:
|
||||
print "Must specify path and method."
|
||||
print("Must specify path and method.")
|
||||
return
|
||||
|
||||
args["method"] = args["method"].upper()
|
||||
valid_methods = ["PUT", "GET", "POST", "DELETE",
|
||||
"XPUT", "XGET", "XPOST", "XDELETE"]
|
||||
valid_methods = [
|
||||
"PUT",
|
||||
"GET",
|
||||
"POST",
|
||||
"DELETE",
|
||||
"XPUT",
|
||||
"XGET",
|
||||
"XPOST",
|
||||
"XDELETE",
|
||||
]
|
||||
if args["method"] not in valid_methods:
|
||||
print "Unsupported method: %s" % args["method"]
|
||||
print("Unsupported method: %s" % args["method"])
|
||||
return
|
||||
|
||||
if "data" not in args:
|
||||
@@ -524,7 +565,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
try:
|
||||
args["data"] = json.loads(args["data"])
|
||||
except Exception as e:
|
||||
print "Data is not valid JSON. %s" % e
|
||||
print("Data is not valid JSON. %s" % e)
|
||||
return
|
||||
|
||||
qp = {"access_token": self._tok()}
|
||||
@@ -540,10 +581,13 @@ class SynapseCmd(cmd.Cmd):
|
||||
except:
|
||||
pass
|
||||
|
||||
reactor.callFromThread(self._run_and_pprint, args["method"],
|
||||
args["path"],
|
||||
args["data"],
|
||||
query_params=qp)
|
||||
reactor.callFromThread(
|
||||
self._run_and_pprint,
|
||||
args["method"],
|
||||
args["path"],
|
||||
args["data"],
|
||||
query_params=qp,
|
||||
)
|
||||
|
||||
def do_stream(self, line):
|
||||
"""Stream data from the server: "stream <longpoll timeout ms>" """
|
||||
@@ -553,26 +597,29 @@ class SynapseCmd(cmd.Cmd):
|
||||
try:
|
||||
timeout = int(args["timeout"])
|
||||
except ValueError:
|
||||
print "Timeout must be in milliseconds."
|
||||
print("Timeout must be in milliseconds.")
|
||||
return
|
||||
reactor.callFromThread(self._do_event_stream, timeout)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_event_stream(self, timeout):
|
||||
res = yield self.http_client.get_json(
|
||||
self._url() + "/events",
|
||||
{
|
||||
"access_token": self._tok(),
|
||||
"timeout": str(timeout),
|
||||
"from": self.event_stream_token
|
||||
})
|
||||
print json.dumps(res, indent=4)
|
||||
self._url() + "/events",
|
||||
{
|
||||
"access_token": self._tok(),
|
||||
"timeout": str(timeout),
|
||||
"from": self.event_stream_token,
|
||||
},
|
||||
)
|
||||
print(json.dumps(res, indent=4))
|
||||
|
||||
if "chunk" in res:
|
||||
for event in res["chunk"]:
|
||||
if (event["type"] == "m.room.message" and
|
||||
self._is_on("send_delivery_receipts") and
|
||||
event["user_id"] != self._usr()): # not sent by us
|
||||
if (
|
||||
event["type"] == "m.room.message"
|
||||
and self._is_on("send_delivery_receipts")
|
||||
and event["user_id"] != self._usr()
|
||||
): # not sent by us
|
||||
self._send_receipt(event, "d")
|
||||
|
||||
# update the position in the stream
|
||||
@@ -580,18 +627,28 @@ class SynapseCmd(cmd.Cmd):
|
||||
self.event_stream_token = res["end"]
|
||||
|
||||
def _send_receipt(self, event, feedback_type):
|
||||
path = ("/rooms/%s/messages/%s/%s/feedback/%s/%s" %
|
||||
(urllib.quote(event["room_id"]), event["user_id"], event["msg_id"],
|
||||
self._usr(), feedback_type))
|
||||
path = "/rooms/%s/messages/%s/%s/feedback/%s/%s" % (
|
||||
urllib.quote(event["room_id"]),
|
||||
event["user_id"],
|
||||
event["msg_id"],
|
||||
self._usr(),
|
||||
feedback_type,
|
||||
)
|
||||
data = {}
|
||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data,
|
||||
alt_text="Sent receipt for %s" % event["msg_id"])
|
||||
reactor.callFromThread(
|
||||
self._run_and_pprint,
|
||||
"PUT",
|
||||
path,
|
||||
data=data,
|
||||
alt_text="Sent receipt for %s" % event["msg_id"],
|
||||
)
|
||||
|
||||
def _do_membership_change(self, roomid, membership, userid):
|
||||
path = "/rooms/%s/state/m.room.member/%s" % (urllib.quote(roomid), urllib.quote(userid))
|
||||
data = {
|
||||
"membership": membership
|
||||
}
|
||||
path = "/rooms/%s/state/m.room.member/%s" % (
|
||||
urllib.quote(roomid),
|
||||
urllib.quote(userid),
|
||||
)
|
||||
data = {"membership": membership}
|
||||
reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data)
|
||||
|
||||
def do_displayname(self, line):
|
||||
@@ -644,15 +701,20 @@ class SynapseCmd(cmd.Cmd):
|
||||
for i, arg in enumerate(line_args):
|
||||
for config_key in self.config:
|
||||
if ("$" + config_key) in arg:
|
||||
arg = arg.replace("$" + config_key,
|
||||
self.config[config_key])
|
||||
arg = arg.replace("$" + config_key, self.config[config_key])
|
||||
line_args[i] = arg
|
||||
|
||||
return dict(zip(keys, line_args))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _run_and_pprint(self, method, path, data=None,
|
||||
query_params={"access_token": None}, alt_text=None):
|
||||
def _run_and_pprint(
|
||||
self,
|
||||
method,
|
||||
path,
|
||||
data=None,
|
||||
query_params={"access_token": None},
|
||||
alt_text=None,
|
||||
):
|
||||
""" Runs an HTTP request and pretty prints the output.
|
||||
|
||||
Args:
|
||||
@@ -665,31 +727,31 @@ class SynapseCmd(cmd.Cmd):
|
||||
if "access_token" in query_params:
|
||||
query_params["access_token"] = self._tok()
|
||||
|
||||
json_res = yield self.http_client.do_request(method, url,
|
||||
data=data,
|
||||
qparams=query_params)
|
||||
json_res = yield self.http_client.do_request(
|
||||
method, url, data=data, qparams=query_params
|
||||
)
|
||||
if alt_text:
|
||||
print alt_text
|
||||
print(alt_text)
|
||||
else:
|
||||
print json.dumps(json_res, indent=4)
|
||||
print(json.dumps(json_res, indent=4))
|
||||
|
||||
|
||||
def save_config(config):
|
||||
with open(CONFIG_JSON, 'w') as out:
|
||||
with open(CONFIG_JSON, "w") as out:
|
||||
json.dump(config, out)
|
||||
|
||||
|
||||
def main(server_url, identity_server_url, username, token, config_path):
|
||||
print "Synapse command line client"
|
||||
print "==========================="
|
||||
print "Server: %s" % server_url
|
||||
print "Type 'help' to get started."
|
||||
print "Close this console with CTRL+C then CTRL+D."
|
||||
print("Synapse command line client")
|
||||
print("===========================")
|
||||
print("Server: %s" % server_url)
|
||||
print("Type 'help' to get started.")
|
||||
print("Close this console with CTRL+C then CTRL+D.")
|
||||
if not username or not token:
|
||||
print "- 'register <username>' - Register an account"
|
||||
print "- 'stream' - Connect to the event stream"
|
||||
print "- 'create <roomid>' - Create a room"
|
||||
print "- 'send <roomid> <message>' - Send a message"
|
||||
print("- 'register <username>' - Register an account")
|
||||
print("- 'stream' - Connect to the event stream")
|
||||
print("- 'create <roomid>' - Create a room")
|
||||
print("- 'send <roomid> <message>' - Send a message")
|
||||
http_client = TwistedHttpClient()
|
||||
|
||||
# the command line client
|
||||
@@ -699,13 +761,13 @@ def main(server_url, identity_server_url, username, token, config_path):
|
||||
global CONFIG_JSON
|
||||
CONFIG_JSON = config_path # bit cheeky, but just overwrite the global
|
||||
try:
|
||||
with open(config_path, 'r') as config:
|
||||
with open(config_path, "r") as config:
|
||||
syn_cmd.config = json.load(config)
|
||||
try:
|
||||
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
||||
except:
|
||||
pass
|
||||
print "Loaded config from %s" % config_path
|
||||
print("Loaded config from %s" % config_path)
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -716,27 +778,37 @@ def main(server_url, identity_server_url, username, token, config_path):
|
||||
reactor.run()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser("Starts a synapse client.")
|
||||
parser.add_argument(
|
||||
"-s", "--server", dest="server", default="http://localhost:8008",
|
||||
help="The URL of the home server to talk to.")
|
||||
"-s",
|
||||
"--server",
|
||||
dest="server",
|
||||
default="http://localhost:8008",
|
||||
help="The URL of the home server to talk to.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-i", "--identity-server", dest="identityserver", default="http://localhost:8090",
|
||||
help="The URL of the identity server to talk to.")
|
||||
"-i",
|
||||
"--identity-server",
|
||||
dest="identityserver",
|
||||
default="http://localhost:8090",
|
||||
help="The URL of the identity server to talk to.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-u", "--username", dest="username",
|
||||
help="Your username on the server.")
|
||||
"-u", "--username", dest="username", help="Your username on the server."
|
||||
)
|
||||
parser.add_argument("-t", "--token", dest="token", help="Your access token.")
|
||||
parser.add_argument(
|
||||
"-t", "--token", dest="token",
|
||||
help="Your access token.")
|
||||
parser.add_argument(
|
||||
"-c", "--config", dest="config", default=CONFIG_JSON,
|
||||
help="The location of the config.json file to read from.")
|
||||
"-c",
|
||||
"--config",
|
||||
dest="config",
|
||||
default=CONFIG_JSON,
|
||||
help="The location of the config.json file to read from.",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
if not args.server:
|
||||
print "You must supply a server URL to communicate with."
|
||||
print("You must supply a server URL to communicate with.")
|
||||
parser.print_help()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
from twisted.web.client import Agent, readBody
|
||||
from twisted.web.http_headers import Headers
|
||||
from twisted.internet import defer, reactor
|
||||
@@ -72,9 +73,7 @@ class TwistedHttpClient(HttpClient):
|
||||
@defer.inlineCallbacks
|
||||
def put_json(self, url, data):
|
||||
response = yield self._create_put_request(
|
||||
url,
|
||||
data,
|
||||
headers_dict={"Content-Type": ["application/json"]}
|
||||
url, data, headers_dict={"Content-Type": ["application/json"]}
|
||||
)
|
||||
body = yield readBody(response)
|
||||
defer.returnValue((response.code, body))
|
||||
@@ -94,40 +93,34 @@ class TwistedHttpClient(HttpClient):
|
||||
"""
|
||||
|
||||
if "Content-Type" not in headers_dict:
|
||||
raise defer.error(
|
||||
RuntimeError("Must include Content-Type header for PUTs"))
|
||||
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
|
||||
|
||||
return self._create_request(
|
||||
"PUT",
|
||||
url,
|
||||
producer=_JsonProducer(json_data),
|
||||
headers_dict=headers_dict
|
||||
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
|
||||
)
|
||||
|
||||
def _create_get_request(self, url, headers_dict={}):
|
||||
""" Wrapper of _create_request to issue a GET request
|
||||
"""
|
||||
return self._create_request(
|
||||
"GET",
|
||||
url,
|
||||
headers_dict=headers_dict
|
||||
)
|
||||
return self._create_request("GET", url, headers_dict=headers_dict)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def do_request(self, method, url, data=None, qparams=None, jsonreq=True, headers={}):
|
||||
def do_request(
|
||||
self, method, url, data=None, qparams=None, jsonreq=True, headers={}
|
||||
):
|
||||
if qparams:
|
||||
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
|
||||
|
||||
if jsonreq:
|
||||
prod = _JsonProducer(data)
|
||||
headers['Content-Type'] = ["application/json"];
|
||||
headers["Content-Type"] = ["application/json"]
|
||||
else:
|
||||
prod = _RawProducer(data)
|
||||
|
||||
if method in ["POST", "PUT"]:
|
||||
response = yield self._create_request(method, url,
|
||||
producer=prod,
|
||||
headers_dict=headers)
|
||||
response = yield self._create_request(
|
||||
method, url, producer=prod, headers_dict=headers
|
||||
)
|
||||
else:
|
||||
response = yield self._create_request(method, url)
|
||||
|
||||
@@ -141,27 +134,24 @@ class TwistedHttpClient(HttpClient):
|
||||
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
||||
|
||||
retries_left = 5
|
||||
print "%s to %s with headers %s" % (method, url, headers_dict)
|
||||
print("%s to %s with headers %s" % (method, url, headers_dict))
|
||||
if self.verbose and producer:
|
||||
if "password" in producer.data:
|
||||
temp = producer.data["password"]
|
||||
producer.data["password"] = "[REDACTED]"
|
||||
print json.dumps(producer.data, indent=4)
|
||||
print(json.dumps(producer.data, indent=4))
|
||||
producer.data["password"] = temp
|
||||
else:
|
||||
print json.dumps(producer.data, indent=4)
|
||||
print(json.dumps(producer.data, indent=4))
|
||||
|
||||
while True:
|
||||
try:
|
||||
response = yield self.agent.request(
|
||||
method,
|
||||
url.encode("UTF8"),
|
||||
Headers(headers_dict),
|
||||
producer
|
||||
method, url.encode("UTF8"), Headers(headers_dict), producer
|
||||
)
|
||||
break
|
||||
except Exception as e:
|
||||
print "uh oh: %s" % e
|
||||
print("uh oh: %s" % e)
|
||||
if retries_left:
|
||||
yield self.sleep(2 ** (5 - retries_left))
|
||||
retries_left -= 1
|
||||
@@ -169,8 +159,8 @@ class TwistedHttpClient(HttpClient):
|
||||
raise e
|
||||
|
||||
if self.verbose:
|
||||
print "Status %s %s" % (response.code, response.phrase)
|
||||
print pformat(list(response.headers.getAllRawHeaders()))
|
||||
print("Status %s %s" % (response.code, response.phrase))
|
||||
print(pformat(list(response.headers.getAllRawHeaders())))
|
||||
defer.returnValue(response)
|
||||
|
||||
def sleep(self, seconds):
|
||||
@@ -178,6 +168,7 @@ class TwistedHttpClient(HttpClient):
|
||||
reactor.callLater(seconds, d.callback, seconds)
|
||||
return d
|
||||
|
||||
|
||||
class _RawProducer(object):
|
||||
def __init__(self, data):
|
||||
self.data = data
|
||||
@@ -194,9 +185,11 @@ class _RawProducer(object):
|
||||
def stopProducing(self):
|
||||
pass
|
||||
|
||||
|
||||
class _JsonProducer(object):
|
||||
""" Used by the twisted http client to create the HTTP body from json
|
||||
"""
|
||||
|
||||
def __init__(self, jsn):
|
||||
self.data = jsn
|
||||
self.body = json.dumps(jsn).encode("utf8")
|
||||
|
||||
@@ -1,5 +1,9 @@
# Synapse Docker

FIXME: this is out-of-date as of
https://github.com/matrix-org/synapse/issues/5518. Contributions to bring it up
to date would be welcome.

### Automated configuration

It is recommended that you use Docker Compose to run your containers, including
@@ -19,13 +19,13 @@ from curses.ascii import isprint
|
||||
from twisted.internet import reactor
|
||||
|
||||
|
||||
class CursesStdIO():
|
||||
class CursesStdIO:
|
||||
def __init__(self, stdscr, callback=None):
|
||||
self.statusText = "Synapse test app -"
|
||||
self.searchText = ''
|
||||
self.searchText = ""
|
||||
self.stdscr = stdscr
|
||||
|
||||
self.logLine = ''
|
||||
self.logLine = ""
|
||||
|
||||
self.callback = callback
|
||||
|
||||
@@ -71,8 +71,7 @@ class CursesStdIO():
|
||||
i = 0
|
||||
index = len(self.lines) - 1
|
||||
while i < (self.rows - 3) and index >= 0:
|
||||
self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index],
|
||||
curses.A_NORMAL)
|
||||
self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index], curses.A_NORMAL)
|
||||
i = i + 1
|
||||
index = index - 1
|
||||
|
||||
@@ -85,15 +84,13 @@ class CursesStdIO():
|
||||
raise RuntimeError("TextTooLongError")
|
||||
|
||||
self.stdscr.addstr(
|
||||
self.rows - 2, 0,
|
||||
text + ' ' * (self.cols - len(text)),
|
||||
curses.A_STANDOUT)
|
||||
self.rows - 2, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
|
||||
)
|
||||
|
||||
def printLogLine(self, text):
|
||||
self.stdscr.addstr(
|
||||
0, 0,
|
||||
text + ' ' * (self.cols - len(text)),
|
||||
curses.A_STANDOUT)
|
||||
0, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
|
||||
)
|
||||
|
||||
def doRead(self):
|
||||
""" Input is ready! """
|
||||
@@ -105,7 +102,7 @@ class CursesStdIO():
|
||||
|
||||
elif c == curses.KEY_ENTER or c == 10:
|
||||
text = self.searchText
|
||||
self.searchText = ''
|
||||
self.searchText = ""
|
||||
|
||||
self.print_line(">> %s" % text)
|
||||
|
||||
@@ -122,11 +119,13 @@ class CursesStdIO():
|
||||
return
|
||||
self.searchText = self.searchText + chr(c)
|
||||
|
||||
self.stdscr.addstr(self.rows - 1, 0,
|
||||
self.searchText + (' ' * (
|
||||
self.cols - len(self.searchText) - 2)))
|
||||
self.stdscr.addstr(
|
||||
self.rows - 1,
|
||||
0,
|
||||
self.searchText + (" " * (self.cols - len(self.searchText) - 2)),
|
||||
)
|
||||
|
||||
self.paintStatus(self.statusText + ' %d' % len(self.searchText))
|
||||
self.paintStatus(self.statusText + " %d" % len(self.searchText))
|
||||
self.stdscr.move(self.rows - 1, len(self.searchText))
|
||||
self.stdscr.refresh()
|
||||
|
||||
@@ -143,7 +142,6 @@ class CursesStdIO():
|
||||
|
||||
|
||||
class Callback(object):
|
||||
|
||||
def __init__(self, stdio):
|
||||
self.stdio = stdio
|
||||
|
||||
@@ -152,7 +150,7 @@ class Callback(object):
|
||||
|
||||
|
||||
def main(stdscr):
|
||||
screen = CursesStdIO(stdscr) # create Screen object
|
||||
screen = CursesStdIO(stdscr) # create Screen object
|
||||
|
||||
callback = Callback(screen)
|
||||
|
||||
@@ -164,5 +162,5 @@ def main(stdscr):
|
||||
screen.close()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
curses.wrapper(main)
|
||||
|
||||
@@ -28,9 +28,7 @@ Currently assumes the local address is localhost:<port>
|
||||
"""
|
||||
|
||||
|
||||
from synapse.federation import (
|
||||
ReplicationHandler
|
||||
)
|
||||
from synapse.federation import ReplicationHandler
|
||||
|
||||
from synapse.federation.units import Pdu
|
||||
|
||||
@@ -38,7 +36,7 @@ from synapse.util import origin_from_ucid
|
||||
|
||||
from synapse.app.homeserver import SynapseHomeServer
|
||||
|
||||
#from synapse.util.logutils import log_function
|
||||
# from synapse.util.logutils import log_function
|
||||
|
||||
from twisted.internet import reactor, defer
|
||||
from twisted.python import log
|
||||
@@ -83,7 +81,7 @@ class InputOutput(object):
|
||||
room_name, = m.groups()
|
||||
self.print_line("%s joining %s" % (self.user, room_name))
|
||||
self.server.join_room(room_name, self.user, self.user)
|
||||
#self.print_line("OK.")
|
||||
# self.print_line("OK.")
|
||||
return
|
||||
|
||||
m = re.match("^invite (\S+) (\S+)$", line)
|
||||
@@ -92,7 +90,7 @@ class InputOutput(object):
|
||||
room_name, invitee = m.groups()
|
||||
self.print_line("%s invited to %s" % (invitee, room_name))
|
||||
self.server.invite_to_room(room_name, self.user, invitee)
|
||||
#self.print_line("OK.")
|
||||
# self.print_line("OK.")
|
||||
return
|
||||
|
||||
m = re.match("^send (\S+) (.*)$", line)
|
||||
@@ -101,7 +99,7 @@ class InputOutput(object):
|
||||
room_name, body = m.groups()
|
||||
self.print_line("%s send to %s" % (self.user, room_name))
|
||||
self.server.send_message(room_name, self.user, body)
|
||||
#self.print_line("OK.")
|
||||
# self.print_line("OK.")
|
||||
return
|
||||
|
||||
m = re.match("^backfill (\S+)$", line)
|
||||
@@ -125,7 +123,6 @@ class InputOutput(object):
|
||||
|
||||
|
||||
class IOLoggerHandler(logging.Handler):
|
||||
|
||||
def __init__(self, io):
|
||||
logging.Handler.__init__(self)
|
||||
self.io = io
|
||||
@@ -142,6 +139,7 @@ class Room(object):
|
||||
""" Used to store (in memory) the current membership state of a room, and
|
||||
which home servers we should send PDUs associated with the room to.
|
||||
"""
|
||||
|
||||
def __init__(self, room_name):
|
||||
self.room_name = room_name
|
||||
self.invited = set()
|
||||
@@ -175,6 +173,7 @@ class HomeServer(ReplicationHandler):
|
||||
""" A very basic home server implentation that allows people to join a
|
||||
room and then invite other people.
|
||||
"""
|
||||
|
||||
def __init__(self, server_name, replication_layer, output):
|
||||
self.server_name = server_name
|
||||
self.replication_layer = replication_layer
|
||||
@@ -197,26 +196,27 @@ class HomeServer(ReplicationHandler):
|
||||
elif pdu.content["membership"] == "invite":
|
||||
self._on_invite(pdu.origin, pdu.context, pdu.state_key)
|
||||
else:
|
||||
self.output.print_line("#%s (unrec) %s = %s" %
|
||||
(pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
||||
self.output.print_line(
|
||||
"#%s (unrec) %s = %s"
|
||||
% (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
||||
)
|
||||
|
||||
#def on_state_change(self, pdu):
|
||||
##self.output.print_line("#%s (state) %s *** %s" %
|
||||
##(pdu.context, pdu.state_key, pdu.pdu_type)
|
||||
##)
|
||||
# def on_state_change(self, pdu):
|
||||
##self.output.print_line("#%s (state) %s *** %s" %
|
||||
##(pdu.context, pdu.state_key, pdu.pdu_type)
|
||||
##)
|
||||
|
||||
#if "joinee" in pdu.content:
|
||||
#self._on_join(pdu.context, pdu.content["joinee"])
|
||||
#elif "invitee" in pdu.content:
|
||||
#self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
|
||||
# if "joinee" in pdu.content:
|
||||
# self._on_join(pdu.context, pdu.content["joinee"])
|
||||
# elif "invitee" in pdu.content:
|
||||
# self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
|
||||
|
||||
def _on_message(self, pdu):
|
||||
""" We received a message
|
||||
"""
|
||||
self.output.print_line("#%s %s %s" %
|
||||
(pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||
)
|
||||
self.output.print_line(
|
||||
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||
)
|
||||
|
||||
def _on_join(self, context, joinee):
|
||||
""" Someone has joined a room, either a remote user or a local user
|
||||
@@ -224,9 +224,7 @@ class HomeServer(ReplicationHandler):
|
||||
room = self._get_or_create_room(context)
|
||||
room.add_participant(joinee)
|
||||
|
||||
self.output.print_line("#%s %s %s" %
|
||||
(context, joinee, "*** JOINED")
|
||||
)
|
||||
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
||||
|
||||
def _on_invite(self, origin, context, invitee):
|
||||
""" Someone has been invited
|
||||
@@ -234,9 +232,7 @@ class HomeServer(ReplicationHandler):
|
||||
room = self._get_or_create_room(context)
|
||||
room.add_invited(invitee)
|
||||
|
||||
self.output.print_line("#%s %s %s" %
|
||||
(context, invitee, "*** INVITED")
|
||||
)
|
||||
self.output.print_line("#%s %s %s" % (context, invitee, "*** INVITED"))
|
||||
|
||||
if not room.have_got_metadata and origin is not self.server_name:
|
||||
logger.debug("Get room state")
|
||||
@@ -272,14 +268,14 @@ class HomeServer(ReplicationHandler):
|
||||
|
||||
try:
|
||||
pdu = Pdu.create_new(
|
||||
context=room_name,
|
||||
pdu_type="sy.room.member",
|
||||
is_state=True,
|
||||
state_key=joinee,
|
||||
content={"membership": "join"},
|
||||
origin=self.server_name,
|
||||
destinations=destinations,
|
||||
)
|
||||
context=room_name,
|
||||
pdu_type="sy.room.member",
|
||||
is_state=True,
|
||||
state_key=joinee,
|
||||
content={"membership": "join"},
|
||||
origin=self.server_name,
|
||||
destinations=destinations,
|
||||
)
|
||||
yield self.replication_layer.send_pdu(pdu)
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
@@ -318,21 +314,21 @@ class HomeServer(ReplicationHandler):
|
||||
return self.replication_layer.backfill(dest, room_name, limit)
|
||||
|
||||
def _get_room_remote_servers(self, room_name):
|
||||
return [i for i in self.joined_rooms.setdefault(room_name,).servers]
|
||||
return [i for i in self.joined_rooms.setdefault(room_name).servers]
|
||||
|
||||
def _get_or_create_room(self, room_name):
|
||||
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
||||
|
||||
def get_servers_for_context(self, context):
|
||||
return defer.succeed(
|
||||
self.joined_rooms.setdefault(context, Room(context)).servers
|
||||
)
|
||||
self.joined_rooms.setdefault(context, Room(context)).servers
|
||||
)
|
||||
|
||||
|
||||
def main(stdscr):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('user', type=str)
|
||||
parser.add_argument('-v', '--verbose', action='count')
|
||||
parser.add_argument("user", type=str)
|
||||
parser.add_argument("-v", "--verbose", action="count")
|
||||
args = parser.parse_args()
|
||||
|
||||
user = args.user
|
||||
@@ -342,8 +338,9 @@ def main(stdscr):
|
||||
|
||||
root_logger = logging.getLogger()
|
||||
|
||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(lineno)d - '
|
||||
'%(levelname)s - %(message)s')
|
||||
formatter = logging.Formatter(
|
||||
"%(asctime)s - %(name)s - %(lineno)d - " "%(levelname)s - %(message)s"
|
||||
)
|
||||
if not os.path.exists("logs"):
|
||||
os.makedirs("logs")
|
||||
fh = logging.FileHandler("logs/%s" % user)
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,3 +1,5 @@
|
||||
from __future__ import print_function
|
||||
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@@ -48,7 +50,7 @@ def make_graph(pdus, room, filename_prefix):
|
||||
c = colors.pop()
|
||||
color_map[o] = c
|
||||
except:
|
||||
print "Run out of colours!"
|
||||
print("Run out of colours!")
|
||||
color_map[o] = "black"
|
||||
|
||||
graph = pydot.Dot(graph_name="Test")
|
||||
@@ -57,9 +59,9 @@ def make_graph(pdus, room, filename_prefix):
|
||||
name = make_name(pdu.get("pdu_id"), pdu.get("origin"))
|
||||
pdu_map[name] = pdu
|
||||
|
||||
t = datetime.datetime.fromtimestamp(
|
||||
float(pdu["ts"]) / 1000
|
||||
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
||||
t = datetime.datetime.fromtimestamp(float(pdu["ts"]) / 1000).strftime(
|
||||
"%Y-%m-%d %H:%M:%S,%f"
|
||||
)
|
||||
|
||||
label = (
|
||||
"<"
|
||||
@@ -79,11 +81,7 @@ def make_graph(pdus, room, filename_prefix):
|
||||
"depth": pdu.get("depth"),
|
||||
}
|
||||
|
||||
node = pydot.Node(
|
||||
name=name,
|
||||
label=label,
|
||||
color=color_map[pdu.get("origin")]
|
||||
)
|
||||
node = pydot.Node(name=name, label=label, color=color_map[pdu.get("origin")])
|
||||
node_map[name] = node
|
||||
graph.add_node(node)
|
||||
|
||||
@@ -93,7 +91,7 @@ def make_graph(pdus, room, filename_prefix):
|
||||
end_name = make_name(i, o)
|
||||
|
||||
if end_name not in node_map:
|
||||
print "%s not in nodes" % end_name
|
||||
print("%s not in nodes" % end_name)
|
||||
continue
|
||||
|
||||
edge = pydot.Edge(node_map[start_name], node_map[end_name])
|
||||
@@ -107,14 +105,13 @@ def make_graph(pdus, room, filename_prefix):
|
||||
|
||||
if prev_state_name in node_map:
|
||||
state_edge = pydot.Edge(
|
||||
node_map[start_name], node_map[prev_state_name],
|
||||
style='dotted'
|
||||
node_map[start_name], node_map[prev_state_name], style="dotted"
|
||||
)
|
||||
graph.add_edge(state_edge)
|
||||
|
||||
graph.write('%s.dot' % filename_prefix, format='raw', prog='dot')
|
||||
# graph.write_png("%s.png" % filename_prefix, prog='dot')
|
||||
graph.write_svg("%s.svg" % filename_prefix, prog='dot')
|
||||
graph.write("%s.dot" % filename_prefix, format="raw", prog="dot")
|
||||
# graph.write_png("%s.png" % filename_prefix, prog='dot')
|
||||
graph.write_svg("%s.svg" % filename_prefix, prog="dot")
|
||||
|
||||
|
||||
def get_pdus(host, room):
|
||||
@@ -130,15 +127,14 @@ def get_pdus(host, room):
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate a PDU graph for a given room by talking "
|
||||
"to the given homeserver to get the list of PDUs. \n"
|
||||
"Requires pydot."
|
||||
"to the given homeserver to get the list of PDUs. \n"
|
||||
"Requires pydot."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p", "--prefix", dest="prefix",
|
||||
help="String to prefix output files with"
|
||||
"-p", "--prefix", dest="prefix", help="String to prefix output files with"
|
||||
)
|
||||
parser.add_argument('host')
|
||||
parser.add_argument('room')
|
||||
parser.add_argument("host")
|
||||
parser.add_argument("room")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
@@ -36,10 +36,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
||||
args = [room_id]
|
||||
|
||||
if limit:
|
||||
sql += (
|
||||
" ORDER BY topological_ordering DESC, stream_ordering DESC "
|
||||
"LIMIT ?"
|
||||
)
|
||||
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC " "LIMIT ?"
|
||||
|
||||
args.append(limit)
|
||||
|
||||
@@ -56,9 +53,8 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
||||
|
||||
for event in events:
|
||||
c = conn.execute(
|
||||
"SELECT state_group FROM event_to_state_groups "
|
||||
"WHERE event_id = ?",
|
||||
(event.event_id,)
|
||||
"SELECT state_group FROM event_to_state_groups " "WHERE event_id = ?",
|
||||
(event.event_id,),
|
||||
)
|
||||
|
||||
res = c.fetchone()
|
||||
@@ -69,7 +65,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
||||
|
||||
t = datetime.datetime.fromtimestamp(
|
||||
float(event.origin_server_ts) / 1000
|
||||
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
||||
).strftime("%Y-%m-%d %H:%M:%S,%f")
|
||||
|
||||
content = json.dumps(unfreeze(event.get_dict()["content"]))
|
||||
|
||||
@@ -93,10 +89,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
||||
"state_group": state_group,
|
||||
}
|
||||
|
||||
node = pydot.Node(
|
||||
name=event.event_id,
|
||||
label=label,
|
||||
)
|
||||
node = pydot.Node(name=event.event_id, label=label)
|
||||
|
||||
node_map[event.event_id] = node
|
||||
graph.add_node(node)
|
||||
@@ -106,10 +99,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
||||
try:
|
||||
end_node = node_map[prev_id]
|
||||
except:
|
||||
end_node = pydot.Node(
|
||||
name=prev_id,
|
||||
label="<<b>%s</b>>" % (prev_id,),
|
||||
)
|
||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||
|
||||
node_map[prev_id] = end_node
|
||||
graph.add_node(end_node)
|
||||
@@ -121,36 +111,33 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
||||
if len(event_ids) <= 1:
|
||||
continue
|
||||
|
||||
cluster = pydot.Cluster(
|
||||
str(group),
|
||||
label="<State Group: %s>" % (str(group),)
|
||||
)
|
||||
cluster = pydot.Cluster(str(group), label="<State Group: %s>" % (str(group),))
|
||||
|
||||
for event_id in event_ids:
|
||||
cluster.add_node(node_map[event_id])
|
||||
|
||||
graph.add_subgraph(cluster)
|
||||
|
||||
graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
|
||||
graph.write_svg("%s.svg" % file_prefix, prog='dot')
|
||||
graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
|
||||
graph.write_svg("%s.svg" % file_prefix, prog="dot")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate a PDU graph for a given room by talking "
|
||||
"to the given homeserver to get the list of PDUs. \n"
|
||||
"Requires pydot."
|
||||
"to the given homeserver to get the list of PDUs. \n"
|
||||
"Requires pydot."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p", "--prefix", dest="prefix",
|
||||
"-p",
|
||||
"--prefix",
|
||||
dest="prefix",
|
||||
help="String to prefix output files with",
|
||||
default="graph_output"
|
||||
default="graph_output",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l", "--limit",
|
||||
help="Only retrieve the last N events.",
|
||||
)
|
||||
parser.add_argument('db')
|
||||
parser.add_argument('room')
|
||||
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
||||
parser.add_argument("db")
|
||||
parser.add_argument("room")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
from __future__ import print_function
|
||||
|
||||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@@ -26,22 +28,22 @@ from six import string_types
|
||||
|
||||
|
||||
def make_graph(file_name, room_id, file_prefix, limit):
|
||||
print "Reading lines"
|
||||
print("Reading lines")
|
||||
with open(file_name) as f:
|
||||
lines = f.readlines()
|
||||
|
||||
print "Read lines"
|
||||
print("Read lines")
|
||||
|
||||
events = [FrozenEvent(json.loads(line)) for line in lines]
|
||||
|
||||
print "Loaded events."
|
||||
print("Loaded events.")
|
||||
|
||||
events.sort(key=lambda e: e.depth)
|
||||
|
||||
print "Sorted events"
|
||||
print("Sorted events")
|
||||
|
||||
if limit:
|
||||
events = events[-int(limit):]
|
||||
events = events[-int(limit) :]
|
||||
|
||||
node_map = {}
|
||||
|
||||
@@ -50,12 +52,12 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
||||
for event in events:
|
||||
t = datetime.datetime.fromtimestamp(
|
||||
float(event.origin_server_ts) / 1000
|
||||
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
||||
).strftime("%Y-%m-%d %H:%M:%S,%f")
|
||||
|
||||
content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
|
||||
content = content.replace("\n", "<br/>\n")
|
||||
|
||||
print content
|
||||
print(content)
|
||||
content = []
|
||||
for key, value in unfreeze(event.get_dict()["content"]).items():
|
||||
if value is None:
|
||||
@@ -66,15 +68,16 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
||||
value = json.dumps(value)
|
||||
|
||||
content.append(
|
||||
"<b>%s</b>: %s," % (
|
||||
cgi.escape(key, quote=True).encode("ascii", 'xmlcharrefreplace'),
|
||||
cgi.escape(value, quote=True).encode("ascii", 'xmlcharrefreplace'),
|
||||
"<b>%s</b>: %s,"
|
||||
% (
|
||||
cgi.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||
cgi.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
|
||||
)
|
||||
)
|
||||
|
||||
content = "<br/>\n".join(content)
|
||||
|
||||
print content
|
||||
print(content)
|
||||
|
||||
label = (
|
||||
"<"
|
||||
@@ -94,25 +97,19 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
||||
"depth": event.depth,
|
||||
}
|
||||
|
||||
node = pydot.Node(
|
||||
name=event.event_id,
|
||||
label=label,
|
||||
)
|
||||
node = pydot.Node(name=event.event_id, label=label)
|
||||
|
||||
node_map[event.event_id] = node
|
||||
graph.add_node(node)
|
||||
|
||||
print "Created Nodes"
|
||||
print("Created Nodes")
|
||||
|
||||
for event in events:
|
||||
for prev_id, _ in event.prev_events:
|
||||
try:
|
||||
end_node = node_map[prev_id]
|
||||
except:
|
||||
end_node = pydot.Node(
|
||||
name=prev_id,
|
||||
label="<<b>%s</b>>" % (prev_id,),
|
||||
)
|
||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||
|
||||
node_map[prev_id] = end_node
|
||||
graph.add_node(end_node)
|
||||
@@ -120,33 +117,33 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
||||
edge = pydot.Edge(node_map[event.event_id], end_node)
|
||||
graph.add_edge(edge)
|
||||
|
||||
print "Created edges"
|
||||
print("Created edges")
|
||||
|
||||
graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
|
||||
graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
|
||||
|
||||
print "Created Dot"
|
||||
print("Created Dot")
|
||||
|
||||
graph.write_svg("%s.svg" % file_prefix, prog='dot')
|
||||
graph.write_svg("%s.svg" % file_prefix, prog="dot")
|
||||
|
||||
print("Created svg")
|
||||
|
||||
print "Created svg"
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate a PDU graph for a given room by reading "
|
||||
"from a file with line deliminated events. \n"
|
||||
"Requires pydot."
|
||||
"from a file with line deliminated events. \n"
|
||||
"Requires pydot."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p", "--prefix", dest="prefix",
|
||||
"-p",
|
||||
"--prefix",
|
||||
dest="prefix",
|
||||
help="String to prefix output files with",
|
||||
default="graph_output"
|
||||
default="graph_output",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l", "--limit",
|
||||
help="Only retrieve the last N events.",
|
||||
)
|
||||
parser.add_argument('event_file')
|
||||
parser.add_argument('room')
|
||||
parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
|
||||
parser.add_argument("event_file")
|
||||
parser.add_argument("room")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
@@ -8,8 +8,9 @@ we set the remote SDP at which point the stream ends. Our video never gets to
|
||||
the bridge.
|
||||
|
||||
Requires:
|
||||
npm install jquery jsdom
|
||||
npm install jquery jsdom
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import gevent
|
||||
import grequests
|
||||
@@ -19,24 +20,25 @@ import urllib
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
#ACCESS_TOKEN="" #
|
||||
# ACCESS_TOKEN="" #
|
||||
|
||||
MATRIXBASE = 'https://matrix.org/_matrix/client/api/v1/'
|
||||
MYUSERNAME = '@davetest:matrix.org'
|
||||
MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
|
||||
MYUSERNAME = "@davetest:matrix.org"
|
||||
|
||||
HTTPBIND = 'https://meet.jit.si/http-bind'
|
||||
#HTTPBIND = 'https://jitsi.vuc.me/http-bind'
|
||||
#ROOMNAME = "matrix"
|
||||
HTTPBIND = "https://meet.jit.si/http-bind"
|
||||
# HTTPBIND = 'https://jitsi.vuc.me/http-bind'
|
||||
# ROOMNAME = "matrix"
|
||||
ROOMNAME = "pibble"
|
||||
|
||||
HOST="guest.jit.si"
|
||||
#HOST="jitsi.vuc.me"
|
||||
HOST = "guest.jit.si"
|
||||
# HOST="jitsi.vuc.me"
|
||||
|
||||
TURNSERVER="turn.guest.jit.si"
|
||||
#TURNSERVER="turn.jitsi.vuc.me"
|
||||
TURNSERVER = "turn.guest.jit.si"
|
||||
# TURNSERVER="turn.jitsi.vuc.me"
|
||||
|
||||
ROOMDOMAIN = "meet.jit.si"
|
||||
# ROOMDOMAIN="conference.jitsi.vuc.me"
|
||||
|
||||
ROOMDOMAIN="meet.jit.si"
|
||||
#ROOMDOMAIN="conference.jitsi.vuc.me"
|
||||
|
||||
class TrivialMatrixClient:
|
||||
def __init__(self, access_token):
|
||||
@@ -45,38 +47,50 @@ class TrivialMatrixClient:
|
||||
|
||||
def getEvent(self):
|
||||
while True:
|
||||
url = MATRIXBASE+'events?access_token='+self.access_token+"&timeout=60000"
|
||||
url = (
|
||||
MATRIXBASE
|
||||
+ "events?access_token="
|
||||
+ self.access_token
|
||||
+ "&timeout=60000"
|
||||
)
|
||||
if self.token:
|
||||
url += "&from="+self.token
|
||||
url += "&from=" + self.token
|
||||
req = grequests.get(url)
|
||||
resps = grequests.map([req])
|
||||
obj = json.loads(resps[0].content)
|
||||
print "incoming from matrix",obj
|
||||
if 'end' not in obj:
|
||||
print("incoming from matrix", obj)
|
||||
if "end" not in obj:
|
||||
continue
|
||||
self.token = obj['end']
|
||||
if len(obj['chunk']):
|
||||
return obj['chunk'][0]
|
||||
self.token = obj["end"]
|
||||
if len(obj["chunk"]):
|
||||
return obj["chunk"][0]
|
||||
|
||||
def joinRoom(self, roomId):
|
||||
url = MATRIXBASE+'rooms/'+roomId+'/join?access_token='+self.access_token
|
||||
print url
|
||||
headers={ 'Content-Type': 'application/json' }
|
||||
req = grequests.post(url, headers=headers, data='{}')
|
||||
url = MATRIXBASE + "rooms/" + roomId + "/join?access_token=" + self.access_token
|
||||
print(url)
|
||||
headers = {"Content-Type": "application/json"}
|
||||
req = grequests.post(url, headers=headers, data="{}")
|
||||
resps = grequests.map([req])
|
||||
obj = json.loads(resps[0].content)
|
||||
print "response: ",obj
|
||||
print("response: ", obj)
|
||||
|
||||
def sendEvent(self, roomId, evType, event):
|
||||
url = MATRIXBASE+'rooms/'+roomId+'/send/'+evType+'?access_token='+self.access_token
|
||||
print url
|
||||
print json.dumps(event)
|
||||
headers={ 'Content-Type': 'application/json' }
|
||||
url = (
|
||||
MATRIXBASE
|
||||
+ "rooms/"
|
||||
+ roomId
|
||||
+ "/send/"
|
||||
+ evType
|
||||
+ "?access_token="
|
||||
+ self.access_token
|
||||
)
|
||||
print(url)
|
||||
print(json.dumps(event))
|
||||
headers = {"Content-Type": "application/json"}
|
||||
req = grequests.post(url, headers=headers, data=json.dumps(event))
|
||||
resps = grequests.map([req])
|
||||
obj = json.loads(resps[0].content)
|
||||
print "response: ",obj
|
||||
|
||||
print("response: ", obj)
|
||||
|
||||
|
||||
xmppClients = {}
|
||||
@@ -85,39 +99,40 @@ xmppClients = {}
|
||||
def matrixLoop():
|
||||
while True:
|
||||
ev = matrixCli.getEvent()
|
||||
print ev
|
||||
if ev['type'] == 'm.room.member':
|
||||
print 'membership event'
|
||||
if ev['membership'] == 'invite' and ev['state_key'] == MYUSERNAME:
|
||||
roomId = ev['room_id']
|
||||
print "joining room %s" % (roomId)
|
||||
print(ev)
|
||||
if ev["type"] == "m.room.member":
|
||||
print("membership event")
|
||||
if ev["membership"] == "invite" and ev["state_key"] == MYUSERNAME:
|
||||
roomId = ev["room_id"]
|
||||
print("joining room %s" % (roomId))
|
||||
matrixCli.joinRoom(roomId)
|
||||
elif ev['type'] == 'm.room.message':
|
||||
if ev['room_id'] in xmppClients:
|
||||
print "already have a bridge for that user, ignoring"
|
||||
elif ev["type"] == "m.room.message":
|
||||
if ev["room_id"] in xmppClients:
|
||||
print("already have a bridge for that user, ignoring")
|
||||
continue
|
||||
print "got message, connecting"
|
||||
xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||
gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||
elif ev['type'] == 'm.call.invite':
|
||||
print "Incoming call"
|
||||
#sdp = ev['content']['offer']['sdp']
|
||||
#print "sdp: %s" % (sdp)
|
||||
#xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||
#gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||
elif ev['type'] == 'm.call.answer':
|
||||
print "Call answered"
|
||||
sdp = ev['content']['answer']['sdp']
|
||||
if ev['room_id'] not in xmppClients:
|
||||
print "We didn't have a call for that room"
|
||||
print("got message, connecting")
|
||||
xmppClients[ev["room_id"]] = TrivialXmppClient(ev["room_id"], ev["user_id"])
|
||||
gevent.spawn(xmppClients[ev["room_id"]].xmppLoop)
|
||||
elif ev["type"] == "m.call.invite":
|
||||
print("Incoming call")
|
||||
# sdp = ev['content']['offer']['sdp']
|
||||
# print "sdp: %s" % (sdp)
|
||||
# xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||
# gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||
elif ev["type"] == "m.call.answer":
|
||||
print("Call answered")
|
||||
sdp = ev["content"]["answer"]["sdp"]
|
||||
if ev["room_id"] not in xmppClients:
|
||||
print("We didn't have a call for that room")
|
||||
continue
|
||||
# should probably check call ID too
|
||||
xmppCli = xmppClients[ev['room_id']]
|
||||
xmppCli = xmppClients[ev["room_id"]]
|
||||
xmppCli.sendAnswer(sdp)
|
||||
elif ev['type'] == 'm.call.hangup':
|
||||
if ev['room_id'] in xmppClients:
|
||||
xmppClients[ev['room_id']].stop()
|
||||
del xmppClients[ev['room_id']]
|
||||
elif ev["type"] == "m.call.hangup":
|
||||
if ev["room_id"] in xmppClients:
|
||||
xmppClients[ev["room_id"]].stop()
|
||||
del xmppClients[ev["room_id"]]
|
||||
|
||||
|
||||
class TrivialXmppClient:
|
||||
def __init__(self, matrixRoom, userId):
|
||||
@@ -131,130 +146,155 @@ class TrivialXmppClient:
|
||||
|
||||
def nextRid(self):
|
||||
self.rid += 1
|
||||
return '%d' % (self.rid)
|
||||
return "%d" % (self.rid)
|
||||
|
||||
def sendIq(self, xml):
|
||||
fullXml = "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>" % (self.nextRid(), self.sid, xml)
|
||||
#print "\t>>>%s" % (fullXml)
|
||||
fullXml = (
|
||||
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>"
|
||||
% (self.nextRid(), self.sid, xml)
|
||||
)
|
||||
# print "\t>>>%s" % (fullXml)
|
||||
return self.xmppPoke(fullXml)
|
||||
|
||||
def xmppPoke(self, xml):
|
||||
headers = {'Content-Type': 'application/xml'}
|
||||
headers = {"Content-Type": "application/xml"}
|
||||
req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
|
||||
resps = grequests.map([req])
|
||||
obj = BeautifulSoup(resps[0].content)
|
||||
return obj
|
||||
|
||||
def sendAnswer(self, answer):
|
||||
print "sdp from matrix client",answer
|
||||
p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--sdp'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
||||
print("sdp from matrix client", answer)
|
||||
p = subprocess.Popen(
|
||||
["node", "unjingle/unjingle.js", "--sdp"],
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
jingle, out_err = p.communicate(answer)
|
||||
jingle = jingle % {
|
||||
'tojid': self.callfrom,
|
||||
'action': 'session-accept',
|
||||
'initiator': self.callfrom,
|
||||
'responder': self.jid,
|
||||
'sid': self.callsid
|
||||
"tojid": self.callfrom,
|
||||
"action": "session-accept",
|
||||
"initiator": self.callfrom,
|
||||
"responder": self.jid,
|
||||
"sid": self.callsid,
|
||||
}
|
||||
print "answer jingle from sdp",jingle
|
||||
print("answer jingle from sdp", jingle)
|
||||
res = self.sendIq(jingle)
|
||||
print "reply from answer: ",res
|
||||
print("reply from answer: ", res)
|
||||
|
||||
self.ssrcs = {}
|
||||
jingleSoup = BeautifulSoup(jingle)
|
||||
for cont in jingleSoup.iq.jingle.findAll('content'):
|
||||
for cont in jingleSoup.iq.jingle.findAll("content"):
|
||||
if cont.description:
|
||||
self.ssrcs[cont['name']] = cont.description['ssrc']
|
||||
print "my ssrcs:",self.ssrcs
|
||||
self.ssrcs[cont["name"]] = cont.description["ssrc"]
|
||||
print("my ssrcs:", self.ssrcs)
|
||||
|
||||
gevent.joinall([
|
||||
gevent.spawn(self.advertiseSsrcs)
|
||||
])
|
||||
gevent.joinall([gevent.spawn(self.advertiseSsrcs)])
|
||||
|
||||
def advertiseSsrcs(self):
|
||||
time.sleep(7)
|
||||
print "SSRC spammer started"
|
||||
time.sleep(7)
|
||||
print("SSRC spammer started")
|
||||
while self.running:
|
||||
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % { 'tojid': "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid), 'nick': self.userId, 'assrc': self.ssrcs['audio'], 'vssrc': self.ssrcs['video'] }
|
||||
ssrcMsg = (
|
||||
"<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
|
||||
% {
|
||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||
"nick": self.userId,
|
||||
"assrc": self.ssrcs["audio"],
|
||||
"vssrc": self.ssrcs["video"],
|
||||
}
|
||||
)
|
||||
res = self.sendIq(ssrcMsg)
|
||||
print "reply from ssrc announce: ",res
|
||||
print("reply from ssrc announce: ", res)
|
||||
time.sleep(10)
|
||||
|
||||
|
||||
|
||||
def xmppLoop(self):
|
||||
self.matrixCallId = time.time()
|
||||
res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), HOST))
|
||||
res = self.xmppPoke(
|
||||
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>"
|
||||
% (self.nextRid(), HOST)
|
||||
)
|
||||
|
||||
print res
|
||||
self.sid = res.body['sid']
|
||||
print "sid %s" % (self.sid)
|
||||
print(res)
|
||||
self.sid = res.body["sid"]
|
||||
print("sid %s" % (self.sid))
|
||||
|
||||
res = self.sendIq("<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>")
|
||||
res = self.sendIq(
|
||||
"<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>"
|
||||
)
|
||||
|
||||
res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), self.sid, HOST))
|
||||
res = self.xmppPoke(
|
||||
"<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>"
|
||||
% (self.nextRid(), self.sid, HOST)
|
||||
)
|
||||
|
||||
res = self.sendIq("<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>")
|
||||
print res
|
||||
res = self.sendIq(
|
||||
"<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>"
|
||||
)
|
||||
print(res)
|
||||
|
||||
self.jid = res.body.iq.bind.jid.string
|
||||
print "jid: %s" % (self.jid)
|
||||
self.shortJid = self.jid.split('-')[0]
|
||||
print("jid: %s" % (self.jid))
|
||||
self.shortJid = self.jid.split("-")[0]
|
||||
|
||||
res = self.sendIq("<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>")
|
||||
res = self.sendIq(
|
||||
"<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>"
|
||||
)
|
||||
|
||||
#randomthing = res.body.iq['to']
|
||||
#whatsitpart = randomthing.split('-')[0]
|
||||
# randomthing = res.body.iq['to']
|
||||
# whatsitpart = randomthing.split('-')[0]
|
||||
|
||||
#print "other random bind thing: %s" % (randomthing)
|
||||
# print "other random bind thing: %s" % (randomthing)
|
||||
|
||||
# advertise presence to the jitsi room, with our nick
|
||||
res = self.sendIq("<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>" % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId))
|
||||
self.muc = {'users': []}
|
||||
for p in res.body.findAll('presence'):
|
||||
res = self.sendIq(
|
||||
"<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>"
|
||||
% (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId)
|
||||
)
|
||||
self.muc = {"users": []}
|
||||
for p in res.body.findAll("presence"):
|
||||
u = {}
|
||||
u['shortJid'] = p['from'].split('/')[1]
|
||||
u["shortJid"] = p["from"].split("/")[1]
|
||||
if p.c and p.c.nick:
|
||||
u['nick'] = p.c.nick.string
|
||||
self.muc['users'].append(u)
|
||||
print "muc: ",self.muc
|
||||
u["nick"] = p.c.nick.string
|
||||
self.muc["users"].append(u)
|
||||
print("muc: ", self.muc)
|
||||
|
||||
# wait for stuff
|
||||
while True:
|
||||
print "waiting..."
|
||||
print("waiting...")
|
||||
res = self.sendIq("")
|
||||
print "got from stream: ",res
|
||||
print("got from stream: ", res)
|
||||
if res.body.iq:
|
||||
jingles = res.body.iq.findAll('jingle')
|
||||
jingles = res.body.iq.findAll("jingle")
|
||||
if len(jingles):
|
||||
self.callfrom = res.body.iq['from']
|
||||
self.callfrom = res.body.iq["from"]
|
||||
self.handleInvite(jingles[0])
|
||||
elif 'type' in res.body and res.body['type'] == 'terminate':
|
||||
elif "type" in res.body and res.body["type"] == "terminate":
|
||||
self.running = False
|
||||
del xmppClients[self.matrixRoom]
|
||||
return
|
||||
return
|
||||
|
||||
def handleInvite(self, jingle):
|
||||
self.initiator = jingle['initiator']
|
||||
self.callsid = jingle['sid']
|
||||
p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--jingle'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
||||
print "raw jingle invite",str(jingle)
|
||||
self.initiator = jingle["initiator"]
|
||||
self.callsid = jingle["sid"]
|
||||
p = subprocess.Popen(
|
||||
["node", "unjingle/unjingle.js", "--jingle"],
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
print("raw jingle invite", str(jingle))
|
||||
sdp, out_err = p.communicate(str(jingle))
|
||||
print "transformed remote offer sdp",sdp
|
||||
print("transformed remote offer sdp", sdp)
|
||||
inviteEvent = {
|
||||
'offer': {
|
||||
'type': 'offer',
|
||||
'sdp': sdp
|
||||
},
|
||||
'call_id': self.matrixCallId,
|
||||
'version': 0,
|
||||
'lifetime': 30000
|
||||
"offer": {"type": "offer", "sdp": sdp},
|
||||
"call_id": self.matrixCallId,
|
||||
"version": 0,
|
||||
"lifetime": 30000,
|
||||
}
|
||||
matrixCli.sendEvent(self.matrixRoom, 'm.call.invite', inviteEvent)
|
||||
matrixCli.sendEvent(self.matrixRoom, "m.call.invite", inviteEvent)
|
||||
|
||||
matrixCli = TrivialMatrixClient(ACCESS_TOKEN)
|
||||
|
||||
gevent.joinall([
|
||||
gevent.spawn(matrixLoop)
|
||||
])
|
||||
matrixCli = TrivialMatrixClient(ACCESS_TOKEN) # Undefined name
|
||||
|
||||
gevent.joinall([gevent.spawn(matrixLoop)])
|
||||
|
||||
@@ -3,7 +3,7 @@ Purge history API examples
|
||||
|
||||
# `purge_history.sh`
|
||||
|
||||
A bash file, that uses the [purge history API](/docs/admin_api/README.rst) to
|
||||
A bash file, that uses the [purge history API](/docs/admin_api/purge_history_api.rst) to
|
||||
purge all messages in a list of rooms up to a certain event. You can select a
|
||||
timeframe or a number of messages that you want to keep in the room.
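
For reference, here is a minimal Python sketch of the flow the script wraps, assuming the endpoints behave as described in the purge history admin API docs; the homeserver URL, room ID, event ID and admin token are placeholders:

```python
import requests

HS = "http://localhost:8008"            # placeholder homeserver URL
TOKEN = "<server admin access_token>"   # placeholder: must belong to a server admin
ROOM_ID = "!room:example.com"           # placeholder room ID (URL-encode in practice)
EVENT_ID = "$event:example.com"         # purge everything before this event

# Start the purge (endpoint as documented in docs/admin_api/purge_history_api.rst)
res = requests.post(
    "%s/_synapse/admin/v1/purge_history/%s/%s" % (HS, ROOM_ID, EVENT_ID),
    params={"access_token": TOKEN},
    json={},
)
purge_id = res.json()["purge_id"]

# Poll the status endpoint to see whether the purge has completed
status = requests.get(
    "%s/_synapse/admin/v1/purge_history_status/%s" % (HS, purge_id),
    params={"access_token": TOKEN},
).json()
print(status)
```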
|
||||
|
||||
@@ -12,5 +12,5 @@ the script.
|
||||
|
||||
# `purge_remote_media.sh`
|
||||
|
||||
A bash file, that uses the [purge history API](/docs/admin_api/README.rst) to
|
||||
A bash file, that uses the [purge history API](/docs/admin_api/purge_history_api.rst) to
|
||||
purge all old cached remote media.
|
||||
|
||||
@@ -1,34 +1,40 @@
|
||||
#!/usr/bin/env python
|
||||
from __future__ import print_function
|
||||
from argparse import ArgumentParser
|
||||
import json
|
||||
import requests
|
||||
import sys
|
||||
import urllib
|
||||
|
||||
try:
|
||||
raw_input
|
||||
except NameError: # Python 3
|
||||
raw_input = input
|
||||
|
||||
|
||||
def _mkurl(template, kws):
|
||||
for key in kws:
|
||||
template = template.replace(key, kws[key])
|
||||
return template
|
||||
|
||||
|
||||
def main(hs, room_id, access_token, user_id_prefix, why):
|
||||
if not why:
|
||||
why = "Automated kick."
|
||||
print "Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix)
|
||||
print(
|
||||
"Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix)
|
||||
)
|
||||
room_state_url = _mkurl(
|
||||
"$HS/_matrix/client/api/v1/rooms/$ROOM/state?access_token=$TOKEN",
|
||||
{
|
||||
"$HS": hs,
|
||||
"$ROOM": room_id,
|
||||
"$TOKEN": access_token
|
||||
}
|
||||
{"$HS": hs, "$ROOM": room_id, "$TOKEN": access_token},
|
||||
)
|
||||
print "Getting room state => %s" % room_state_url
|
||||
print("Getting room state => %s" % room_state_url)
|
||||
res = requests.get(room_state_url)
|
||||
print "HTTP %s" % res.status_code
|
||||
print("HTTP %s" % res.status_code)
|
||||
state_events = res.json()
|
||||
if "error" in state_events:
|
||||
print "FATAL"
|
||||
print state_events
|
||||
print("FATAL")
|
||||
print(state_events)
|
||||
return
|
||||
|
||||
kick_list = []
|
||||
@@ -44,47 +50,40 @@ def main(hs, room_id, access_token, user_id_prefix, why):
|
||||
kick_list.append(event["state_key"])
|
||||
|
||||
if len(kick_list) == 0:
|
||||
print "No user IDs match the prefix '%s'" % user_id_prefix
|
||||
print("No user IDs match the prefix '%s'" % user_id_prefix)
|
||||
return
|
||||
|
||||
print "The following user IDs will be kicked from %s" % room_name
|
||||
print("The following user IDs will be kicked from %s" % room_name)
|
||||
for uid in kick_list:
|
||||
print uid
|
||||
print(uid)
|
||||
doit = raw_input("Continue? [Y]es\n")
|
||||
if len(doit) > 0 and doit.lower() == 'y':
|
||||
print "Kicking members..."
|
||||
if len(doit) > 0 and doit.lower() == "y":
|
||||
print("Kicking members...")
|
||||
# encode them all
|
||||
kick_list = [urllib.quote(uid) for uid in kick_list]
|
||||
for uid in kick_list:
|
||||
kick_url = _mkurl(
|
||||
"$HS/_matrix/client/api/v1/rooms/$ROOM/state/m.room.member/$UID?access_token=$TOKEN",
|
||||
{
|
||||
"$HS": hs,
|
||||
"$UID": uid,
|
||||
"$ROOM": room_id,
|
||||
"$TOKEN": access_token
|
||||
}
|
||||
{"$HS": hs, "$UID": uid, "$ROOM": room_id, "$TOKEN": access_token},
|
||||
)
|
||||
kick_body = {
|
||||
"membership": "leave",
|
||||
"reason": why
|
||||
}
|
||||
print "Kicking %s" % uid
|
||||
kick_body = {"membership": "leave", "reason": why}
|
||||
print("Kicking %s" % uid)
|
||||
res = requests.put(kick_url, data=json.dumps(kick_body))
|
||||
if res.status_code != 200:
|
||||
print "ERROR: HTTP %s" % res.status_code
|
||||
print("ERROR: HTTP %s" % res.status_code)
|
||||
if res.json().get("error"):
|
||||
print "ERROR: JSON %s" % res.json()
|
||||
|
||||
|
||||
print("ERROR: JSON %s" % res.json())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = ArgumentParser("Kick members in a room matching a certain user ID prefix.")
|
||||
parser.add_argument("-u","--user-id",help="The user ID prefix e.g. '@irc_'")
|
||||
parser.add_argument("-t","--token",help="Your access_token")
|
||||
parser.add_argument("-r","--room",help="The room ID to kick members in")
|
||||
parser.add_argument("-s","--homeserver",help="The base HS url e.g. http://matrix.org")
|
||||
parser.add_argument("-w","--why",help="Reason for the kick. Optional.")
|
||||
parser.add_argument("-u", "--user-id", help="The user ID prefix e.g. '@irc_'")
|
||||
parser.add_argument("-t", "--token", help="Your access_token")
|
||||
parser.add_argument("-r", "--room", help="The room ID to kick members in")
|
||||
parser.add_argument(
|
||||
"-s", "--homeserver", help="The base HS url e.g. http://matrix.org"
|
||||
)
|
||||
parser.add_argument("-w", "--why", help="Reason for the kick. Optional.")
|
||||
args = parser.parse_args()
|
||||
if not args.room or not args.token or not args.user_id or not args.homeserver:
|
||||
parser.print_help()
|
||||
|
||||
@@ -12,6 +12,7 @@ ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.%i --config-path=/
|
||||
ExecReload=/bin/kill -HUP $MAINPID
|
||||
Restart=always
|
||||
RestartSec=3
|
||||
SyslogIdentifier=matrix-synapse-%i
|
||||
|
||||
[Install]
|
||||
WantedBy=matrix-synapse.service
|
||||
|
||||
@@ -11,6 +11,7 @@ ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --confi
|
||||
ExecReload=/bin/kill -HUP $MAINPID
|
||||
Restart=always
|
||||
RestartSec=3
|
||||
SyslogIdentifier=matrix-synapse
|
||||
|
||||
[Install]
|
||||
WantedBy=matrix.target
|
||||
|
||||
@@ -22,10 +22,10 @@ Group=nogroup
|
||||
|
||||
WorkingDirectory=/opt/synapse
|
||||
ExecStart=/opt/synapse/env/bin/python -m synapse.app.homeserver --config-path=/opt/synapse/homeserver.yaml
|
||||
SyslogIdentifier=matrix-synapse
|
||||
|
||||
# adjust the cache factor if necessary
|
||||
# Environment=SYNAPSE_CACHE_FACTOR=2.0
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
|
||||
2  debian/build_virtualenv  (vendored)
@@ -43,7 +43,7 @@ dh_virtualenv \
|
||||
--preinstall="mock" \
|
||||
--extra-pip-arg="--no-cache-dir" \
|
||||
--extra-pip-arg="--compile" \
|
||||
--extras="all"
|
||||
--extras="all,systemd"
|
||||
|
||||
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
|
||||
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
|
||||
|
||||
50  debian/changelog  (vendored)
@@ -1,3 +1,53 @@
|
||||
matrix-synapse-py3 (1.1.0) stable; urgency=medium
|
||||
|
||||
[ Silke Hofstra ]
|
||||
* Include systemd-python to allow logging to the systemd journal.
|
||||
|
||||
[ Synapse Packaging team ]
|
||||
* New synapse release 1.1.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 04 Jul 2019 11:43:41 +0100
|
||||
|
||||
matrix-synapse-py3 (1.0.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.0.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Jun 2019 17:09:53 +0100
|
||||
|
||||
matrix-synapse-py3 (0.99.5.2) stable; urgency=medium
|
||||
|
||||
* New synapse release 0.99.5.2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 30 May 2019 16:28:07 +0100
|
||||
|
||||
matrix-synapse-py3 (0.99.5.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 0.99.5.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 22 May 2019 16:22:24 +0000
|
||||
|
||||
matrix-synapse-py3 (0.99.4) stable; urgency=medium
|
||||
|
||||
[ Christoph Müller ]
|
||||
* Configure the systemd units to have a log identifier of `matrix-synapse`
|
||||
|
||||
[ Synapse Packaging team ]
|
||||
* New synapse release 0.99.4.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 15 May 2019 13:58:08 +0100
|
||||
|
||||
matrix-synapse-py3 (0.99.3.2) stable; urgency=medium
|
||||
|
||||
* New synapse release 0.99.3.2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 03 May 2019 18:56:20 +0100
|
||||
|
||||
matrix-synapse-py3 (0.99.3.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 0.99.3.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 03 May 2019 16:02:43 +0100
|
||||
|
||||
matrix-synapse-py3 (0.99.3) stable; urgency=medium
|
||||
|
||||
[ Richard van der Hoff ]
|
||||
|
||||
1  debian/matrix-synapse.service  (vendored)
@@ -11,6 +11,7 @@ ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --confi
|
||||
ExecReload=/bin/kill -HUP $MAINPID
|
||||
Restart=always
|
||||
RestartSec=3
|
||||
SyslogIdentifier=matrix-synapse
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
2  debian/test/.gitignore  (vendored, new file)
@@ -0,0 +1,2 @@
|
||||
.vagrant
|
||||
*.log
|
||||
23  debian/test/provision.sh  (vendored, new file)
@@ -0,0 +1,23 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# provisioning script for vagrant boxes for testing the matrix-synapse debs.
|
||||
#
|
||||
# Will install the most recent matrix-synapse-py3 deb for this platform from
|
||||
# the /debs directory.
|
||||
|
||||
set -e
|
||||
|
||||
apt-get update
|
||||
apt-get install -y lsb-release
|
||||
|
||||
deb=`ls /debs/matrix-synapse-py3_*+$(lsb_release -cs)*.deb | sort | tail -n1`
|
||||
|
||||
debconf-set-selections <<EOF
|
||||
matrix-synapse matrix-synapse/report-stats boolean false
|
||||
matrix-synapse matrix-synapse/server-name string localhost:18448
|
||||
EOF
|
||||
|
||||
dpkg -i "$deb"
|
||||
|
||||
sed -i -e '/port: 8...$/{s/8448/18448/; s/8008/18008/}' -e '$aregistration_shared_secret: secret' /etc/matrix-synapse/homeserver.yaml
|
||||
systemctl restart matrix-synapse
|
||||
13  debian/test/stretch/Vagrantfile  (vendored, new file)
@@ -0,0 +1,13 @@
|
||||
# -*- mode: ruby -*-
|
||||
# vi: set ft=ruby :
|
||||
|
||||
ver = `cd ../../..; dpkg-parsechangelog -S Version`.strip()
|
||||
|
||||
Vagrant.configure("2") do |config|
|
||||
config.vm.box = "debian/stretch64"
|
||||
|
||||
config.vm.synced_folder ".", "/vagrant", disabled: true
|
||||
config.vm.synced_folder "../../../../debs", "/debs", type: "nfs"
|
||||
|
||||
config.vm.provision "shell", path: "../provision.sh"
|
||||
end
|
||||
10  debian/test/xenial/Vagrantfile  (vendored, new file)
@@ -0,0 +1,10 @@
|
||||
# -*- mode: ruby -*-
|
||||
# vi: set ft=ruby :
|
||||
|
||||
Vagrant.configure("2") do |config|
|
||||
config.vm.box = "ubuntu/xenial64"
|
||||
|
||||
config.vm.synced_folder ".", "/vagrant", disabled: true
|
||||
config.vm.synced_folder "../../../../debs", "/debs"
|
||||
config.vm.provision "shell", path: "../provision.sh"
|
||||
end
|
||||
@@ -1,9 +1,13 @@
|
||||
DO NOT USE THESE DEMO SERVERS IN PRODUCTION
|
||||
|
||||
Requires you to have done:
|
||||
python setup.py develop
|
||||
|
||||
|
||||
The demo start.sh will start three synapse servers on ports 8080, 8081 and 8082, with host names localhost:$port. This can be easily changed to `hostname`:$port in start.sh if required.
|
||||
It will also start a web server on port 8000 pointed at the webclient.
|
||||
The demo start.sh will start three synapse servers on ports 8080, 8081 and 8082, with host names localhost:$port. This can be easily changed to `hostname`:$port in start.sh if required.
|
||||
|
||||
To enable the servers to communicate, untrusted ssl certs are used. In order to do this the servers do not check the certs
|
||||
and are configured in a highly insecure way. Do not use these configuration files in production.
|
||||
|
||||
stop.sh will stop the synapse servers and the webclient.
|
||||
|
||||
|
||||
@@ -21,14 +21,76 @@ for port in 8080 8081 8082; do
|
||||
pushd demo/$port
|
||||
|
||||
#rm $DIR/etc/$port.config
|
||||
python -m synapse.app.homeserver \
|
||||
python3 -m synapse.app.homeserver \
|
||||
--generate-config \
|
||||
-H "localhost:$https_port" \
|
||||
--config-path "$DIR/etc/$port.config" \
|
||||
--report-stats no
|
||||
|
||||
printf '\n\n# Customisation made by demo/start.sh\n' >> $DIR/etc/$port.config
|
||||
echo 'enable_registration: true' >> $DIR/etc/$port.config
|
||||
if ! grep -F "Customisation made by demo/start.sh" -q $DIR/etc/$port.config; then
|
||||
printf '\n\n# Customisation made by demo/start.sh\n' >> $DIR/etc/$port.config
|
||||
|
||||
echo 'enable_registration: true' >> $DIR/etc/$port.config
|
||||
|
||||
# Warning, this heredoc depends on the interaction of tabs and spaces. Please don't
|
||||
# accidentally bork me with your fancy settings.
|
||||
listeners=$(cat <<-PORTLISTENERS
|
||||
# Configure server to listen on both $https_port and $port
|
||||
# This overrides some of the default settings above
|
||||
listeners:
|
||||
- port: $https_port
|
||||
type: http
|
||||
tls: true
|
||||
resources:
|
||||
- names: [client, federation]
|
||||
|
||||
- port: $port
|
||||
tls: false
|
||||
bind_addresses: ['::1', '127.0.0.1']
|
||||
type: http
|
||||
x_forwarded: true
|
||||
resources:
|
||||
- names: [client, federation]
|
||||
compress: false
|
||||
PORTLISTENERS
|
||||
)
|
||||
echo "${listeners}" >> $DIR/etc/$port.config
|
||||
|
||||
# Disable tls for the servers
|
||||
printf '\n\n# Disable tls on the servers.' >> $DIR/etc/$port.config
|
||||
echo '# DO NOT USE IN PRODUCTION' >> $DIR/etc/$port.config
|
||||
echo 'use_insecure_ssl_client_just_for_testing_do_not_use: true' >> $DIR/etc/$port.config
|
||||
echo 'federation_verify_certificates: false' >> $DIR/etc/$port.config
|
||||
|
||||
# Set tls paths
|
||||
echo "tls_certificate_path: \"$DIR/etc/localhost:$https_port.tls.crt\"" >> $DIR/etc/$port.config
|
||||
echo "tls_private_key_path: \"$DIR/etc/localhost:$https_port.tls.key\"" >> $DIR/etc/$port.config
|
||||
|
||||
# Generate tls keys
|
||||
openssl req -x509 -newkey rsa:4096 -keyout $DIR/etc/localhost\:$https_port.tls.key -out $DIR/etc/localhost\:$https_port.tls.crt -days 365 -nodes -subj "/O=matrix"
|
||||
|
||||
# Ignore keys from the trusted keys server
|
||||
echo '# Ignore keys from the trusted keys server' >> $DIR/etc/$port.config
|
||||
echo 'trusted_key_servers:' >> $DIR/etc/$port.config
|
||||
echo ' - server_name: "matrix.org"' >> $DIR/etc/$port.config
|
||||
echo ' accept_keys_insecurely: true' >> $DIR/etc/$port.config
|
||||
|
||||
# Reduce the blacklist
|
||||
blacklist=$(cat <<-BLACK
|
||||
# Set the blacklist so that it doesn't include 127.0.0.1
|
||||
federation_ip_range_blacklist:
|
||||
- '10.0.0.0/8'
|
||||
- '172.16.0.0/12'
|
||||
- '192.168.0.0/16'
|
||||
- '100.64.0.0/10'
|
||||
- '169.254.0.0/16'
|
||||
- '::1/128'
|
||||
- 'fe80::/64'
|
||||
- 'fc00::/7'
|
||||
BLACK
|
||||
)
|
||||
echo "${blacklist}" >> $DIR/etc/$port.config
|
||||
fi
|
||||
|
||||
# Check script parameters
|
||||
if [ $# -eq 1 ]; then
|
||||
@@ -55,7 +117,7 @@ for port in 8080 8081 8082; do
|
||||
echo "report_stats: false" >> $DIR/etc/$port.config
|
||||
fi
|
||||
|
||||
python -m synapse.app.homeserver \
|
||||
python3 -m synapse.app.homeserver \
|
||||
--config-path "$DIR/etc/$port.config" \
|
||||
-D \
|
||||
-vv \
|
||||
|
||||
@@ -6,23 +6,25 @@ import cgi, logging
|
||||
|
||||
from daemonize import Daemonize
|
||||
|
||||
|
||||
class SimpleHTTPRequestHandlerWithPOST(SimpleHTTPServer.SimpleHTTPRequestHandler):
|
||||
UPLOAD_PATH = "upload"
|
||||
|
||||
"""
|
||||
Accept all post request as file upload
|
||||
"""
|
||||
|
||||
def do_POST(self):
|
||||
|
||||
path = os.path.join(self.UPLOAD_PATH, os.path.basename(self.path))
|
||||
length = self.headers['content-length']
|
||||
length = self.headers["content-length"]
|
||||
data = self.rfile.read(int(length))
|
||||
|
||||
with open(path, 'wb') as fh:
|
||||
with open(path, "wb") as fh:
|
||||
fh.write(data)
|
||||
|
||||
self.send_response(200)
|
||||
self.send_header('Content-Type', 'application/json')
|
||||
self.send_header("Content-Type", "application/json")
|
||||
self.end_headers()
|
||||
|
||||
# Return the absolute path of the uploaded file
|
||||
@@ -33,30 +35,25 @@ def setup():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("directory")
|
||||
parser.add_argument("-p", "--port", dest="port", type=int, default=8080)
|
||||
parser.add_argument('-P', "--pid-file", dest="pid", default="web.pid")
|
||||
parser.add_argument("-P", "--pid-file", dest="pid", default="web.pid")
|
||||
args = parser.parse_args()
|
||||
|
||||
# Get absolute path to directory to serve, as daemonize changes to '/'
|
||||
os.chdir(args.directory)
|
||||
dr = os.getcwd()
|
||||
|
||||
httpd = BaseHTTPServer.HTTPServer(
|
||||
('', args.port),
|
||||
SimpleHTTPRequestHandlerWithPOST
|
||||
)
|
||||
httpd = BaseHTTPServer.HTTPServer(("", args.port), SimpleHTTPRequestHandlerWithPOST)
|
||||
|
||||
def run():
|
||||
os.chdir(dr)
|
||||
httpd.serve_forever()
|
||||
|
||||
daemon = Daemonize(
|
||||
app="synapse-webclient",
|
||||
pid=args.pid,
|
||||
action=run,
|
||||
auto_close_fds=False,
|
||||
)
|
||||
app="synapse-webclient", pid=args.pid, action=run, auto_close_fds=False
|
||||
)
|
||||
|
||||
daemon.start()
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
setup()
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
# docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.6 .
|
||||
#
|
||||
|
||||
ARG PYTHON_VERSION=2
|
||||
ARG PYTHON_VERSION=3.7
|
||||
|
||||
###
|
||||
### Stage 0: builder
|
||||
@@ -57,6 +57,7 @@ RUN pip install --prefix="/install" --no-warn-script-location \
|
||||
|
||||
FROM docker.io/python:${PYTHON_VERSION}-alpine3.8
|
||||
|
||||
# xmlsec is required for saml support
|
||||
RUN apk add --no-cache --virtual .runtime_deps \
|
||||
libffi \
|
||||
libjpeg-turbo \
|
||||
@@ -64,7 +65,9 @@ RUN apk add --no-cache --virtual .runtime_deps \
|
||||
libxslt \
|
||||
libpq \
|
||||
zlib \
|
||||
su-exec
|
||||
su-exec \
|
||||
tzdata \
|
||||
xmlsec
|
||||
|
||||
COPY --from=builder /install /usr/local
|
||||
COPY ./docker/start.py /start.py
|
||||
|
||||
@@ -50,12 +50,15 @@ RUN apt-get update -qq -o Acquire::Languages=none \
|
||||
debhelper \
|
||||
devscripts \
|
||||
dh-systemd \
|
||||
libsystemd-dev \
|
||||
lsb-release \
|
||||
pkg-config \
|
||||
python3-dev \
|
||||
python3-pip \
|
||||
python3-setuptools \
|
||||
python3-venv \
|
||||
sqlite3
|
||||
sqlite3 \
|
||||
libpq-dev
|
||||
|
||||
COPY --from=builder /dh-virtualenv_1.1-1_all.deb /
|
||||
|
||||
|
||||
@@ -3,10 +3,10 @@
|
||||
FROM matrixdotorg/sytest:latest
|
||||
|
||||
# The Sytest image doesn't come with python, so install that
|
||||
RUN apt-get -qq install -y python python-dev python-pip
|
||||
RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip
|
||||
|
||||
# We need tox to run the tests in run_pg_tests.sh
|
||||
RUN pip install tox
|
||||
RUN python3 -m pip install tox
|
||||
|
||||
ADD run_pg_tests.sh /pg_tests.sh
|
||||
ENTRYPOINT /pg_tests.sh
|
||||
|
||||
228  docker/README.md
@@ -6,39 +6,11 @@ postgres database.
|
||||
|
||||
The image also does *not* provide a TURN server.
|
||||
|
||||
## Run
|
||||
|
||||
### Using docker-compose (easier)
|
||||
|
||||
This image is designed to run either with an automatically generated
|
||||
configuration file or with a custom configuration that requires manual editing.
|
||||
|
||||
An easy way to make use of this image is via docker-compose. See the
|
||||
[contrib/docker](../contrib/docker) section of the synapse project for
|
||||
examples.
|
||||
|
||||
### Without Compose (harder)
|
||||
|
||||
If you do not wish to use Compose, you may still run this image using plain
|
||||
Docker commands. Note that the following is just a guideline and you may need
|
||||
to add parameters to the docker run command to account for the network situation
|
||||
with your postgres database.
|
||||
|
||||
```
|
||||
docker run \
|
||||
-d \
|
||||
--name synapse \
|
||||
--mount type=volume,src=synapse-data,dst=/data \
|
||||
-e SYNAPSE_SERVER_NAME=my.matrix.host \
|
||||
-e SYNAPSE_REPORT_STATS=yes \
|
||||
-p 8448:8448 \
|
||||
matrixdotorg/synapse:latest
|
||||
```
|
||||
|
||||
## Volumes
|
||||
|
||||
The image expects a single volume, located at ``/data``, that will hold:
|
||||
By default, the image expects a single volume, located at ``/data``, that will hold:
|
||||
|
||||
* configuration files;
|
||||
* temporary files during uploads;
|
||||
* uploaded media and thumbnails;
|
||||
* the SQLite database if you do not configure postgres;
|
||||
@@ -53,128 +25,106 @@ In order to setup an application service, simply create an ``appservices``
|
||||
directory in the data volume and write the application service Yaml
|
||||
configuration file there. Multiple application services are supported.
|
||||
|
||||
## TLS certificates
|
||||
## Generating a configuration file
|
||||
|
||||
Synapse requires a valid TLS certificate. You can do one of the following:
|
||||
The first step is to generate a valid config file. To do this, you can run the
|
||||
image with the `generate` commandline option.
|
||||
|
||||
* Provide your own certificate and key (as
|
||||
`${DATA_PATH}/${SYNAPSE_SERVER_NAME}.tls.crt` and
|
||||
`${DATA_PATH}/${SYNAPSE_SERVER_NAME}.tls.key`, or elsewhere by providing an
|
||||
entire config as `${SYNAPSE_CONFIG_PATH}`). In this case, you should forward
|
||||
traffic to port 8448 in the container, for example with `-p 443:8448`.
|
||||
|
||||
* Use a reverse proxy to terminate incoming TLS, and forward the plain http
|
||||
traffic to port 8008 in the container. In this case you should set `-e
|
||||
SYNAPSE_NO_TLS=1`.
|
||||
|
||||
* Use the ACME (Let's Encrypt) support built into Synapse. This requires
|
||||
`${SYNAPSE_SERVER_NAME}` port 80 to be forwarded to port 8009 in the
|
||||
container, for example with `-p 80:8009`. To enable it in the docker
|
||||
container, set `-e SYNAPSE_ACME=1`.
|
||||
|
||||
If you don't do any of these, Synapse will fail to start with an error similar to:
|
||||
|
||||
synapse.config._base.ConfigError: Error accessing file '/data/<server_name>.tls.crt' (config for tls_certificate): No such file or directory
|
||||
|
||||
## Environment
|
||||
|
||||
Unless you specify a custom path for the configuration file, a very generic
|
||||
file will be generated, based on the following environment settings.
|
||||
These are a good starting point for setting up your own deployment.
|
||||
|
||||
Global settings:
|
||||
|
||||
* ``UID``, the user id Synapse will run as [default 991]
|
||||
* ``GID``, the group id Synapse will run as [default 991]
|
||||
* ``SYNAPSE_CONFIG_PATH``, path to a custom config file
|
||||
|
||||
If ``SYNAPSE_CONFIG_PATH`` is set, you should generate a configuration file
|
||||
then customize it manually: see [Generating a config
|
||||
file](#generating-a-config-file).
|
||||
|
||||
Otherwise, a dynamic configuration file will be used.
|
||||
|
||||
### Environment variables used to build a dynamic configuration file
|
||||
|
||||
The following environment variables are used to build the configuration file
|
||||
when ``SYNAPSE_CONFIG_PATH`` is not set.
|
||||
|
||||
* ``SYNAPSE_SERVER_NAME`` (mandatory), the server public hostname.
|
||||
* ``SYNAPSE_REPORT_STATS``, (mandatory, ``yes`` or ``no``), enable anonymous
|
||||
statistics reporting back to the Matrix project which helps us to get funding.
|
||||
* ``SYNAPSE_NO_TLS``, set this variable to disable TLS in Synapse (use this if
|
||||
you run your own TLS-capable reverse proxy).
|
||||
* ``SYNAPSE_ENABLE_REGISTRATION``, set this variable to enable registration on
|
||||
the Synapse instance.
|
||||
* ``SYNAPSE_ALLOW_GUEST``, set this variable to allow guest joining this server.
|
||||
* ``SYNAPSE_EVENT_CACHE_SIZE``, the event cache size [default `10K`].
|
||||
* ``SYNAPSE_RECAPTCHA_PUBLIC_KEY``, set this variable to the recaptcha public
|
||||
key in order to enable recaptcha upon registration.
|
||||
* ``SYNAPSE_RECAPTCHA_PRIVATE_KEY``, set this variable to the recaptcha private
|
||||
key in order to enable recaptcha upon registration.
|
||||
* ``SYNAPSE_TURN_URIS``, set this variable to the comma-separated list of TURN
|
||||
uris to enable TURN for this homeserver.
|
||||
* ``SYNAPSE_TURN_SECRET``, set this to the TURN shared secret if required.
|
||||
* ``SYNAPSE_MAX_UPLOAD_SIZE``, set this variable to change the max upload size
|
||||
[default `10M`].
|
||||
* ``SYNAPSE_ACME``: set this to enable the ACME certificate renewal support.
|
||||
|
||||
Shared secrets, that will be initialized to random values if not set:
|
||||
|
||||
* ``SYNAPSE_REGISTRATION_SHARED_SECRET``, secret for registering users if
|
||||
registration is disabled.
|
||||
* ``SYNAPSE_MACAROON_SECRET_KEY`` secret for signing access tokens
|
||||
to the server.
|
||||
|
||||
Database specific values (will use SQLite if not set):
|
||||
|
||||
* `POSTGRES_DB` - The database name for the synapse postgres
|
||||
database. [default: `synapse`]
|
||||
* `POSTGRES_HOST` - The host of the postgres database if you wish to use
|
||||
postgresql instead of sqlite3. [default: `db` which is useful when using a
|
||||
container on the same docker network in a compose file where the postgres
|
||||
service is called `db`]
|
||||
* `POSTGRES_PASSWORD` - The password for the synapse postgres database. **If
|
||||
this is set then postgres will be used instead of sqlite3.** [default: none]
|
||||
**NOTE**: You are highly encouraged to use postgresql! Please use the compose
|
||||
file to make it easier to deploy.
|
||||
* `POSTGRES_USER` - The user for the synapse postgres database. [default:
|
||||
`synapse`]
|
||||
|
||||
Mail server specific values (will not send emails if not set):
|
||||
|
||||
* ``SYNAPSE_SMTP_HOST``, hostname to the mail server.
|
||||
* ``SYNAPSE_SMTP_PORT``, TCP port for accessing the mail server [default
|
||||
``25``].
|
||||
* ``SYNAPSE_SMTP_USER``, username for authenticating against the mail server if
|
||||
any.
|
||||
* ``SYNAPSE_SMTP_PASSWORD``, password for authenticating against the mail
|
||||
server if any.
|
||||
|
||||
### Generating a config file
|
||||
|
||||
It is possible to generate a basic configuration file for use with
|
||||
`SYNAPSE_CONFIG_PATH` using the `generate` commandline option. You will need to
|
||||
specify values for `SYNAPSE_CONFIG_PATH`, `SYNAPSE_SERVER_NAME` and
|
||||
`SYNAPSE_REPORT_STATS`, and mount a docker volume to store the data on. For
|
||||
example:
|
||||
You will need to specify values for the `SYNAPSE_SERVER_NAME` and
|
||||
`SYNAPSE_REPORT_STATS` environment variables, and mount a docker volume to store
|
||||
the configuration on. For example:
|
||||
|
||||
```
|
||||
docker run -it --rm
|
||||
docker run -it --rm \
|
||||
--mount type=volume,src=synapse-data,dst=/data \
|
||||
-e SYNAPSE_CONFIG_PATH=/data/homeserver.yaml \
|
||||
-e SYNAPSE_SERVER_NAME=my.matrix.host \
|
||||
-e SYNAPSE_REPORT_STATS=yes \
|
||||
matrixdotorg/synapse:latest generate
|
||||
```
|
||||
|
||||
This will generate a `homeserver.yaml` in (typically)
|
||||
`/var/lib/docker/volumes/synapse-data/_data`, which you can then customise and
|
||||
use with:
|
||||
For information on picking a suitable server name, see
|
||||
https://github.com/matrix-org/synapse/blob/master/INSTALL.md.
|
||||
|
||||
The above command will generate a `homeserver.yaml` in (typically)
|
||||
`/var/lib/docker/volumes/synapse-data/_data`. You should check this file, and
|
||||
customise it to your needs.
|
||||
|
||||
The following environment variables are supported in `generate` mode:
|
||||
|
||||
* `SYNAPSE_SERVER_NAME` (mandatory): the server public hostname.
|
||||
* `SYNAPSE_REPORT_STATS` (mandatory, `yes` or `no`): whether to enable
|
||||
anonymous statistics reporting.
|
||||
* `SYNAPSE_CONFIG_DIR`: where additional config files (such as the log config
|
||||
and event signing key) will be stored. Defaults to `/data`.
|
||||
* `SYNAPSE_CONFIG_PATH`: path to the file to be generated. Defaults to
|
||||
`<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
|
||||
* `SYNAPSE_DATA_DIR`: where the generated config will put persistent data
|
||||
such as the database and media store. Defaults to `/data`.
|
||||
* `UID`, `GID`: the user id and group id to use for creating the data
|
||||
directories. Defaults to `991`, `991`.
|
||||
|
||||
## Running synapse
|
||||
|
||||
Once you have a valid configuration file, you can start synapse as follows:
|
||||
|
||||
```
|
||||
docker run -d --name synapse \
|
||||
--mount type=volume,src=synapse-data,dst=/data \
|
||||
-e SYNAPSE_CONFIG_PATH=/data/homeserver.yaml \
|
||||
-p 8008:8008 \
|
||||
matrixdotorg/synapse:latest
|
||||
```
|
||||
|
||||
You can then check that it has started correctly with:
|
||||
|
||||
```
|
||||
docker logs synapse
|
||||
```
|
||||
|
||||
If all is well, you should now be able to connect to http://localhost:8008 and
|
||||
see a confirmation message.
|
||||
|
||||
The following environment variables are supported in run mode:
|
||||
|
||||
* `SYNAPSE_CONFIG_DIR`: where additional config files are stored. Defaults to
|
||||
`/data`.
|
||||
* `SYNAPSE_CONFIG_PATH`: path to the config file. Defaults to
|
||||
`<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
|
||||
* `UID`, `GID`: the user and group id to run Synapse as. Defaults to `991`, `991`.
|
||||
* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
|
||||
|
||||
## TLS support
|
||||
|
||||
The default configuration exposes a single HTTP port: http://localhost:8008. It
|
||||
is suitable for local testing, but for any practical use, you will either need
|
||||
to use a reverse proxy, or configure Synapse to expose an HTTPS port.
|
||||
|
||||
For documentation on using a reverse proxy, see
|
||||
https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst.
|
||||
|
||||
For more information on enabling TLS support in synapse itself, see
|
||||
https://github.com/matrix-org/synapse/blob/master/INSTALL.md#tls-certificates. Of
|
||||
course, you will need to expose the TLS port from the container with a `-p`
|
||||
argument to `docker run`.
|
||||
|
||||
## Legacy dynamic configuration file support
|
||||
|
||||
For backwards-compatibility only, the docker image supports creating a dynamic
|
||||
configuration file based on environment variables. This is now deprecated, but
|
||||
is enabled when the `SYNAPSE_SERVER_NAME` variable is set (and `generate` is
|
||||
not given).
|
||||
|
||||
To migrate from a dynamic configuration file to a static one, run the docker
|
||||
container once with the environment variables set, and `migrate_config`
|
||||
commandline option. For example:
|
||||
|
||||
```
|
||||
docker run -it --rm \
|
||||
--mount type=volume,src=synapse-data,dst=/data \
|
||||
-e SYNAPSE_SERVER_NAME=my.matrix.host \
|
||||
-e SYNAPSE_REPORT_STATS=yes \
|
||||
matrixdotorg/synapse:latest migrate_config
|
||||
```
|
||||
|
||||
This will generate the same configuration file as the legacy mode used, but
|
||||
will store it in `/data/homeserver.yaml` instead of a temporary location. You
|
||||
can then use it as shown above at [Running synapse](#running-synapse).
|
||||
|
||||
@@ -21,7 +21,7 @@ server_name: "{{ SYNAPSE_SERVER_NAME }}"
|
||||
pid_file: /homeserver.pid
|
||||
web_client: False
|
||||
soft_file_limit: 0
|
||||
log_config: "/compiled/log.config"
|
||||
log_config: "{{ SYNAPSE_LOG_CONFIG }}"
|
||||
|
||||
## Ports ##
|
||||
|
||||
@@ -207,22 +207,3 @@ perspectives:
|
||||
|
||||
password_config:
|
||||
enabled: true
|
||||
|
||||
{% if SYNAPSE_SMTP_HOST %}
|
||||
email:
|
||||
enable_notifs: false
|
||||
smtp_host: "{{ SYNAPSE_SMTP_HOST }}"
|
||||
smtp_port: {{ SYNAPSE_SMTP_PORT or "25" }}
|
||||
smtp_user: "{{ SYNAPSE_SMTP_USER }}"
|
||||
smtp_pass: "{{ SYNAPSE_SMTP_PASSWORD }}"
|
||||
require_transport_security: False
|
||||
notif_from: "{{ SYNAPSE_SMTP_FROM or "hostmaster@" + SYNAPSE_SERVER_NAME }}"
|
||||
app_name: Matrix
|
||||
# if template_dir is unset, uses the example templates that are part of
|
||||
# the Synapse distribution.
|
||||
#template_dir: res/templates
|
||||
notif_template_html: notif_mail.html
|
||||
notif_template_text: notif_mail.txt
|
||||
notif_for_new_users: True
|
||||
riot_base_url: "https://{{ SYNAPSE_SERVER_NAME }}"
|
||||
{% endif %}
|
||||
|
||||
@@ -16,14 +16,11 @@ handlers:
|
||||
filters: [context]
|
||||
|
||||
loggers:
|
||||
synapse:
|
||||
level: {{ SYNAPSE_LOG_LEVEL or "WARNING" }}
|
||||
|
||||
synapse.storage.SQL:
|
||||
# beware: increasing this to DEBUG will make synapse log sensitive
|
||||
# information such as access tokens.
|
||||
level: {{ SYNAPSE_LOG_LEVEL or "WARNING" }}
|
||||
level: INFO
|
||||
|
||||
root:
|
||||
level: {{ SYNAPSE_LOG_LEVEL or "WARNING" }}
|
||||
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
|
||||
handlers: [console]
|
||||
|
||||
@@ -17,4 +17,4 @@ su -c '/usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start'
|
||||
# Run the tests
|
||||
cd /src
|
||||
export TRIAL_FLAGS="-j 4"
|
||||
tox --workdir=/tmp -e py27-postgres
|
||||
tox --workdir=/tmp -e py35-postgres
|
||||
|
||||
268  docker/start.py
@@ -1,77 +1,243 @@
|
||||
#!/usr/local/bin/python
|
||||
|
||||
import jinja2
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import glob
|
||||
import codecs
|
||||
import glob
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import jinja2
|
||||
|
||||
|
||||
# Utility functions
|
||||
convert = lambda src, dst, environ: open(dst, "w").write(jinja2.Template(open(src).read()).render(**environ))
|
||||
def log(txt):
|
||||
print(txt, file=sys.stderr)
|
||||
|
||||
def check_arguments(environ, args):
|
||||
for argument in args:
|
||||
if argument not in environ:
|
||||
print("Environment variable %s is mandatory, exiting." % argument)
|
||||
sys.exit(2)
|
||||
|
||||
def generate_secrets(environ, secrets):
|
||||
def error(txt):
|
||||
log(txt)
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
def convert(src, dst, environ):
|
||||
"""Generate a file from a template
|
||||
|
||||
Args:
|
||||
src (str): path to input file
|
||||
dst (str): path to file to write
|
||||
environ (dict): environment dictionary, for replacement mappings.
|
||||
"""
|
||||
with open(src) as infile:
|
||||
template = infile.read()
|
||||
rendered = jinja2.Template(template).render(**environ)
|
||||
with open(dst, "w") as outfile:
|
||||
outfile.write(rendered)
|
||||
|
||||
|
||||
def generate_config_from_template(config_dir, config_path, environ, ownership):
|
||||
"""Generate a homeserver.yaml from environment variables
|
||||
|
||||
Args:
|
||||
config_dir (str): where to put generated config files
|
||||
config_path (str): where to put the main config file
|
||||
environ (dict): environment dictionary
|
||||
ownership (str): "<user>:<group>" string which will be used to set
|
||||
ownership of the generated configs
|
||||
"""
|
||||
for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
|
||||
if v not in environ:
|
||||
error(
|
||||
"Environment variable '%s' is mandatory when generating a config file."
|
||||
% (v,)
|
||||
)
|
||||
|
||||
# populate some params from data files (if they exist, else create new ones)
|
||||
environ = environ.copy()
|
||||
secrets = {
|
||||
"registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
|
||||
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
|
||||
}
|
||||
|
||||
for name, secret in secrets.items():
|
||||
if secret not in environ:
|
||||
filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name)
|
||||
|
||||
# if the file already exists, load in the existing value; otherwise,
|
||||
# generate a new secret and write it to a file
|
||||
|
||||
if os.path.exists(filename):
|
||||
with open(filename) as handle: value = handle.read()
|
||||
log("Reading %s from %s" % (secret, filename))
|
||||
with open(filename) as handle:
|
||||
value = handle.read()
|
||||
else:
|
||||
print("Generating a random secret for {}".format(name))
|
||||
log("Generating a random secret for {}".format(secret))
|
||||
value = codecs.encode(os.urandom(32), "hex").decode()
|
||||
with open(filename, "w") as handle: handle.write(value)
|
||||
with open(filename, "w") as handle:
|
||||
handle.write(value)
|
||||
environ[secret] = value
|
||||
|
||||
# Prepare the configuration
|
||||
mode = sys.argv[1] if len(sys.argv) > 1 else None
|
||||
environ = os.environ.copy()
|
||||
ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))
|
||||
args = ["python", "-m", "synapse.app.homeserver"]
|
||||
environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
|
||||
if not os.path.exists(config_dir):
|
||||
os.mkdir(config_dir)
|
||||
|
||||
# In generate mode, generate a configuration, missing keys, then exit
|
||||
if mode == "generate":
|
||||
check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH"))
|
||||
args += [
|
||||
"--server-name", environ["SYNAPSE_SERVER_NAME"],
|
||||
"--report-stats", environ["SYNAPSE_REPORT_STATS"],
|
||||
"--config-path", environ["SYNAPSE_CONFIG_PATH"],
|
||||
"--generate-config"
|
||||
# Convert SYNAPSE_NO_TLS to boolean if exists
|
||||
if "SYNAPSE_NO_TLS" in environ:
|
||||
tlsanswerstring = str.lower(environ["SYNAPSE_NO_TLS"])
|
||||
if tlsanswerstring in ("true", "on", "1", "yes"):
|
||||
environ["SYNAPSE_NO_TLS"] = True
|
||||
else:
|
||||
if tlsanswerstring in ("false", "off", "0", "no"):
|
||||
environ["SYNAPSE_NO_TLS"] = False
|
||||
else:
|
||||
error(
|
||||
'Environment variable "SYNAPSE_NO_TLS" found but value "'
|
||||
+ tlsanswerstring
|
||||
+ '" unrecognized; exiting.'
|
||||
)
|
||||
|
||||
if "SYNAPSE_LOG_CONFIG" not in environ:
|
||||
environ["SYNAPSE_LOG_CONFIG"] = config_dir + "/log.config"
|
||||
|
||||
log("Generating synapse config file " + config_path)
|
||||
convert("/conf/homeserver.yaml", config_path, environ)
|
||||
|
||||
log_config_file = environ["SYNAPSE_LOG_CONFIG"]
|
||||
log("Generating log config file " + log_config_file)
|
||||
convert("/conf/log.config", log_config_file, environ)
|
||||
|
||||
subprocess.check_output(["chown", "-R", ownership, "/data"])
|
||||
|
||||
# Hopefully we already have a signing key, but generate one if not.
|
||||
subprocess.check_output(
|
||||
[
|
||||
"su-exec",
|
||||
ownership,
|
||||
"python",
|
||||
"-m",
|
||||
"synapse.app.homeserver",
|
||||
"--config-path",
|
||||
config_path,
|
||||
# tell synapse to put generated keys in /data rather than /compiled
|
||||
"--keys-directory",
|
||||
config_dir,
|
||||
"--generate-keys",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def run_generate_config(environ, ownership):
|
||||
"""Run synapse with a --generate-config param to generate a template config file
|
||||
|
||||
Args:
|
||||
environ (dict): env var dict
|
||||
ownership (str): "userid:groupid" arg for chmod
|
||||
|
||||
Never returns.
|
||||
"""
|
||||
for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
|
||||
if v not in environ:
|
||||
error("Environment variable '%s' is mandatory in `generate` mode." % (v,))
|
||||
|
||||
server_name = environ["SYNAPSE_SERVER_NAME"]
|
||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
||||
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
||||
|
||||
# create a suitable log config from our template
|
||||
log_config_file = "%s/%s.log.config" % (config_dir, server_name)
|
||||
if not os.path.exists(log_config_file):
|
||||
log("Creating log config %s" % (log_config_file,))
|
||||
convert("/conf/log.config", log_config_file, environ)
|
||||
|
||||
# make sure that synapse has perms to write to the data dir.
|
||||
subprocess.check_output(["chown", ownership, data_dir])
|
||||
|
||||
args = [
|
||||
"python",
|
||||
"-m",
|
||||
"synapse.app.homeserver",
|
||||
"--server-name",
|
||||
server_name,
|
||||
"--report-stats",
|
||||
environ["SYNAPSE_REPORT_STATS"],
|
||||
"--config-path",
|
||||
config_path,
|
||||
"--config-directory",
|
||||
config_dir,
|
||||
"--data-directory",
|
||||
data_dir,
|
||||
"--generate-config",
|
||||
"--open-private-ports",
|
||||
]
|
||||
# log("running %s" % (args, ))
|
||||
os.execv("/usr/local/bin/python", args)
|
||||
|
||||
# In normal mode, generate missing keys if any, then run synapse
|
||||
else:
|
||||
if "SYNAPSE_CONFIG_PATH" in environ:
|
||||
config_path = environ["SYNAPSE_CONFIG_PATH"]
|
||||
else:
|
||||
check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"))
|
||||
generate_secrets(environ, {
|
||||
"registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
|
||||
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY"
|
||||
})
|
||||
environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
|
||||
if not os.path.exists("/compiled"): os.mkdir("/compiled")
|
||||
|
||||
def main(args, environ):
|
||||
mode = args[1] if len(args) > 1 else None
|
||||
ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))
|
||||
|
||||
# In generate mode, generate a configuration and missing keys, then exit
|
||||
if mode == "generate":
|
||||
return run_generate_config(environ, ownership)
|
||||
|
||||
if mode == "migrate_config":
|
||||
# generate a config based on environment vars.
|
||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||
config_path = environ.get(
|
||||
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
||||
)
|
||||
return generate_config_from_template(
|
||||
config_dir, config_path, environ, ownership
|
||||
)
|
||||
|
||||
if mode is not None:
|
||||
error("Unknown execution mode '%s'" % (mode,))
|
||||
|
||||
if "SYNAPSE_SERVER_NAME" in environ:
|
||||
# backwards-compatibility generate-a-config-on-the-fly mode
|
||||
if "SYNAPSE_CONFIG_PATH" in environ:
|
||||
error(
|
||||
"SYNAPSE_SERVER_NAME and SYNAPSE_CONFIG_PATH are mutually exclusive "
|
||||
"except in `generate` or `migrate_config` mode."
|
||||
)
|
||||
|
||||
config_path = "/compiled/homeserver.yaml"
|
||||
log(
|
||||
"Generating config file '%s' on-the-fly from environment variables.\n"
|
||||
"Note that this mode is deprecated. You can migrate to a static config\n"
|
||||
"file by running with 'migrate_config'. See the README for more details."
|
||||
% (config_path,)
|
||||
)
|
||||
|
||||
convert("/conf/homeserver.yaml", config_path, environ)
|
||||
convert("/conf/log.config", "/compiled/log.config", environ)
|
||||
subprocess.check_output(["chown", "-R", ownership, "/data"])
|
||||
generate_config_from_template("/compiled", config_path, environ, ownership)
|
||||
else:
|
||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||
config_path = environ.get(
|
||||
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
||||
)
|
||||
if not os.path.exists(config_path):
|
||||
error(
|
||||
"Config file '%s' does not exist. You should either create a new "
|
||||
"config file by running with the `generate` argument (and then edit "
|
||||
"the resulting file before restarting) or specify the path to an "
|
||||
"existing config file with the SYNAPSE_CONFIG_PATH variable."
|
||||
% (config_path,)
|
||||
)
|
||||
|
||||
log("Starting synapse with config file " + config_path)
|
||||
|
||||
args += [
|
||||
"--config-path", config_path,
|
||||
|
||||
# tell synapse to put any generated keys in /data rather than /compiled
|
||||
"--keys-directory", "/data",
|
||||
args = [
|
||||
"su-exec",
|
||||
ownership,
|
||||
"python",
|
||||
"-m",
|
||||
"synapse.app.homeserver",
|
||||
"--config-path",
|
||||
config_path,
|
||||
]
|
||||
os.execv("/sbin/su-exec", args)
|
||||
|
||||
# Generate missing keys and start synapse
|
||||
subprocess.check_output(args + ["--generate-keys"])
|
||||
os.execv("/sbin/su-exec", ["su-exec", ownership] + args)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main(sys.argv, os.environ)
|
||||
|
||||
@@ -7,6 +7,7 @@ Requires a public/private key pair from:
|
||||
|
||||
https://developers.google.com/recaptcha/
|
||||
|
||||
Must be a reCAPTCHA v2 key using the "I'm not a robot" Checkbox option
|
||||
|
||||
Setting ReCaptcha Keys
|
||||
----------------------
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# MSC1711 Certificates FAQ
|
||||
|
||||
## Historical Note
|
||||
This document was originally written to guide server admins through the upgrade
|
||||
path towards Synapse 1.0. Specifically,
|
||||
[MSC1711](https://github.com/matrix-org/matrix-doc/blob/master/proposals/1711-x509-for-federation.md)
|
||||
required that all servers present valid TLS certificates on their federation
|
||||
API. Admins were encouraged to achieve compliance from version 0.99.0 (released
|
||||
in February 2019) ahead of version 1.0 (released June 2019) enforcing the
|
||||
certificate checks.
|
||||
|
||||
Much of what follows is now outdated since most admins will have already
|
||||
upgraded; however, it may be of use to those with old installs returning to the
|
||||
project.
|
||||
|
||||
If you are setting up a server from scratch you almost certainly should look at
|
||||
the [installation guide](../INSTALL.md) instead.
|
||||
|
||||
## Introduction
|
||||
The goal of Synapse 0.99.0 is to act as a stepping stone to Synapse 1.0.0. It
|
||||
supports the r0.1 release of the server to server specification, but is
|
||||
compatible with both the legacy Matrix federation behaviour (pre-r0.1) as well
|
||||
@@ -68,16 +85,14 @@ Admins should upgrade and configure a valid CA cert. Homeservers that require a
|
||||
.well-known entry (see below), should retain their SRV record and use it
|
||||
alongside their .well-known record.
|
||||
|
||||
**>= 5th March 2019 - Synapse 1.0.0 is released**
|
||||
**10th June 2019 - Synapse 1.0.0 is released**
|
||||
|
||||
1.0.0 will land no sooner than 1 month after 0.99.0, leaving server admins one
|
||||
month after 5th February to upgrade to 0.99.0 and deploy their certificates. In
|
||||
1.0.0 is scheduled for release on 10th June. In
|
||||
accordance with the [S2S spec](https://matrix.org/docs/spec/server_server/r0.1.0.html)
|
||||
1.0.0 will enforce certificate validity. This means that any homeserver without a
|
||||
valid certificate after this point will no longer be able to federate with
|
||||
1.0.0 servers.
|
||||
|
||||
|
||||
## Configuring certificates for compatibility with Synapse 1.0.0
|
||||
|
||||
### If you do not currently have an SRV record
|
||||
@@ -145,12 +160,11 @@ You can do this with a `.well-known` file as follows:
|
||||
1. Keep the SRV record in place - it is needed for backwards compatibility
|
||||
with Synapse 0.34 and earlier.
|
||||
|
||||
2. Give synapse a certificate corresponding to the target domain
|
||||
(`customer.example.net` in the above example). Currently Synapse's ACME
|
||||
support [does not support
|
||||
this](https://github.com/matrix-org/synapse/issues/4552), so you will have
|
||||
to acquire a certificate yourself and give it to Synapse via
|
||||
`tls_certificate_path` and `tls_private_key_path`.
|
||||
2. Give Synapse a certificate corresponding to the target domain
|
||||
(`customer.example.net` in the above example). You can either use Synapse's
|
||||
built-in [ACME support](./ACME.md) for this (via the `domain` parameter in
|
||||
the `acme` section), or acquire a certificate yourself and give it to
|
||||
Synapse via `tls_certificate_path` and `tls_private_key_path`.
|
||||
|
||||
3. Restart Synapse to ensure the new certificate is loaded.
|
||||
|
||||
@@ -177,7 +191,6 @@ You can do this with a `.well-known` file as follows:
|
||||
on `customer.example.net:8000` it correctly handles HTTP requests with
|
||||
Host header set to `customer.example.net:8000`.
|
||||
|
||||
|
||||
## FAQ
|
||||
|
||||
### Synapse 0.99.0 has just been released, what do I need to do right now?
|
||||
|
||||
42  docs/admin_api/account_validity.rst  (new file)
@@ -0,0 +1,42 @@
|
||||
Account validity API
|
||||
====================
|
||||
|
||||
This API allows a server administrator to manage the validity of an account. To
|
||||
use it, you must enable the account validity feature (under
|
||||
``account_validity``) in Synapse's configuration.
|
||||
|
||||
Renew account
|
||||
-------------
|
||||
|
||||
This API extends the validity of an account by as much time as configured in the
|
||||
``period`` parameter from the ``account_validity`` configuration.
|
||||
|
||||
The API is::
|
||||
|
||||
POST /_synapse/admin/v1/account_validity/validity
|
||||
|
||||
with the following body:
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"user_id": "<user ID for the account to renew>",
|
||||
"expiration_ts": 0,
|
||||
"enable_renewal_emails": true
|
||||
}
|
||||
|
||||
|
||||
``expiration_ts`` is an optional parameter and overrides the expiration date,
|
||||
which otherwise defaults to now + validity period.
|
||||
|
||||
``enable_renewal_emails`` is also an optional parameter and enables/disables
|
||||
sending renewal emails to the user. Defaults to true.
|
||||
|
||||
The API returns with the new expiration date for this account, as a timestamp in
|
||||
milliseconds since epoch:
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"expiration_ts": 0
|
||||
}
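
For illustration only, a minimal Python sketch of this call; the homeserver URL
and admin token below are placeholders, not real values:

.. code:: python

    import requests

    # assumption: the access token belongs to a server admin
    res = requests.post(
        "http://localhost:8008/_synapse/admin/v1/account_validity/validity",
        params={"access_token": "<admin access_token>"},
        json={"user_id": "@user:example.com", "enable_renewal_emails": True},
    )
    print(res.json())  # e.g. {"expiration_ts": 1562000000000}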
|
||||
@@ -8,7 +8,7 @@ being deleted.
|
||||
The API is:
|
||||
|
||||
```
|
||||
POST /_matrix/client/r0/admin/delete_group/<group_id>
|
||||
POST /_synapse/admin/v1/delete_group/<group_id>
|
||||
```
|
||||
|
||||
including an `access_token` of a server admin.
|
||||
|
||||
@@ -4,7 +4,7 @@ This API gets a list of known media in a room.
|
||||
|
||||
The API is:
|
||||
```
|
||||
GET /_matrix/client/r0/admin/room/<room_id>/media
|
||||
GET /_synapse/admin/v1/room/<room_id>/media
|
||||
```
|
||||
including an `access_token` of a server admin.
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ paginate further back in the room from the point being purged from.
|
||||
|
||||
The API is:
|
||||
|
||||
``POST /_matrix/client/r0/admin/purge_history/<room_id>[/<event_id>]``
|
||||
``POST /_synapse/admin/v1/purge_history/<room_id>[/<event_id>]``
|
||||
|
||||
including an ``access_token`` of a server admin.
|
||||
|
||||
@@ -49,7 +49,7 @@ Purge status query
|
||||
|
||||
It is possible to poll for updates on recent purges with a second API;
|
||||
|
||||
``GET /_matrix/client/r0/admin/purge_history_status/<purge_id>``
|
||||
``GET /_synapse/admin/v1/purge_history_status/<purge_id>``
|
||||
|
||||
(again, with a suitable ``access_token``). This API returns a JSON body like
|
||||
the following:
|
||||
|
||||
@@ -6,7 +6,7 @@ media.
|
||||
|
||||
The API is::
|
||||
|
||||
POST /_matrix/client/r0/admin/purge_media_cache?before_ts=<unix_timestamp_in_ms>&access_token=<access_token>
|
||||
POST /_synapse/admin/v1/purge_media_cache?before_ts=<unix_timestamp_in_ms>&access_token=<access_token>
|
||||
|
||||
{}
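
As a rough sketch, the same call from Python (the URL and token are placeholders,
and the timestamp here simply means "purge everything cached before now")::

    import requests
    import time

    # assumption: the access token belongs to a server admin
    now_ms = int(time.time() * 1000)
    res = requests.post(
        "http://localhost:8008/_synapse/admin/v1/purge_media_cache",
        params={"before_ts": now_ms, "access_token": "<admin access_token>"},
        json={},
    )
    print(res.status_code, res.json())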
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ is not enabled.
|
||||
|
||||
To fetch the nonce, you need to request one from the API::
|
||||
|
||||
> GET /_matrix/client/r0/admin/register
|
||||
> GET /_synapse/admin/v1/register
|
||||
|
||||
< {"nonce": "thisisanonce"}
|
||||
|
||||
@@ -22,7 +22,7 @@ body containing the nonce, username, password, whether they are an admin
|
||||
|
||||
As an example::
|
||||
|
||||
> POST /_matrix/client/r0/admin/register
|
||||
> POST /_synapse/admin/v1/register
|
||||
> {
|
||||
"nonce": "thisisanonce",
|
||||
"username": "pepper_roni",
|
||||
|
||||
48  docs/admin_api/server_notices.md  (new file)
@@ -0,0 +1,48 @@
|
||||
# Server Notices
|
||||
|
||||
The API to send notices is as follows:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/send_server_notice
|
||||
```
|
||||
|
||||
or:
|
||||
|
||||
```
|
||||
PUT /_synapse/admin/v1/send_server_notice/{txnId}
|
||||
```
|
||||
|
||||
You will need to authenticate with an access token for an admin user.
|
||||
|
||||
When using the `PUT` form, retransmissions with the same transaction ID will be
|
||||
ignored in the same way as with `PUT
|
||||
/_matrix/client/r0/rooms/{roomId}/send/{eventType}/{txnId}`.
|
||||
|
||||
The request body should look something like the following:
|
||||
|
||||
```json
|
||||
{
|
||||
"user_id": "@target_user:server_name",
|
||||
"content": {
|
||||
"msgtype": "m.text",
|
||||
"body": "This is my message"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You can optionally include the following additional parameters:
|
||||
|
||||
* `type`: the type of event. Defaults to `m.room.message`.
|
||||
* `state_key`: Setting this will result in a state event being sent.
|
||||
|
||||
|
||||
Once the notice has been sent, the API will return the following response:
|
||||
|
||||
```json
|
||||
{
|
||||
"event_id": "<event_id>"
|
||||
}
|
||||
```
|
||||
|
||||
Note that server notices must be enabled in `homeserver.yaml` before this API
|
||||
can be used. See [server_notices.md](../server_notices.md) for more information.
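
As a rough illustration (the hostname and token are placeholders), the `POST` form can be driven from Python as follows:

```python
import requests

# assumption: the access token belongs to a server admin
res = requests.post(
    "http://localhost:8008/_synapse/admin/v1/send_server_notice",
    headers={"Authorization": "Bearer <admin access_token>"},
    json={
        "user_id": "@target_user:server_name",
        "content": {"msgtype": "m.text", "body": "This is my message"},
    },
)
print(res.json())  # expected: {"event_id": "<event_id>"}
```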
|
||||
@@ -5,7 +5,7 @@ This API returns information about a specific user account.
|
||||
|
||||
The api is::
|
||||
|
||||
GET /_matrix/client/r0/admin/whois/<user_id>
|
||||
GET /_synapse/admin/v1/whois/<user_id>
|
||||
|
||||
including an ``access_token`` of a server admin.
|
||||
|
||||
@@ -50,7 +50,7 @@ references to it).
|
||||
|
||||
The api is::
|
||||
|
||||
POST /_matrix/client/r0/admin/deactivate/<user_id>
|
||||
POST /_synapse/admin/v1/deactivate/<user_id>
|
||||
|
||||
with a body of:
|
||||
|
||||
@@ -69,11 +69,11 @@ An empty body may be passed for backwards compatibility.
|
||||
Reset password
|
||||
==============
|
||||
|
||||
Changes the password of another user.
|
||||
Changes the password of another user. This will automatically log the user out of all their devices.
|
||||
|
||||
The api is::
|
||||
|
||||
POST /_matrix/client/r0/admin/reset_password/<user_id>
|
||||
POST /_synapse/admin/v1/reset_password/<user_id>
|
||||
|
||||
with a body of:
|
||||
|
||||
|
||||
@@ -8,9 +8,7 @@ contains Synapse version information).
|
||||
|
||||
The api is::
|
||||
|
||||
GET /_matrix/client/r0/admin/server_version
|
||||
|
||||
including an ``access_token`` of a server admin.
|
||||
GET /_synapse/admin/v1/server_version
|
||||
|
||||
It returns a JSON body like the following:
|
||||
|
||||
|
||||
@@ -1,43 +1,60 @@
|
||||
- Everything should comply with PEP8. Code should pass
|
||||
``pep8 --max-line-length=100`` without any warnings.
|
||||
# Code Style
|
||||
|
||||
- **Indenting**:
|
||||
The Synapse codebase uses a number of code formatting tools in order to
|
||||
quickly and automatically check for formatting (and sometimes logical) errors
|
||||
in code.
|
||||
|
||||
- NEVER tabs. 4 spaces to indent.
|
||||
The necessary tools are detailed below.
|
||||
|
||||
- follow PEP8; either hanging indent or multiline-visual indent depending
|
||||
on the size and shape of the arguments and what makes more sense to the
|
||||
author. In other words, both this::
|
||||
## Formatting tools
|
||||
|
||||
print("I am a fish %s" % "moo")
|
||||
The Synapse codebase uses [black](https://pypi.org/project/black/) as an
|
||||
opinionated code formatter, ensuring all committed code is properly
|
||||
formatted.
|
||||
|
||||
and this::
|
||||
First install ``black`` with::
|
||||
|
||||
print("I am a fish %s" %
|
||||
"moo")
|
||||
pip install --upgrade black
|
||||
|
||||
and this::
|
||||
Have ``black`` auto-format your code (it shouldn't change any
|
||||
functionality) with::
|
||||
|
||||
print(
|
||||
"I am a fish %s" %
|
||||
"moo",
|
||||
)
|
||||
black . --exclude="\.tox|build|env"
|
||||
|
||||
...are valid, although given each one takes up 2x more vertical space than
|
||||
the previous, it's up to the author's discretion as to which layout makes
|
||||
most sense for their function invocation. (e.g. if they want to add
|
||||
comments per-argument, or put expressions in the arguments, or group
|
||||
related arguments together, or want to deliberately extend or preserve
|
||||
vertical/horizontal space)
|
||||
- **flake8**
|
||||
|
||||
- **Line length**:
|
||||
``flake8`` is a code checking tool. We require code to pass ``flake8`` before being merged into the codebase.
|
||||
|
||||
Max line length is 79 chars (with flexibility to overflow by a "few chars" if
|
||||
the overflowing content is not semantically significant and avoids an
|
||||
explosion of vertical whitespace).
|
||||
Install ``flake8`` with::
|
||||
|
||||
Use parentheses instead of ``\`` for line continuation where ever possible
|
||||
(which is pretty much everywhere).
|
||||
pip install --upgrade flake8
|
||||
|
||||
Check all application and test code with::
|
||||
|
||||
flake8 synapse tests
|
||||
|
||||
- **isort**
|
||||
|
||||
``isort`` ensures imports are nicely formatted, and can suggest and
|
||||
auto-fix issues such as double-importing.
|
||||
|
||||
Install ``isort`` with::
|
||||
|
||||
pip install --upgrade isort
|
||||
|
||||
Auto-fix imports with::
|
||||
|
||||
isort -rc synapse tests
|
||||
|
||||
``-rc`` means to recursively search the given directories.
|
||||
|
||||
It's worth noting that modern IDEs and text editors can run these tools
|
||||
automatically on save. It may be worth looking into whether this
|
||||
functionality is supported in your editor for a more convenient development
|
||||
workflow. It is not, however, recommended to run ``flake8`` on save as it
|
||||
takes a while and is very resource intensive.
|
||||
|
||||
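If you want a single command that runs all three tools, a small wrapper script along the following lines can work; it is only a sketch that shells out to ``black``, ``isort`` and ``flake8``, and assumes they are installed in the active environment.

```python
#!/usr/bin/env python
"""Run the Synapse formatting/linting tools in one go (illustrative sketch)."""
import subprocess
import sys

COMMANDS = [
    ["black", ".", "--exclude", r"\.tox|build|env"],
    ["isort", "-rc", "synapse", "tests"],
    ["flake8", "synapse", "tests"],
]

exit_code = 0
for cmd in COMMANDS:
    print("$", " ".join(cmd))
    # Run each tool; remember any failure but keep going so all output is shown.
    result = subprocess.run(cmd)
    exit_code = exit_code or result.returncode

sys.exit(exit_code)
```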
## General rules
|
||||
|
||||
- **Naming**:
|
||||
|
||||
@@ -46,26 +63,6 @@
|
||||
|
||||
- Use double quotes ``"foo"`` rather than single quotes ``'foo'``.
|
||||
|
||||
- **Blank lines**:
|
||||
|
||||
- There should be max a single new line between:
|
||||
|
||||
- statements
|
||||
- functions in a class
|
||||
|
||||
- There should be two new lines between:
|
||||
|
||||
- definitions in a module (e.g., between different classes)
|
||||
|
||||
- **Whitespace**:
|
||||
|
||||
There should be spaces where spaces should be and not where there shouldn't
|
||||
be:
|
||||
|
||||
- a single space after a comma
|
||||
- a single space before and after for '=' when used as assignment
|
||||
- no spaces before and after for '=' for default values and keyword arguments.
|
||||
|
||||
- **Comments**: should follow the `google code style
|
||||
<http://google.github.io/styleguide/pyguide.html?showone=Comments#Comments>`_.
|
||||
This is so that we can generate documentation with `sphinx
|
||||
@@ -76,7 +73,7 @@
|
||||
|
||||
- **Imports**:
|
||||
|
||||
- Prefer to import classes and functions than packages or modules.
|
||||
- Prefer to import classes and functions rather than packages or modules.
|
||||
|
||||
Example::
|
||||
|
||||
|
||||
@@ -14,9 +14,9 @@ up and will work provided you set the ``server_name`` to match your
|
||||
machine's public DNS hostname, and provide Synapse with a TLS certificate
|
||||
which is valid for your ``server_name``.
|
||||
|
||||
Once you have completed the steps necessary to federate, you should be able to
|
||||
join a room via federation. (A good place to start is ``#synapse:matrix.org`` - a
|
||||
room for Synapse admins.)
|
||||
Once federation has been configured, you should be able to join a room over
|
||||
federation. A good place to start is ``#synapse:matrix.org`` - a room for
|
||||
Synapse admins.
|
||||
|
||||
|
||||
## Delegation
|
||||
@@ -98,6 +98,77 @@ _matrix._tcp.<server_name>``. In our example, we would expect this:
|
||||
Note that the target of a SRV record cannot be an alias (CNAME record): it has to point
|
||||
directly to the server hosting the synapse instance.
|
||||
|
||||
### Delegation FAQ
|
||||
#### When do I need a SRV record or .well-known URI?
|
||||
|
||||
If your homeserver listens on the default federation port (8448), and your
|
||||
`server_name` points to the host that your homeserver runs on, you do not need an SRV
|
||||
record or `.well-known/matrix/server` URI.
|
||||
|
||||
For instance, if you registered `example.com` and pointed its DNS A record at a
|
||||
fresh server, you could install Synapse on that host,
|
||||
giving it a `server_name` of `example.com`, and once [ACME](acme.md) support is enabled,
|
||||
it would automatically generate a valid TLS certificate for you via Let's Encrypt
|
||||
and no SRV record or .well-known URI would be needed.
|
||||
|
||||
This is the common case, although you can add an SRV record or
|
||||
`.well-known/matrix/server` URI for completeness if you wish.
|
||||
|
||||
**However**, if your server does not listen on port 8448, or if your `server_name`
|
||||
does not point to the host that your homeserver runs on, you will need to let
|
||||
other servers know how to find it. The way to do this is via .well-known or an
|
||||
SRV record.
|
||||
|
||||
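To see which host other servers will be directed to, you can fetch the well-known document yourself; a rough sketch (``example.com`` is a placeholder for your ``server_name``):

```python
import requests

SERVER_NAME = "example.com"  # placeholder: your server_name

# Fetch the delegation document that remote homeservers check first.
resp = requests.get(f"https://{SERVER_NAME}/.well-known/matrix/server", timeout=10)
if resp.status_code == 200:
    # A typical response is {"m.server": "host:port"}.
    print("delegated to:", resp.json().get("m.server"))
else:
    print(f"no .well-known delegation (status {resp.status_code}); SRV/default port applies")
```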
#### I have created a .well-known URI. Do I still need an SRV record?
|
||||
|
||||
As of Synapse 0.99, Synapse will first check for the existence of a .well-known
|
||||
URI and follow any delegation it suggests. It will only then check for the
|
||||
existence of an SRV record.
|
||||
|
||||
That means that the SRV record will often be redundant. However, you should
|
||||
remember that there may still be older versions of Synapse in the federation
|
||||
which do not understand .well-known URIs, so if you removed your SRV record
|
||||
you would no longer be able to federate with them.
|
||||
|
||||
It is therefore best to leave the SRV record in place for now. Synapse 0.34 and
|
||||
earlier will follow the SRV record (and not care about the invalid
|
||||
certificate). Synapse 0.99 and later will follow the .well-known URI, with the
|
||||
correct certificate chain.
|
||||
|
||||
#### Can I manage my own certificates rather than having Synapse renew certificates itself?
|
||||
|
||||
Yes, you are welcome to manage your certificates yourself. Synapse will only
|
||||
attempt to obtain certificates from Let's Encrypt if you configure it to do
|
||||
so. The only requirement is that there is a valid TLS cert present for
|
||||
federation endpoints.
|
||||
|
||||
#### Do you still recommend against using a reverse proxy on the federation port?
|
||||
|
||||
We no longer actively recommend against using a reverse proxy. Many admins will
|
||||
find it easier to direct federation traffic to a reverse proxy and manage their
|
||||
own TLS certificates, and this is a supported configuration.
|
||||
|
||||
See [reverse_proxy.rst](reverse_proxy.rst) for information on setting up a
|
||||
reverse proxy.
|
||||
|
||||
#### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?
|
||||
|
||||
Practically speaking, this is no longer necessary.
|
||||
|
||||
If you are using a reverse proxy for all of your TLS traffic, then you can set
|
||||
`no_tls: True` in the Synapse config. In that case, the only reason Synapse
|
||||
needs the certificate is to populate a legacy `tls_fingerprints` field in the
|
||||
federation API. This is ignored by Synapse 0.99.0 and later, and the only time
|
||||
pre-0.99 Synapses will check it is when attempting to fetch the server keys -
|
||||
and generally this is delegated via `matrix.org`, which will be running a modern
|
||||
version of Synapse.
|
||||
|
||||
#### Do I need the same certificate for the client and federation port?
|
||||
|
||||
No. There is nothing stopping you from using different certificates,
|
||||
particularly if you are using a reverse proxy. However, Synapse will use the
|
||||
same certificate on any ports where TLS is configured.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
You can use the [federation tester](
|
||||
|
||||
@@ -48,7 +48,10 @@ How to monitor Synapse metrics using Prometheus
|
||||
- job_name: "synapse"
|
||||
metrics_path: "/_synapse/metrics"
|
||||
static_configs:
|
||||
- targets: ["my.server.here:9092"]
|
||||
- targets: ["my.server.here:port"]
|
||||
|
||||
where ``my.server.here`` is the IP address of Synapse, and ``port`` is the listener port
|
||||
configured with the ``metrics`` resource.
|
||||
|
||||
If your prometheus is older than 1.5.2, you will need to replace
|
||||
``static_configs`` in the above with ``target_groups``.
|
||||
|
||||
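To confirm the listener is actually serving metrics before wiring up Prometheus, a quick check along these lines can help; the host and port are placeholders matching the example above.

```python
import urllib.request

# Placeholders: the host and metrics listener port from your configuration.
METRICS_URL = "http://my.server.here:9092/_synapse/metrics"

with urllib.request.urlopen(METRICS_URL, timeout=10) as resp:
    body = resp.read().decode("utf-8")

# Print the first few lines of the Prometheus exposition output.
print("\n".join(body.splitlines()[:10]))
```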
@@ -1,7 +1,29 @@
|
||||
Using Postgres
|
||||
--------------
|
||||
|
||||
Postgres version 9.4 or later is known to work.
|
||||
Postgres version 9.5 or later is known to work.
|
||||
|
||||
Install postgres client libraries
|
||||
=================================
|
||||
|
||||
Synapse will require the python postgres client library in order to connect to
|
||||
a postgres database.
|
||||
|
||||
* If you are using the `matrix.org debian/ubuntu
|
||||
packages <../INSTALL.md#matrixorg-packages>`_,
|
||||
the necessary libraries will already be installed.
|
||||
|
||||
* For other pre-built packages, please consult the documentation from the
|
||||
relevant package.
|
||||
|
||||
* If you installed synapse `in a virtualenv
|
||||
<../INSTALL.md#installing-from-source>`_, you can install the library with::
|
||||
|
||||
~/synapse/env/bin/pip install matrix-synapse[postgres]
|
||||
|
||||
(substituting the path to your virtualenv for ``~/synapse/env``, if you used a
|
||||
different path). You will require the postgres development files. These are in
|
||||
the ``libpq-dev`` package on Debian-derived distributions.
|
||||
|
||||
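Once the library is installed, you can check that it can reach your database with a short test; the connection parameters below are placeholders and should match the database you create in the next section.

```python
import psycopg2

# Placeholder connection details; adjust to your own setup.
conn = psycopg2.connect(
    dbname="synapse",
    user="synapse_user",
    password="secretpassword",
    host="localhost",
)

with conn, conn.cursor() as cur:
    cur.execute("SELECT version();")
    print(cur.fetchone()[0])  # e.g. "PostgreSQL 9.5.x ..."

conn.close()
```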
Set up database
|
||||
===============
|
||||
@@ -26,29 +48,6 @@ encoding use, e.g.::
|
||||
This would create an appropriate database named ``synapse`` owned by the
|
||||
``synapse_user`` user (which must already exist).
|
||||
|
||||
Set up client in Debian/Ubuntu
|
||||
===========================
|
||||
|
||||
Postgres support depends on the postgres python connector ``psycopg2``. In the
|
||||
virtual env::
|
||||
|
||||
sudo apt-get install libpq-dev
|
||||
pip install psycopg2
|
||||
|
||||
Set up client in RHEL/CentOs 7
|
||||
==============================
|
||||
|
||||
Make sure you have the appropriate version of postgres-devel installed. For a
|
||||
postgres 9.4, use the postgres 9.4 packages from
|
||||
[here](https://wiki.postgresql.org/wiki/YUM_Installation).
|
||||
|
||||
As with Debian/Ubuntu, postgres support depends on the postgres python connector
|
||||
``psycopg2``. In the virtual env::
|
||||
|
||||
sudo yum install postgresql-devel libpqxx-devel.x86_64
|
||||
export PATH=/usr/pgsql-9.4/bin/:$PATH
|
||||
pip install psycopg2
|
||||
|
||||
Tuning Postgres
|
||||
===============
|
||||
|
||||
|
||||
@@ -69,6 +69,7 @@ Let's assume that we expect clients to connect to our server at
|
||||
SSLEngine on
|
||||
ServerName matrix.example.com;
|
||||
|
||||
AllowEncodedSlashes NoDecode
|
||||
ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon
|
||||
ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix
|
||||
</VirtualHost>
|
||||
@@ -77,6 +78,7 @@ Let's assume that we expect clients to connect to our server at
|
||||
SSLEngine on
|
||||
ServerName example.com;
|
||||
|
||||
AllowEncodedSlashes NoDecode
|
||||
ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon
|
||||
ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix
|
||||
</VirtualHost>
|
||||
@@ -87,8 +89,10 @@ Let's assume that we expect clients to connect to our server at
|
||||
bind :::443 v4v6 ssl crt /etc/ssl/haproxy/ strict-sni alpn h2,http/1.1
|
||||
|
||||
# Matrix client traffic
|
||||
acl matrix hdr(host) -i matrix.example.com
|
||||
use_backend matrix if matrix
|
||||
acl matrix-host hdr(host) -i matrix.example.com
|
||||
acl matrix-path path_beg /_matrix
|
||||
|
||||
use_backend matrix if matrix-host matrix-path
|
||||
|
||||
frontend matrix-federation
|
||||
bind :::8448 v4v6 ssl crt /etc/ssl/haproxy/synapse.pem alpn h2,http/1.1
|
||||
|
||||
@@ -23,29 +23,6 @@ server_name: "SERVERNAME"
|
||||
#
|
||||
pid_file: DATADIR/homeserver.pid
|
||||
|
||||
# CPU affinity mask. Setting this restricts the CPUs on which the
|
||||
# process will be scheduled. It is represented as a bitmask, with the
|
||||
# lowest order bit corresponding to the first logical CPU and the
|
||||
# highest order bit corresponding to the last logical CPU. Not all CPUs
|
||||
# may exist on a given system but a mask may specify more CPUs than are
|
||||
# present.
|
||||
#
|
||||
# For example:
|
||||
# 0x00000001 is processor #0,
|
||||
# 0x00000003 is processors #0 and #1,
|
||||
# 0xFFFFFFFF is all processors (#0 through #31).
|
||||
#
|
||||
# Pinning a Python process to a single CPU is desirable, because Python
|
||||
# is inherently single-threaded due to the GIL, and can suffer a
|
||||
# 30-40% slowdown due to cache blow-out and thread context switching
|
||||
# if the scheduler happens to schedule the underlying threads across
|
||||
# different cores. See
|
||||
# https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
|
||||
#
|
||||
# This setting requires the affinity package to be installed!
|
||||
#
|
||||
#cpu_affinity: 0xFFFFFFFF
|
||||
|
||||
# The path to the web client which will be served at /_matrix/client/
|
||||
# if 'webclient' is configured under the 'listeners' configuration.
|
||||
#
|
||||
@@ -69,6 +46,34 @@ pid_file: DATADIR/homeserver.pid
|
||||
#
|
||||
#use_presence: false
|
||||
|
||||
# Whether to require authentication to retrieve profile data (avatars,
|
||||
# display names) of other users through the client API. Defaults to
|
||||
# 'false'. Note that profile data is also available via the federation
|
||||
# API, so this setting is of limited value if federation is enabled on
|
||||
# the server.
|
||||
#
|
||||
#require_auth_for_profile_requests: true
|
||||
|
||||
# If set to 'false', requires authentication to access the server's public rooms
|
||||
# directory through the client API. Defaults to 'true'.
|
||||
#
|
||||
#allow_public_rooms_without_auth: false
|
||||
|
||||
# If set to 'false', forbids any other homeserver to fetch the server's public
|
||||
# rooms directory via federation. Defaults to 'true'.
|
||||
#
|
||||
#allow_public_rooms_over_federation: false
|
||||
|
||||
# The default room version for newly created rooms.
|
||||
#
|
||||
# Known room versions are listed here:
|
||||
# https://matrix.org/docs/spec/#complete-list-of-room-versions
|
||||
#
|
||||
# For example, for room version 1, default_room_version should be set
|
||||
# to "1".
|
||||
#
|
||||
#default_room_version: "4"
|
||||
|
||||
# The GC threshold parameters to pass to `gc.set_threshold`, if defined
|
||||
#
|
||||
#gc_thresholds: [700, 10, 10]
|
||||
@@ -101,6 +106,24 @@ pid_file: DATADIR/homeserver.pid
|
||||
# - nyc.example.com
|
||||
# - syd.example.com
|
||||
|
||||
# Prevent federation requests from being sent to the following
|
||||
# blacklist IP address CIDR ranges. If this option is not specified, or
|
||||
# specified with an empty list, no ip range blacklist will be enforced.
|
||||
#
|
||||
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
|
||||
# listed here, since they correspond to unroutable addresses.)
|
||||
#
|
||||
federation_ip_range_blacklist:
|
||||
- '127.0.0.0/8'
|
||||
- '10.0.0.0/8'
|
||||
- '172.16.0.0/12'
|
||||
- '192.168.0.0/16'
|
||||
- '100.64.0.0/10'
|
||||
- '169.254.0.0/16'
|
||||
- '::1/128'
|
||||
- 'fe80::/64'
|
||||
- 'fc00::/7'
|
||||
|
||||
# List of ports that Synapse should listen on, their purpose and their
|
||||
# configuration.
|
||||
#
|
||||
@@ -136,8 +159,8 @@ pid_file: DATADIR/homeserver.pid
|
||||
#
|
||||
# Valid resource names are:
|
||||
#
|
||||
# client: the client-server API (/_matrix/client). Also implies 'media' and
|
||||
# 'static'.
|
||||
# client: the client-server API (/_matrix/client), and the synapse admin
|
||||
# API (/_synapse/admin). Also implies 'media' and 'static'.
|
||||
#
|
||||
# consent: user consent forms (/_matrix/consent). See
|
||||
# docs/consent_tracking.md.
|
||||
@@ -190,7 +213,7 @@ listeners:
|
||||
- names: [client, federation]
|
||||
compress: false
|
||||
|
||||
# example additonal_resources:
|
||||
# example additional_resources:
|
||||
#
|
||||
#additional_resources:
|
||||
# "/_matrix/my/custom/endpoint":
|
||||
@@ -219,6 +242,22 @@ listeners:
|
||||
|
||||
# Monthly Active User Blocking
|
||||
#
|
||||
# Used in cases where the admin or server owner wants to limit the
|
||||
# number of monthly active users.
|
||||
#
|
||||
# 'limit_usage_by_mau' disables/enables monthly active user blocking. When
|
||||
# enabled and a limit is reached the server returns a 'ResourceLimitError'
|
||||
# with error type Codes.RESOURCE_LIMIT_EXCEEDED
|
||||
#
|
||||
# 'max_mau_value' is the hard limit of monthly active users above which
|
||||
# the server will start blocking user actions.
|
||||
#
|
||||
# 'mau_trial_days' is a means to add a grace period for active users. It
|
||||
# means that users must be active for this number of days before they
|
||||
# can be considered active and guards against the case where lots of users
|
||||
# sign up in a short space of time never to return after their initial
|
||||
# session.
|
||||
#
|
||||
#limit_usage_by_mau: False
|
||||
#max_mau_value: 50
|
||||
#mau_trial_days: 2
|
||||
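As a toy illustration of how these three settings interact (this is not Synapse's actual implementation), the behaviour described above can be pictured as:

```python
from datetime import datetime, timedelta

# Hypothetical settings mirroring the sample above.
limit_usage_by_mau = True
max_mau_value = 50
mau_trial_days = 2

def counts_towards_mau(user_first_seen: datetime, now: datetime) -> bool:
    """Toy sketch: a user only counts as 'monthly active' once past the trial period."""
    return now - user_first_seen >= timedelta(days=mau_trial_days)

def is_over_limit(monthly_active_users: int) -> bool:
    """Toy sketch: the server starts blocking actions once the hard cap is reached."""
    return limit_usage_by_mau and monthly_active_users >= max_mau_value
```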
@@ -236,6 +275,20 @@ listeners:
|
||||
# - medium: 'email'
|
||||
# address: 'reserved_user@example.com'
|
||||
|
||||
# Used by phonehome stats to group together related servers.
|
||||
#server_context: context
|
||||
|
||||
# Whether to require a user to be in the room to add an alias to it.
|
||||
# Defaults to 'true'.
|
||||
#
|
||||
#require_membership_for_aliases: false
|
||||
|
||||
# Whether to allow per-room membership profiles through the sending of membership
|
||||
# events with profile information that differs from the target's global profile.
|
||||
# Defaults to 'true'.
|
||||
#
|
||||
#allow_per_room_profiles: false
|
||||
|
||||
|
||||
## TLS ##
|
||||
|
||||
@@ -257,6 +310,49 @@ listeners:
|
||||
#
|
||||
#tls_private_key_path: "CONFDIR/SERVERNAME.tls.key"
|
||||
|
||||
# Whether to verify TLS server certificates for outbound federation requests.
|
||||
#
|
||||
# Defaults to `true`. To disable certificate verification, uncomment the
|
||||
# following line.
|
||||
#
|
||||
#federation_verify_certificates: false
|
||||
|
||||
# The minimum TLS version that will be used for outbound federation requests.
|
||||
#
|
||||
# Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note
|
||||
# that setting this value higher than `1.2` will prevent federation to most
|
||||
# of the public Matrix network: only configure it to `1.3` if you have an
|
||||
# entirely private federation setup and you can ensure TLS 1.3 support.
|
||||
#
|
||||
#federation_client_minimum_tls_version: 1.2
|
||||
|
||||
# Skip federation certificate verification on the following whitelist
|
||||
# of domains.
|
||||
#
|
||||
# This setting should only be used in very specific cases, such as
|
||||
# federation over Tor hidden services and similar. For private networks
|
||||
# of homeservers, you likely want to use a private CA instead.
|
||||
#
|
||||
# Only effective if federation_verify_certicates is `true`.
|
||||
#
|
||||
#federation_certificate_verification_whitelist:
|
||||
# - lon.example.com
|
||||
# - *.domain.com
|
||||
# - *.onion
|
||||
|
||||
# List of custom certificate authorities for federation traffic.
|
||||
#
|
||||
# This setting should only normally be used within a private network of
|
||||
# homeservers.
|
||||
#
|
||||
# Note that this list will replace those that are provided by your
|
||||
# operating environment. Certificates must be in PEM format.
|
||||
#
|
||||
#federation_custom_ca_list:
|
||||
# - myCA1.pem
|
||||
# - myCA2.pem
|
||||
# - myCA3.pem
|
||||
|
||||
# ACME support: This will configure Synapse to request a valid TLS certificate
|
||||
# for your configured `server_name` via Let's Encrypt.
|
||||
#
|
||||
@@ -319,6 +415,13 @@ acme:
|
||||
#
|
||||
#domain: matrix.example.com
|
||||
|
||||
# file to use for the account key. This will be generated if it doesn't
|
||||
# exist.
|
||||
#
|
||||
# If unspecified, we will use CONFDIR/client.key.
|
||||
#
|
||||
account_key_file: DATADIR/acme_account.key
|
||||
|
||||
# List of allowed TLS fingerprints for this server to publish along
|
||||
# with the signing keys for this server. Other matrix servers that
|
||||
# make HTTPS requests to this server will check that the TLS
|
||||
@@ -372,21 +475,15 @@ log_config: "CONFDIR/SERVERNAME.log.config"
|
||||
|
||||
## Ratelimiting ##
|
||||
|
||||
# Number of messages a client can send per second
|
||||
#
|
||||
#rc_messages_per_second: 0.2
|
||||
|
||||
# Number of message a client can send before being throttled
|
||||
#
|
||||
#rc_message_burst_count: 10.0
|
||||
|
||||
# Ratelimiting settings for registration and login.
|
||||
# Ratelimiting settings for client actions (registration, login, messaging).
|
||||
#
|
||||
# Each ratelimiting configuration is made of two parameters:
|
||||
# - per_second: number of requests a client can send per second.
|
||||
# - burst_count: number of requests a client can send before being throttled.
|
||||
#
|
||||
# Synapse currently uses the following configurations:
|
||||
# - one for messages that ratelimits sending based on the account the client
|
||||
# is using
|
||||
# - one for registration that ratelimits registration requests based on the
|
||||
# client's IP address.
|
||||
# - one for login that ratelimits login requests based on the client's IP
|
||||
@@ -399,6 +496,10 @@ log_config: "CONFDIR/SERVERNAME.log.config"
|
||||
#
|
||||
# The defaults are as shown below.
|
||||
#
|
||||
#rc_message:
|
||||
# per_second: 0.2
|
||||
# burst_count: 10
|
||||
#
|
||||
#rc_registration:
|
||||
# per_second: 0.17
|
||||
# burst_count: 3
|
||||
@@ -414,29 +515,28 @@ log_config: "CONFDIR/SERVERNAME.log.config"
|
||||
# per_second: 0.17
|
||||
# burst_count: 3
|
||||
|
||||
# The federation window size in milliseconds
|
||||
#
|
||||
#federation_rc_window_size: 1000
|
||||
|
||||
# The number of federation requests from a single server in a window
|
||||
# before the server will delay processing the request.
|
||||
# Ratelimiting settings for incoming federation
|
||||
#
|
||||
#federation_rc_sleep_limit: 10
|
||||
|
||||
# The duration in milliseconds to delay processing events from
|
||||
# remote servers by if they go over the sleep limit.
|
||||
# The rc_federation configuration is made up of the following settings:
|
||||
# - window_size: window size in milliseconds
|
||||
# - sleep_limit: number of federation requests from a single server in
|
||||
# a window before the server will delay processing the request.
|
||||
# - sleep_delay: duration in milliseconds to delay processing events
|
||||
# from remote servers if they go over the sleep limit.
|
||||
# - reject_limit: maximum number of concurrent federation requests
|
||||
# allowed from a single server
|
||||
# - concurrent: number of federation requests to concurrently process
|
||||
# from a single server
|
||||
#
|
||||
#federation_rc_sleep_delay: 500
|
||||
|
||||
# The maximum number of concurrent federation requests allowed
|
||||
# from a single server
|
||||
# The defaults are as shown below.
|
||||
#
|
||||
#federation_rc_reject_limit: 50
|
||||
|
||||
# The number of federation requests to concurrently process from a
|
||||
# single server
|
||||
#
|
||||
#federation_rc_concurrent: 3
|
||||
#rc_federation:
|
||||
# window_size: 1000
|
||||
# sleep_limit: 10
|
||||
# sleep_delay: 500
|
||||
# reject_limit: 50
|
||||
# concurrent: 3
|
||||
|
||||
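As a loose illustration of what these parameters mean (again, a sketch rather than the actual Synapse code), per-origin request handling roughly follows this shape:

```python
# Hypothetical defaults mirroring the commented-out sample above.
rc_federation = {
    "window_size": 1000,   # ms
    "sleep_limit": 10,
    "sleep_delay": 500,    # ms
    "reject_limit": 50,
    "concurrent": 3,
}

def handle_request(requests_in_window: int, in_flight: int) -> str:
    """Toy decision logic for a single incoming federation request."""
    if requests_in_window > rc_federation["reject_limit"]:
        return "reject"                                    # too many requests in this window
    if requests_in_window > rc_federation["sleep_limit"]:
        return f"delay {rc_federation['sleep_delay']}ms"   # throttle the origin server
    if in_flight >= rc_federation["concurrent"]:
        return "queue"                                     # wait for an in-flight request
    return "process"
```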
# Target outgoing federation transaction frequency for sending read-receipts,
|
||||
# per-room.
|
||||
@@ -506,11 +606,12 @@ uploads_path: "DATADIR/uploads"
|
||||
# height: 600
|
||||
# method: scale
|
||||
|
||||
# Is the preview URL API enabled? If enabled, you *must* specify
|
||||
# an explicit url_preview_ip_range_blacklist of IPs that the spider is
|
||||
# denied from accessing.
|
||||
# Is the preview URL API enabled?
|
||||
#
|
||||
#url_preview_enabled: false
|
||||
# 'false' by default: uncomment the following to enable it (and specify a
|
||||
# url_preview_ip_range_blacklist blacklist).
|
||||
#
|
||||
#url_preview_enabled: true
|
||||
|
||||
# List of IP address CIDR ranges that the URL preview spider is denied
|
||||
# from accessing. There are no defaults: you must explicitly
|
||||
@@ -520,6 +621,12 @@ uploads_path: "DATADIR/uploads"
|
||||
# synapse to issue arbitrary GET requests to your internal services,
|
||||
# causing serious security issues.
|
||||
#
|
||||
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
|
||||
# listed here, since they correspond to unroutable addresses.)
|
||||
#
|
||||
# This must be specified if url_preview_enabled is set. It is recommended that
|
||||
# you uncomment the following list as a starting point.
|
||||
#
|
||||
#url_preview_ip_range_blacklist:
|
||||
# - '127.0.0.0/8'
|
||||
# - '10.0.0.0/8'
|
||||
@@ -530,7 +637,7 @@ uploads_path: "DATADIR/uploads"
|
||||
# - '::1/128'
|
||||
# - 'fe80::/64'
|
||||
# - 'fc00::/7'
|
||||
#
|
||||
|
||||
# List of IP address CIDR ranges that the URL preview spider is allowed
|
||||
# to access even if they are specified in url_preview_ip_range_blacklist.
|
||||
# This is useful for specifying exceptions to wide-ranging blacklisted
|
||||
@@ -643,6 +750,42 @@ uploads_path: "DATADIR/uploads"
|
||||
#
|
||||
#enable_registration: false
|
||||
|
||||
# Optional account validity configuration. This allows for accounts to be denied
|
||||
# any request after a given period.
|
||||
#
|
||||
# ``enabled`` defines whether the account validity feature is enabled. Defaults
|
||||
# to False.
|
||||
#
|
||||
# ``period`` allows setting the period for which an account remains valid
|
||||
# after its registration. When renewing the account, its validity period
|
||||
# will be extended by this amount of time. This parameter is required when using
|
||||
# the account validity feature.
|
||||
#
|
||||
# ``renew_at`` is the amount of time before an account's expiry date at which
|
||||
# Synapse will send an email to the account's email address with a renewal link.
|
||||
# This needs the ``email`` and ``public_baseurl`` configuration sections to be
|
||||
# filled.
|
||||
#
|
||||
# ``renew_email_subject`` is the subject of the email sent out with the renewal
|
||||
# link. ``%(app)s`` can be used as a placeholder for the ``app_name`` parameter
|
||||
# from the ``email`` section.
|
||||
#
|
||||
# Once this feature is enabled, Synapse will look for registered users without an
|
||||
# expiration date at startup and will add one to every account it found using the
|
||||
# current settings at that time.
|
||||
# This means that, if a validity period is set, and Synapse is restarted (it will
|
||||
# then derive an expiration date from the current validity period), and some time
|
||||
# after that the validity period changes and Synapse is restarted, the users'
|
||||
# expiration dates won't be updated unless their account is manually renewed. This
|
||||
# date will be randomly selected within a range [now + period - d ; now + period],
|
||||
# where d is equal to 10% of the validity period.
|
||||
#
|
||||
#account_validity:
|
||||
# enabled: True
|
||||
# period: 6w
|
||||
# renew_at: 1w
|
||||
# renew_email_subject: "Renew your %(app)s account"
|
||||
|
||||
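For instance, the randomised expiration date assigned to pre-existing accounts at startup can be pictured like this (a sketch of the formula above, not the actual code):

```python
import random
from datetime import datetime, timedelta

period = timedelta(weeks=6)   # the configured validity period, e.g. "6w"
now = datetime.utcnow()

# Expiry is picked uniformly in [now + period - d, now + period], where d = 10% of period.
d = period * 0.1
expiration_date = now + period - d + random.random() * d
print(expiration_date)
```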
# The user must provide all of the below types of 3PID when registering.
|
||||
#
|
||||
#registrations_require_3pid:
|
||||
@@ -806,12 +949,43 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key"
|
||||
|
||||
# The trusted servers to download signing keys from.
|
||||
#
|
||||
#perspectives:
|
||||
# servers:
|
||||
# "matrix.org":
|
||||
# verify_keys:
|
||||
# "ed25519:auto":
|
||||
# key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
|
||||
# When we need to fetch a signing key, each server is tried in parallel.
|
||||
#
|
||||
# Normally, the connection to the key server is validated via TLS certificates.
|
||||
# Additional security can be provided by configuring a `verify key`, which
|
||||
# will make synapse check that the response is signed by that key.
|
||||
#
|
||||
# This setting supersedes an older setting named `perspectives`. The old format
|
||||
# is still supported for backwards-compatibility, but it is deprecated.
|
||||
#
|
||||
# Options for each entry in the list include:
|
||||
#
|
||||
# server_name: the name of the server. required.
|
||||
#
|
||||
# verify_keys: an optional map from key id to base64-encoded public key.
|
||||
# If specified, we will check that the response is signed by at least
|
||||
# one of the given keys.
|
||||
#
|
||||
# accept_keys_insecurely: a boolean. Normally, if `verify_keys` is unset,
|
||||
# and federation_verify_certificates is not `true`, synapse will refuse
|
||||
# to start, because this would allow anyone who can spoof DNS responses
|
||||
# to masquerade as the trusted key server. If you know what you are doing
|
||||
# and are sure that your network environment provides a secure connection
|
||||
# to the key server, you can set this to `true` to override this
|
||||
# behaviour.
|
||||
#
|
||||
# An example configuration might look like:
|
||||
#
|
||||
#trusted_key_servers:
|
||||
# - server_name: "my_trusted_server.example.com"
|
||||
# verify_keys:
|
||||
# "ed25519:auto": "abcdefghijklmnopqrstuvwxyzabcdefghijklmopqr"
|
||||
# - server_name: "my_other_trusted_server.example.com"
|
||||
#
|
||||
# The default configuration is:
|
||||
#
|
||||
#trusted_key_servers:
|
||||
# - server_name: "matrix.org"
|
||||
|
||||
|
||||
# Enable SAML2 for registration and login. Uses pysaml2.
|
||||
@@ -823,6 +997,12 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key"
|
||||
# so it is not normally necessary to specify them unless you need to
|
||||
# override them.
|
||||
#
|
||||
# Once SAML support is enabled, a metadata file will be exposed at
|
||||
# https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
|
||||
# use to configure your SAML IdP with. Alternatively, you can manually configure
|
||||
# the IdP to use an ACS location of
|
||||
# https://<server>:<port>/_matrix/saml2/authn_response.
|
||||
#
|
||||
#saml2_config:
|
||||
# sp_config:
|
||||
# # point this to the IdP's metadata. You can use either a local file or
|
||||
@@ -832,7 +1012,15 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key"
|
||||
# remote:
|
||||
# - url: https://our_idp/metadata.xml
|
||||
#
|
||||
# # The rest of sp_config is just used to generate our metadata xml, and you
|
||||
# # By default, the user has to go to our login page first. If you'd like to
|
||||
# # allow IdP-initiated login, set 'allow_unsolicited: True' in a
|
||||
# # 'service.sp' section:
|
||||
# #
|
||||
# #service:
|
||||
# # sp:
|
||||
# # allow_unsolicited: True
|
||||
#
|
||||
# # The examples below are just used to generate our metadata xml, and you
|
||||
# # may well not need it, depending on your setup. Alternatively you
|
||||
# # may need a whole lot more detail - see the pysaml2 docs!
|
||||
#
|
||||
@@ -855,6 +1043,12 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key"
|
||||
# # separate pysaml2 configuration file:
|
||||
# #
|
||||
# config_path: "CONFDIR/sp_conf.py"
|
||||
#
|
||||
# # the lifetime of a SAML session. This defines how long a user has to
|
||||
# # complete the authentication process, if allow_unsolicited is unset.
|
||||
# # The default is 5 minutes.
|
||||
# #
|
||||
# # saml_session_lifetime: 5m
|
||||
|
||||
|
||||
|
||||
@@ -881,6 +1075,12 @@ password_config:
|
||||
#
|
||||
#enabled: false
|
||||
|
||||
# Uncomment to disable authentication against the local password
|
||||
# database. This is ignored if `enabled` is false, and is only useful
|
||||
# if you have other password_providers.
|
||||
#
|
||||
#localdb_enabled: false
|
||||
|
||||
# Uncomment and change to a secret random string for extra security.
|
||||
# DO NOT CHANGE THIS AFTER INITIAL SETUP!
|
||||
#
|
||||
@@ -888,10 +1088,8 @@ password_config:
|
||||
|
||||
|
||||
|
||||
# Enable sending emails for notification events
|
||||
# Defining a custom URL for Riot is only needed if email notifications
|
||||
# should contain links to a self-hosted installation of Riot; when set
|
||||
# the "app_name" setting is ignored.
|
||||
# Enable sending emails for password resets, notification events or
|
||||
# account expiry notices
|
||||
#
|
||||
# If your SMTP server requires authentication, the optional smtp_user &
|
||||
# smtp_pass variables should be used
|
||||
@@ -899,19 +1097,72 @@ password_config:
|
||||
#email:
|
||||
# enable_notifs: false
|
||||
# smtp_host: "localhost"
|
||||
# smtp_port: 25
|
||||
# smtp_port: 25 # SSL: 465, STARTTLS: 587
|
||||
# smtp_user: "exampleusername"
|
||||
# smtp_pass: "examplepassword"
|
||||
# require_transport_security: False
|
||||
# notif_from: "Your Friendly %(app)s Home Server <noreply@example.com>"
|
||||
# app_name: Matrix
|
||||
# # if template_dir is unset, uses the example templates that are part of
|
||||
# # the Synapse distribution.
|
||||
#
|
||||
# # Enable email notifications by default
|
||||
# #
|
||||
# notif_for_new_users: True
|
||||
#
|
||||
# # Defining a custom URL for Riot is only needed if email notifications
|
||||
# # should contain links to a self-hosted installation of Riot; when set
|
||||
# # the "app_name" setting is ignored
|
||||
# #
|
||||
# riot_base_url: "http://localhost/riot"
|
||||
#
|
||||
# # Enable sending password reset emails via the configured, trusted
|
||||
# # identity servers
|
||||
# #
|
||||
# # IMPORTANT! This will give a malicious or overtaken identity server
|
||||
# # the ability to reset passwords for your users! Make absolutely sure
|
||||
# # that you want to do this! It is strongly recommended that password
|
||||
# # reset emails be sent by the homeserver instead
|
||||
# #
|
||||
# # If this option is set to false and SMTP options have not been
|
||||
# # configured, resetting user passwords via email will be disabled
|
||||
# #
|
||||
# #trust_identity_server_for_password_resets: false
|
||||
#
|
||||
# # Configure the time that a validation email or text message code
|
||||
# # will expire after sending
|
||||
# #
|
||||
# # This is currently used for password resets
|
||||
# #
|
||||
# #validation_token_lifetime: 1h
|
||||
#
|
||||
# # Template directory. All template files should be stored within this
|
||||
# # directory. If not set, default templates from within the Synapse
|
||||
# # package will be used
|
||||
# #
|
||||
# # For the list of default templates, please see
|
||||
# # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
|
||||
# #
|
||||
# #template_dir: res/templates
|
||||
#
|
||||
# # Templates for email notifications
|
||||
# #
|
||||
# notif_template_html: notif_mail.html
|
||||
# notif_template_text: notif_mail.txt
|
||||
# notif_for_new_users: True
|
||||
# riot_base_url: "http://localhost/riot"
|
||||
#
|
||||
# # Templates for account expiry notices
|
||||
# #
|
||||
# expiry_template_html: notice_expiry.html
|
||||
# expiry_template_text: notice_expiry.txt
|
||||
#
|
||||
# # Templates for password reset emails sent by the homeserver
|
||||
# #
|
||||
# #password_reset_template_html: password_reset.html
|
||||
# #password_reset_template_text: password_reset.txt
|
||||
#
|
||||
# # Templates for password reset success and failure pages that a user
|
||||
# # will see after attempting to reset their password
|
||||
# #
|
||||
# #password_reset_template_success_html: password_reset_success.html
|
||||
# #password_reset_template_failure_html: password_reset_failure.html
|
||||
|
||||
|
||||
#password_providers:
|
||||
@@ -972,9 +1223,9 @@ password_config:
|
||||
#
|
||||
# 'search_all_users' defines whether to search all users visible to your HS
|
||||
# when searching the user directory, rather than limiting to users visible
|
||||
# in public rooms. Defaults to false. If you set it True, you'll have to run
|
||||
# UPDATE user_directory_stream_pos SET stream_id = NULL;
|
||||
# on your database to tell it to rebuild the user_directory search indexes.
|
||||
# in public rooms. Defaults to false. If you set it True, you'll have to
|
||||
# rebuild the user_directory search indexes, see
|
||||
# https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
|
||||
#
|
||||
#user_directory:
|
||||
# enabled: true
|
||||
@@ -1032,6 +1283,22 @@ password_config:
|
||||
#
|
||||
|
||||
|
||||
|
||||
# Local statistics collection. Used in populating the room directory.
|
||||
#
|
||||
# 'bucket_size' controls how large each statistics timeslice is. It can
|
||||
# be defined in a human readable short form -- e.g. "1d", "1y".
|
||||
#
|
||||
# 'retention' controls how long historical statistics will be kept for.
|
||||
# It can be defined in a human readable short form -- e.g. "1d", "1y".
|
||||
#
|
||||
#
|
||||
#stats:
|
||||
# enabled: true
|
||||
# bucket_size: 1d
|
||||
# retention: 1y
|
||||
|
||||
|
||||
# Server Notices room configuration
|
||||
#
|
||||
# Uncomment this section to enable a room which can be used to send notices
|
||||
@@ -1115,3 +1382,16 @@ password_config:
|
||||
# alias: "*"
|
||||
# room_id: "*"
|
||||
# action: allow
|
||||
|
||||
|
||||
# Server admins can define a Python module that implements extra rules for
|
||||
# allowing or denying incoming events. In order to work, this module needs to
|
||||
# override the methods defined in synapse/events/third_party_rules.py.
|
||||
#
|
||||
# This feature is designed to be used in closed federations only, where each
|
||||
# participating server enforces the same rules.
|
||||
#
|
||||
#third_party_event_rules:
|
||||
# module: "my_custom_project.SuperRulesSet"
|
||||
# config:
|
||||
# example_option: 'things'
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
Server Notices
|
||||
==============
|
||||
# Server Notices
|
||||
|
||||
'Server Notices' are a new feature introduced in Synapse 0.30. They provide a
|
||||
channel whereby server administrators can send messages to users on the server.
|
||||
@@ -11,8 +10,7 @@ they may also find a use for features such as "Message of the day".
|
||||
This is a feature specific to Synapse, but it uses standard Matrix
|
||||
communication mechanisms, so should work with any Matrix client.
|
||||
|
||||
User experience
|
||||
---------------
|
||||
## User experience
|
||||
|
||||
When the user is first sent a server notice, they will get an invitation to a
|
||||
room (typically called 'Server Notices', though this is configurable in
|
||||
@@ -29,8 +27,7 @@ levels.
|
||||
Having joined the room, the user can leave the room if they want. Subsequent
|
||||
server notices will then cause a new room to be created.
|
||||
|
||||
Synapse configuration
|
||||
---------------------
|
||||
## Synapse configuration
|
||||
|
||||
Server notices come from a specific user id on the server. Server
|
||||
administrators are free to choose the user id - something like `server` is
|
||||
@@ -58,17 +55,7 @@ room which will be created.
|
||||
`system_mxid_display_name` and `system_mxid_avatar_url` can be used to set the
|
||||
displayname and avatar of the Server Notices user.
|
||||
|
||||
Sending notices
|
||||
---------------
|
||||
## Sending notices
|
||||
|
||||
As of the current version of synapse, there is no convenient interface for
|
||||
sending notices (other than the automated ones sent as part of consent
|
||||
tracking).
|
||||
|
||||
In the meantime, it is possible to test this feature using the manhole. Having
|
||||
gone into the manhole as described in [manhole.md](manhole.md), a notice can be
|
||||
sent with something like:
|
||||
|
||||
```
|
||||
>>> hs.get_server_notices_manager().send_notice('@user:server.com', {'msgtype':'m.text', 'body':'foo'})
|
||||
```
|
||||
To send server notices to users you can use the
|
||||
[admin_api](admin_api/server_notices.md).
|
||||
|
||||
@@ -18,226 +18,220 @@ import os
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
sys.path.insert(0, os.path.abspath('..'))
|
||||
sys.path.insert(0, os.path.abspath(".."))
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
# needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.coverage',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinxcontrib.napoleon',
|
||||
"sphinx.ext.autodoc",
|
||||
"sphinx.ext.intersphinx",
|
||||
"sphinx.ext.coverage",
|
||||
"sphinx.ext.ifconfig",
|
||||
"sphinxcontrib.napoleon",
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
templates_path = ["_templates"]
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
source_suffix = ".rst"
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
# source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
master_doc = "index"
|
||||
|
||||
# General information about the project.
|
||||
project = u'Synapse'
|
||||
copyright = u'Copyright 2014-2017 OpenMarket Ltd, 2017 Vector Creations Ltd, 2017 New Vector Ltd'
|
||||
project = "Synapse"
|
||||
copyright = (
|
||||
"Copyright 2014-2017 OpenMarket Ltd, 2017 Vector Creations Ltd, 2017 New Vector Ltd"
|
||||
)
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '1.0'
|
||||
version = "1.0"
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '1.0'
|
||||
release = "1.0"
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
# language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
# today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ['_build']
|
||||
exclude_patterns = ["_build"]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#default_role = None
|
||||
# default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
# add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
# add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
pygments_style = "sphinx"
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
# modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
#keep_warnings = False
|
||||
# keep_warnings = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'default'
|
||||
html_theme = "default"
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
# html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
# html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
# html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
# html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
html_static_path = ["_static"]
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#html_extra_path = []
|
||||
# html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
# html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
# html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
# html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
# html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
# html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
# html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
# html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
# html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'Synapsedoc'
|
||||
htmlhelp_basename = "Synapsedoc"
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
('index', 'Synapse.tex', u'Synapse Documentation',
|
||||
u'TNG', 'manual'),
|
||||
]
|
||||
latex_documents = [("index", "Synapse.tex", "Synapse Documentation", "TNG", "manual")]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
# latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
# latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
# latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
# latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'synapse', u'Synapse Documentation',
|
||||
[u'TNG'], 1)
|
||||
]
|
||||
man_pages = [("index", "synapse", "Synapse Documentation", ["TNG"], 1)]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
# man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
@@ -246,26 +240,32 @@ man_pages = [
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'Synapse', u'Synapse Documentation',
|
||||
u'TNG', 'Synapse', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
(
|
||||
"index",
|
||||
"Synapse",
|
||||
"Synapse Documentation",
|
||||
"TNG",
|
||||
"Synapse",
|
||||
"One line description of project.",
|
||||
"Miscellaneous",
|
||||
)
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
# texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
# texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
# texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#texinfo_no_detailmenu = False
|
||||
# texinfo_no_detailmenu = False
|
||||
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
intersphinx_mapping = {'http://docs.python.org/': None}
|
||||
intersphinx_mapping = {"http://docs.python.org/": None}
|
||||
|
||||
napoleon_include_special_with_doc = True
|
||||
napoleon_use_ivar = True
|
||||
|
||||
@@ -7,11 +7,7 @@ who are present in a publicly viewable room present on the server.
|
||||
|
||||
The directory info is stored in various tables, which can (typically after
|
||||
DB corruption) get stale or out of sync. If this happens, for now the
|
||||
quickest solution to fix it is:
|
||||
|
||||
```
|
||||
UPDATE user_directory_stream_pos SET stream_id = NULL;
|
||||
```
|
||||
|
||||
and restart the synapse, which should then start a background task to
|
||||
solution to fix it is to execute the SQL here
|
||||
https://github.com/matrix-org/synapse/blob/master/synapse/storage/schema/delta/53/user_dir_populate.sql
|
||||
and then restart synapse. This should then start a background task to
|
||||
flush the current tables and regenerate the directory.
|
||||
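A sketch of applying that SQL with psycopg2, assuming you have saved `user_dir_populate.sql` locally; the connection details are placeholders.

```python
import psycopg2

# Placeholder connection details for the Synapse database.
conn = psycopg2.connect(dbname="synapse", user="synapse_user", host="localhost")

with open("user_dir_populate.sql") as f:
    sql = f.read()

with conn, conn.cursor() as cur:
    cur.execute(sql)   # resets the user directory tables/stream position

conn.close()
# Now restart Synapse so it repopulates the directory in the background.
```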
|
||||